repo_name
stringlengths
5
92
path
stringlengths
4
221
copies
stringclasses
19 values
size
stringlengths
4
6
content
stringlengths
766
896k
license
stringclasses
15 values
hash
int64
-9,223,277,421,539,062,000
9,223,102,107B
line_mean
float64
6.51
99.9
line_max
int64
32
997
alpha_frac
float64
0.25
0.96
autogenerated
bool
1 class
ratio
float64
1.5
13.6
config_test
bool
2 classes
has_no_keywords
bool
2 classes
few_assignments
bool
1 class
Transkribus/TWI-edit
views.py
1
16936
#imports of python modules import json import sys import re import random from xml.etree import ElementTree #Imports of django modules from django.http import HttpResponse from django.http import JsonResponse from django.shortcuts import render from django.http import HttpResponseRedirect from django.utils import translation from django.contrib.auth.models import User from django.contrib.auth.decorators import login_required from django.contrib import messages from django.utils.translation import ugettext_lazy as _ from django.template.loader import render_to_string from django.utils.html import escape #Imports pf <del>read</del> utils modules from apps.utils.services import * from apps.utils.utils import crop import settings import apps.edit.settings from apps.navigation import navigation #Imports from app (library) #import library.settings #import library.navigation# TODO Fix this import!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! #from library.forms import RegisterForm, IngestMetsUrlForm, MetsFileForm #from profiler import profile #profile is a decorator, but things get circular if I include it in decorators.py so... @login_required #def proofread(request, collId, docId, page=None, transcriptId=None):# TODO Decide whether to select which transcript to work with unless it should always be the newest? def proofread(request, collId, docId, page, transcriptId=None):# TODO Decide whether to select which transcript to work with unless it should always be the newest? 
t = request.user.tsdata.t #RM default to page 1 # if page is None : # page = 1 current_transcript = t.current_transcript(request, collId, docId, page) if isinstance(current_transcript,HttpResponse): return apps.utils.views.error_view(request,current_transcript) transcript = t.transcript(request, current_transcript.get("tsId"), current_transcript.get("url")) if isinstance(transcript,HttpResponse): return apps.utils.views.error_view(request,transcript) transcriptId = str(transcript.get("tsId")) if request.method == 'POST':# This is by JQuery... content = json.loads(request.POST.get('content')) transcript_xml = t.transcript_xml(request, transcriptId, current_transcript.get("url")) if isinstance(transcript_xml,HttpResponse): return apps.utils.views.error_view(request,transcript_xml) transcript_root = ElementTree.fromstring(transcript_xml) # TODO Decide what to do about regionId... It's not necessary.... for text_region in transcript_root.iter('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}TextRegion'):# We have to have the namespace... regionTextEquiv = "" for line in text_region.iter('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}TextLine'): modified_text = content.get(line.get("id")) # Only lines which have changed are submitted... 
if None == modified_text: modified_text = line.find('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}TextEquiv').find('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}Unicode').text else: line.find('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}TextEquiv').find('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}Unicode').text = modified_text regionTextEquiv += modified_text +"\r\n" text_region.find('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}TextEquiv').find('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}Unicode').text = regionTextEquiv t.save_transcript(request, ElementTree.tostring(transcript_root), collId, docId, page, transcriptId) current_transcript = t.current_transcript(request, collId, docId, page)# We want the updated transcript now. if isinstance(current_transcript,HttpResponse): return apps.utils.views.error_view(request,current_transcript) return HttpResponse(str(_("Transcript saved!")), content_type="text/plain") else: regions=transcript.get("PcGts").get("Page").get("TextRegion"); if isinstance(regions, dict): regions = [regions] lineList = [] if regions: for x in regions: lines = x.get("TextLine") if isinstance(lines, dict): lineList.extend([lines]) else: # Assume that lines is a list of lines for line in lines: lineList.extend([line]) # TODO Use "readingorder"? 
if lineList: for line in lineList: line['crop'] = crop(line.get("Coords").get("@points"))#,True) line['id'] = line.get("@id") line['Unicode'] = line.get('TextEquiv').get('Unicode') #RM need to test whether this has been successful document = t.document(request, collId, docId, -1) if isinstance(document,HttpResponse): return apps.utils.views.error_view(request,document) return render(request, 'edit/proofread.html', { 'imageUrl': document.get('pageList').get('pages')[int(page) - 1].get("url"), 'lines': lineList }) @login_required def correct(request, collId, docId, page=None, transcriptId=None):# TODO Decide whether to select which transcript to work with unless it should always be the newest? #def correct(request, collId, docId, page, transcriptId=None):# TODO Decide whether to select which transcript to work with unless it should always be the newest? t = request.user.tsdata.t #RM default to page 1 if page is None : page = 1 #Use this to get the role of the current user untils such time as it is available from t.collection role = apps.utils.utils.get_role(request,collId) if 'edit' in request.path and not (role == 'Editor' or role == 'Owner' or role == 'Admin' or role == 'CrowdTranscriber' or role == 'Transcriber'): t_log('Redirect user due to insufficient role access. 
[from: %s to: %s]' % (request.get_full_path(), request.get_full_path().replace('edit', 'view'))) return HttpResponseRedirect(request.get_full_path().replace('edit', 'view')) current_transcript = t.current_transcript(request, collId, docId, page) if isinstance(current_transcript,HttpResponse): return apps.utils.views.error_view(request,current_transcript) transcript = t.transcript(request, current_transcript.get("tsId"), current_transcript.get("url")) if isinstance(transcript,HttpResponse): return apps.utils.views.error_view(request,transcript) #RM Add arrow-in-breadcrumb-bar navigation to sibling documents collection = t.collection(request, {'collId': collId}) # nav = navigation.up_next_prev(request,"document",docId,collection,[collId]) navdata = navigation.get_nav(collection,docId,'docId','title') transcriptId = str(transcript.get("tsId")) if request.method == 'POST':# This is by JQuery... if 'content' in request.POST: content = json.loads(request.POST.get('content')) transcript_xml = t.transcript_xml(request, transcriptId, current_transcript.get("url")) if isinstance(transcript_xml,HttpResponse): return apps.utils.views.error_view(request,transcript_xml) transcript_root = ElementTree.fromstring(transcript_xml) # TODO Decide what to do about regionId... It's not necessary.... for text_region in transcript_root.iter('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}TextRegion'):# We have to have the namespace... 
regionTextEquiv = "" for line in text_region.iter('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}TextLine'): modified_content = content.get(text_region.get("id") + line.get("id")) if "custom" in modified_content : line.set("custom", modified_content.get("custom")) if "Unicode" in modified_content : modified_text = modified_content.get("Unicode") regionTextEquiv += modified_text +"\r\n" t_equiv = line.find('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}TextEquiv') ############################################################## # RM in cases where the is no TextQuiv (or Unicde) tag already # We must make one before attempting to add modified text ############################################################# if t_equiv is None : t_equiv = ElementTree.SubElement(line,'{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}TextEquiv') ElementTree.SubElement(t_equiv,'{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}Unicode') t_equiv.find('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}Unicode').text = modified_text r_text_equiv = text_region.find('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}TextEquiv') ############################################################## # RM in cases where the is no TextQuiv (or Unicde) tag already # We must make one before attempting to add modified text ############################################################# if r_text_equiv is None: r_text_equiv = ElementTree.SubElement(text_region,'{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}TextEquiv') ElementTree.SubElement(r_text_equiv,'{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}Unicode') r_text_equiv.find('{http://schema.primaresearch.org/PAGE/gts/pagecontent/2013-07-15}Unicode').text = regionTextEquiv t.save_transcript(request, ElementTree.tostring(transcript_root), collId, docId, page, transcriptId) current_transcript = t.current_transcript(request, 
collId, docId, page)# We want the updated transcript now. #RM add some error catching (though somewhat suboptimal) if isinstance(current_transcript,HttpResponse): t_log("current_transcript request has failed... %s" % current_transcript) #For now this will do but there may be other reasons the transckribus request fails... return apps.utils.views.error_view(request, current_transcript) success_message = str(_("Transcript saved!")) return HttpResponse(success_message, content_type="text/plain") elif 'status' in request.POST: t.save_page_status(request, request.POST.get('status'), collId, docId, page, transcriptId) success_message = str(_("Page status changed!")) return HttpResponse(success_message, content_type="text/plain") else: regions = transcript.get("PcGts").get("Page").get("TextRegion"); if isinstance(regions, dict): regions = [regions] lineList = [] #regionData = [] # Let's leave this here for now, it might still be needed. if regions: for x in regions: lines = x.get("TextLine") # Region! 
region_width = crop(x.get("Coords").get("@points"), 1).get('w') if lines: if isinstance(lines, dict): lines['regionWidth'] = region_width lines['@id'] = x.get("@id") + lines['@id'] # TODO Figure out why this results in region_blah_region_blah_line instead of just region_blah_line_, the transcript already has the duplicate region_blah for each line lineList.extend([lines]) #regionData.extend([x.get("@id"), 1]) else: # Assume that lines is a list of lines for line in lines: line['regionWidth'] = region_width line['@id'] = x.get("@id") + line['@id'] # TODO Figure out why this results in region_blah_region_blah_line instead of just region_blah_line_, the transcript already has the duplicate region_blah for each line lineList.extend([line]) #regionData.extend([x.get("@id"), len(lines)]) content_dict = {} # TODO Unmessify this, the loop below might be better placed inside the one above if lineList: for line in lineList: line_crop = crop(line.get("Coords").get("@points")) line['crop'] = line_crop textEquiv = line.get("TextEquiv") if textEquiv: unicode = textEquiv.get("Unicode") if unicode: line['Unicode'] = unicode.replace(" ", "\u00A0") else: line['Unicode'] = "" else: if 'edit' in request.path: t_log('Redirect user back to view mode since no lines in on page. [from: %s to: %s]' % (request.get_full_path(), request.get_full_path().replace('edit', 'view'))) return HttpResponseRedirect(request.get_full_path().replace('edit', 'view')) # Get thumbnails # RM Make one document request here... # RM need to test whether this has been successful document = t.document(request, collId, docId, -1) if isinstance(document,HttpResponse): return apps.utils.views.error_view(request,document) # RM and get pages from the result... 
and also the url further down pages = document.get('pageList').get('pages') thumb_urls =[] for thumb_page in pages: if 0 < thumb_page.get("tsList").get("transcripts")[0].get("nrOfLines"): if 0 < thumb_page.get("tsList").get("transcripts")[0].get("nrOfTranscribedLines"): thumb_urls.append("['" + escape(thumb_page.get("thumbUrl")).replace("&amp;", "&") + "', 'transcribed']")# The JavaScript must get the strings like this. else: thumb_urls.append("['" + escape(thumb_page.get("thumbUrl")).replace("&amp;", "&") + "', 'only-segmented']")# The JavaScript must get the strings like this. else: thumb_urls.append("['" + escape(thumb_page.get("thumbUrl")).replace("&amp;", "&") + "', 'no-segmentation']")# The JavaScript must get the strings like this. pageStatus = document.get('pageList').get('pages')[int(page) - 1].get("tsList").get('transcripts')[0].get('status') if pageStatus == 'GT' and 'edit' in request.path: t_log('Redirect user back to view mode since page status is GT. [from: %s to: %s]' % (request.get_full_path(), request.get_full_path().replace('edit', 'view'))) return HttpResponseRedirect(request.get_full_path().replace('edit', 'view')) i = request.GET.get('i') if request.GET.get('i') else 'i' if i == 'sbs' or i == 't' and 'edit' in request.path: t_log('Redirect user back to view mode since interface "sbs" and "t" do not support edit. [from: %s to: %s]' % (request.get_full_path(), request.get_full_path().replace('edit', 'view'))) return HttpResponseRedirect(request.get_full_path().replace('edit', 'view')) tags = [ {"name": "abbrev", "color": "FF0000"}, {"name": "date", "color": "0000FF"}, {"name": "gap", "color": "1CE6FF"}, {"name": "person", "color": "00FF00"}, {"name": "place", "color": "8A2BE2"}, {"name": "unclear", "color": "FFCC66"}, {"name": "organization", "color": "FF00FF"} ] #RM defined the dict for all the stuff going to the view so... 
view_data = { 'imageUrl': document.get('pageList').get('pages')[int(page) - 1].get("url"), 'pageStatus': pageStatus, 'lines': lineList, 'thumbArray': "[" + ", ".join(thumb_urls) + "]", 'collId': collId, 'collName': document.get('collection').get('colName'), 'docId': docId, 'title': document.get('md').get('title'), 'pageNo': page, 'tags': tags, 'i': i, 'role': role, 'metadata' : document.get('md'), #'regionData': regionData, } # we can add the navdata to the end of it view_data.update(navdata) return render(request, 'edit/correct.html', view_data)
gpl-3.0
4,320,347,720,505,138,000
58.633803
237
0.606991
false
4.061391
false
false
false
RobLoach/lutris
lutris/util/wineprefix.py
1
2815
import os from lutris.util.wineregistry import WineRegistry from lutris.util.log import logger from lutris.util import joypad class WinePrefixManager: """Class to allow modification of Wine prefixes without the use of Wine""" hkcu_prefix = "HKEY_CURRENT_USER" def __init__(self, path): self.path = path def setup_defaults(self): self.sandbox() self.override_dll("winemenubuilder.exe", "") def get_registry_path(self, key): if key.startswith(self.hkcu_prefix): return os.path.join(self.path, 'user.reg') else: raise ValueError("Unsupported key '{}'".format(key)) def get_key_path(self, key): if key.startswith(self.hkcu_prefix): return key[len(self.hkcu_prefix) + 1:] else: raise ValueError( "The key {} is currently not supported by WinePrefixManager".format(key) ) def set_registry_key(self, key, subkey, value): registry = WineRegistry(self.get_registry_path(key)) registry.set_value(self.get_key_path(key), subkey, value) registry.save() def clear_registry_key(self, key): registry = WineRegistry(self.get_registry_path(key)) registry.clear_key(self.get_key_path(key)) registry.save() def override_dll(self, dll, mode): key = self.hkcu_prefix + "/Software/Wine/DllOverrides" if mode.startswith("dis"): mode = "" if mode not in ("builtin", "native", "builtin,native", "native,builtin", ""): logger.error("DLL override '%s' mode is not valid", mode) return self.set_registry_key(key, dll, mode) def sandbox(self): user = os.getenv('USER') user_dir = os.path.join(self.path, "drive_c/users/", user) # Replace symlinks if os.path.exists(user_dir): for item in os.listdir(user_dir): path = os.path.join(user_dir, item) if os.path.islink(path): os.unlink(path) os.makedirs(path) def set_crash_dialogs(self, enabled): """Enable or diable Wine crash dialogs""" key = self.hkcu_prefix + "/Software/Wine/WineDbg" value = 1 if enabled else 0 self.set_registry_key(key, "ShowCrashDialog", value) def configure_joypads(self): joypads = joypad.get_joypads() key = self.hkcu_prefix + 
'/Software/Wine/DirectInput/Joysticks' self.clear_registry_key(key) for device, joypad_name in joypads: if 'event' in device: disabled_joypad = "{} (js)".format(joypad_name) else: disabled_joypad = "{} (event)".format(joypad_name) self.set_registry_key(key, disabled_joypad, 'disabled')
gpl-3.0
4,930,818,353,654,908,000
35.558442
88
0.593961
false
3.595147
false
false
false
gamesun/MyTerm-for-YellowStone
appInfo.py
1
1742
#!/usr/bin/env python # -*- coding: UTF-8 -*- # # Copyright (c) 2013, gamesun # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of gamesun nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY GAMESUN "AS IS" AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL GAMESUN BE LIABLE FOR ANY DIRECT, # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING # IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # title = 'MyTerm for YellowStone' version = '1.0' file_name = title + ' ' + version url = 'https://github.com/gamesun/MyTerm-for-YellowStone#myterm-for-yellowstone' author = 'gamesun' copyright = 'Copyright (C) 2013, gamesun'
bsd-3-clause
-5,196,408,145,172,072,000
42.55
80
0.762342
false
4.127962
false
false
false
thinker0/aurproxy
tellapart/aurproxy/metrics/publisher.py
1
6667
# Copyright 2015 TellApart, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Base metrics client and derived implementations. """ __copyright__ = 'Copyright (C) 2015 TellApart, Inc. All Rights Reserved.' from abc import ( ABCMeta, abstractmethod) from tellapart.aurproxy.util import ( get_logger, PeriodicTask) logger = get_logger(__name__) class FlushEngine(object): """Class that uses some scheduling mechanism (threading, gevent, etc.) in order to periodically call flush_fn. """ __metaclass__ = ABCMeta def __init__(self, period, flush_fn): """ Args: period - The period in seconds at which to flush. flush_fn - The function to call. """ self._period = period self._flush_fn = flush_fn @abstractmethod def start(self): """Starts the engine. """ @abstractmethod def stop(self): """Stops the engine. """ class ThreadFlushEngine(FlushEngine): """Class that uses a thread to periodically flush. """ def __init__(self, period, flush_fn): super(ThreadFlushEngine, self).__init__(period, flush_fn) self._thread = PeriodicTask(self._period, self._flush_fn) def start(self): """Override of base method. """ self._thread.start() def stop(self): """Override of base method. """ self._thread.stop() class MetricPublisher(object): """Base definition of a class intended to publish metrics to external sources. """ __metaclass__ = ABCMeta def __init__(self, source, period=60, flush_engine=ThreadFlushEngine): """ Args: source - The identifier to use as the source of the data when publishing. 
period - The period in seconds at which to publish metrics. flush_engine - The type or instance of a FlushEngine used to schedule publication. """ self._period = period self._source = source if isinstance(flush_engine, type): self._flush_engine = flush_engine(self._period, self.publish) else: self._flush_engine = flush_engine self._metric_stores = [] self._started = False @abstractmethod def publish(self): """Publishes metrics to an external endpoint. """ def register_store(self, metric_store): """Registers a metric store with the publisher. Args: metric_store - A MetricStore object. """ # Only start flushing after registration has occurred. if not self._started: self._flush_engine.start() self._started = True self._metric_stores.append(metric_store) class LibratoMetricPublisher(MetricPublisher): """Implementation of a MetricPublisher that publishes to Librato. """ def __init__(self, api_user, api_token, source, period=60, flush_engine=ThreadFlushEngine): """ Args: api_user - The API User for Librato. api_token - The API Token for Librato. source - The identifier to use as the source of the data when publishing. period - The period in seconds at which to publish metrics. flush_engine - The type or instance of a FlushEngine used to schedule publication. """ self._api_user = api_user self._api_token = api_token super(LibratoMetricPublisher, self).__init__(source, period, flush_engine) def _get_queue(self): """Gets a Librato Queue object for bulk submission of metrics. Returns: A Librato Queue object. """ import librato from librato import Queue connection = librato.connect(self._api_user, self._api_token) return Queue(connection) def publish(self): """Override of base method. 
""" try: logger.info('Publishing metrics to Librato.') queue = self._get_queue() for store in self._metric_stores: for metric in store.get_metrics(): queue.add( name=metric.name, value=metric.value(), type=metric.metric_type.lower(), source=self._source, period=self._period, # Enable Service-Side aggregation by default. attributes={'aggregate': True}) # The Librato queue object takes care of chunking the POSTs on submit. queue.submit() except Exception: logger.exception('Failed to publish metrics to Librato!') class OpenTSDBMetricPublisher(MetricPublisher): """Implementation of a MetricPublisher that publishes to OpenTSDB. """ def __init__(self, prefix, host, port, source, period=60, flush_engine=ThreadFlushEngine): """ Args: host - hostname. port - host port. source - The identifier to use as the source of the data when publishing. period - The period in seconds at which to publish metrics. flush_engine - The type or instance of a FlushEngine used to schedule publication. """ self._prefix = prefix self._host = host self._port = int(port) super(OpenTSDBMetricPublisher, self).__init__(source, period, flush_engine) def hostname(self): import socket return socket.gethostname() def publish(self): import os import time import struct from socket import socket, AF_INET, SOCK_STREAM, SOL_SOCKET, SO_REUSEADDR, SO_LINGER, IPPROTO_TCP, TCP_NODELAY """Override of base method. 
""" try: logger.debug('Publishing metrics to OpenTSDB.') sock = socket(AF_INET, SOCK_STREAM) sock.settimeout(3) sock.connect((self._host, self._port)) sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1) sock.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1) sock.setsockopt(SOL_SOCKET, SO_LINGER, struct.pack('ii', 1, 0)) ts = int(time.time()) for store in self._metric_stores: for metric in store.get_metrics(): request = "put %s%s%s %d %f host=%s pid=%d" % (self._prefix, self._source, metric.name, ts, metric.value(), self.hostname(), os.getpid()) logger.debug('Publishing: %s' % (request)) sock.sendall(request + "\n") sock.close() except Exception: logger.exception('Failed to publish metrics to OpenTSDB!')
apache-2.0
4,870,028,771,380,186,000
29.865741
117
0.646468
false
4.02111
false
false
false
HeavenMin/PlantImageRecognition
Dataset Process/Delete_Anaything_Not_Flower.py
1
2191
""" AUTHOR : Lang PURPOSE : Multi Self Deep Learning """ __author__ = 'Lang' import tensorflow as tf, sys import os # change this as you see fit graph_path_temple = sys.argv[1] label_path_temple = sys.argv[2] graph_path = os.path.abspath(graph_path_temple) label_path = os.path.abspath(label_path_temple) # Loads label file, strips off carriage return label_lines = [line.rstrip() for line in tf.gfile.GFile(label_path)] # Unpersists graph from file with tf.gfile.FastGFile(graph_path, 'rb') as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) _ = tf.import_graph_def(graph_def, name='') count = 0 tracing = open("processing.txt",'w') tracing.close() for image_dir_path in os.listdir('.'): try: for image_path in os.listdir(image_dir_path): try: # Read in the image_data image_data = tf.gfile.FastGFile(image_dir_path+'/'+image_path, 'rb').read() with tf.Session() as sess: # Feed the image_data as input to the graph and get first prediction softmax_tensor = sess.graph.get_tensor_by_name('final_result:0') predictions = sess.run(softmax_tensor, \ {'DecodeJpeg/contents:0': image_data}) # Sort to show labels of first prediction in order of confidence top_k = predictions[0].argsort()[-len(predictions[0]):][::-1] if label_lines[top_k[0]] == "no": os.remove(image_dir_path+'/'+image_path) print('removed picture '+image_path) else: print('remain picture '+image_path) except: os.remove(image_dir_path+'/'+image_path) print('removed picture'+image_path) count = count +1 tracing = open("processing.txt",'a') tracing.write("finish " + str(count) + " kinds of removing not flower pictures\n") tracing.close() except: print('error:'+ image_dir_path) tracing = open("processing.txt",'a') tracing.write("all finished") tracing.close()
apache-2.0
-2,854,865,646,833,656,000
30.753623
91
0.572798
false
3.713559
false
false
false
mojolab/LivingData
tools/gdocstest.py
1
1443
#!/usr/bin/python import httplib2 import pprint from apiclient.discovery import build from apiclient.http import MediaFileUpload from oauth2client.client import OAuth2WebServerFlow # Copy your credentials from the APIs Console CLIENT_ID = "110041408722.apps.googleusercontent.com" CLIENT_SECRET = "IGeDmFs_w1mieqQ_s9-PJaNN" # Check https://developers.google.com/drive/scopes for all available scopes OAUTH_SCOPE = 'https://www.googleapis.com/auth/drive' # Redirect URI for installed apps REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob' # Path to the file to upload FILENAME = '/home/mojoarjun/CSV/HIVOSPOSTLOG-CIRCLES.CSV' # Run through the OAuth flow and retrieve credentials flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, OAUTH_SCOPE, REDIRECT_URI) authorize_url = flow.step1_get_authorize_url() print 'Go to the following link in your browser: ' + authorize_url code = raw_input('Enter verification code: ').strip() credentials = flow.step2_exchange(code) # Create an httplib2.Http object and authorize it with our credentials http = httplib2.Http() http = credentials.authorize(http) drive_service = build('drive', 'v2', http=http) # Insert a file media_body = MediaFileUpload(FILENAME, mimetype='text/plain', resumable=True) body = { 'title': 'My document', 'description': 'A test document', 'mimeType': 'text/plain' } file = drive_service.files().insert(body=body, media_body=media_body).execute() pprint.pprint(file)
apache-2.0
7,104,577,775,863,679,000
30.369565
79
0.763687
false
3.199557
false
false
false
colour-science/colour-demosaicing
colour_demosaicing/bayer/demosaicing/menon2007.py
1
10345
# -*- coding: utf-8 -*- """ DDFAPD - Menon (2007) Bayer CFA Demosaicing =========================================== *Bayer* CFA (Colour Filter Array) DDFAPD - *Menon (2007)* demosaicing. References ---------- - :cite:`Menon2007c` : Menon, D., Andriani, S., & Calvagno, G. (2007). Demosaicing With Directional Filtering and a posteriori Decision. IEEE Transactions on Image Processing, 16(1), 132-141. doi:10.1109/TIP.2006.884928 """ import numpy as np from scipy.ndimage.filters import convolve, convolve1d from colour.utilities import as_float_array, tsplit, tstack from colour_demosaicing.bayer import masks_CFA_Bayer __author__ = 'Colour Developers' __copyright__ = 'Copyright (C) 2015-2021 - Colour Developers' __license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause' __maintainer__ = 'Colour Developers' __email__ = 'colour-developers@colour-science.org' __status__ = 'Production' __all__ = [ 'demosaicing_CFA_Bayer_Menon2007', 'demosaicing_CFA_Bayer_DDFAPD', 'refining_step_Menon2007' ] def _cnv_h(x, y): """ Helper function for horizontal convolution. """ return convolve1d(x, y, mode='mirror') def _cnv_v(x, y): """ Helper function for vertical convolution. """ return convolve1d(x, y, mode='mirror', axis=0) def demosaicing_CFA_Bayer_Menon2007(CFA, pattern='RGGB', refining_step=True): """ Returns the demosaiced *RGB* colourspace array from given *Bayer* CFA using DDFAPD - *Menon (2007)* demosaicing algorithm. Parameters ---------- CFA : array_like *Bayer* CFA. pattern : unicode, optional **{'RGGB', 'BGGR', 'GRBG', 'GBRG'}**, Arrangement of the colour filters on the pixel array. refining_step : bool Perform refining step. Returns ------- ndarray *RGB* colourspace array. 
Notes ----- - The definition output is not clipped in range [0, 1] : this allows for direct HDRI / radiance image generation on *Bayer* CFA data and post demosaicing of the high dynamic range data as showcased in this `Jupyter Notebook <https://github.com/colour-science/colour-hdri/\ blob/develop/colour_hdri/examples/\ examples_merge_from_raw_files_with_post_demosaicing.ipynb>`__. References ---------- :cite:`Menon2007c` Examples -------- >>> CFA = np.array( ... [[ 0.30980393, 0.36078432, 0.30588236, 0.3764706 ], ... [ 0.35686275, 0.39607844, 0.36078432, 0.40000001]]) >>> demosaicing_CFA_Bayer_Menon2007(CFA) array([[[ 0.30980393, 0.35686275, 0.39215687], [ 0.30980393, 0.36078432, 0.39607844], [ 0.30588236, 0.36078432, 0.39019608], [ 0.32156864, 0.3764706 , 0.40000001]], <BLANKLINE> [[ 0.30980393, 0.35686275, 0.39215687], [ 0.30980393, 0.36078432, 0.39607844], [ 0.30588236, 0.36078432, 0.39019609], [ 0.32156864, 0.3764706 , 0.40000001]]]) >>> CFA = np.array( ... [[ 0.3764706 , 0.36078432, 0.40784314, 0.3764706 ], ... 
[ 0.35686275, 0.30980393, 0.36078432, 0.29803923]]) >>> demosaicing_CFA_Bayer_Menon2007(CFA, 'BGGR') array([[[ 0.30588236, 0.35686275, 0.3764706 ], [ 0.30980393, 0.36078432, 0.39411766], [ 0.29607844, 0.36078432, 0.40784314], [ 0.29803923, 0.3764706 , 0.42352942]], <BLANKLINE> [[ 0.30588236, 0.35686275, 0.3764706 ], [ 0.30980393, 0.36078432, 0.39411766], [ 0.29607844, 0.36078432, 0.40784314], [ 0.29803923, 0.3764706 , 0.42352942]]]) """ CFA = as_float_array(CFA) R_m, G_m, B_m = masks_CFA_Bayer(CFA.shape, pattern) h_0 = np.array([0, 0.5, 0, 0.5, 0]) h_1 = np.array([-0.25, 0, 0.5, 0, -0.25]) R = CFA * R_m G = CFA * G_m B = CFA * B_m G_H = np.where(G_m == 0, _cnv_h(CFA, h_0) + _cnv_h(CFA, h_1), G) G_V = np.where(G_m == 0, _cnv_v(CFA, h_0) + _cnv_v(CFA, h_1), G) C_H = np.where(R_m == 1, R - G_H, 0) C_H = np.where(B_m == 1, B - G_H, C_H) C_V = np.where(R_m == 1, R - G_V, 0) C_V = np.where(B_m == 1, B - G_V, C_V) D_H = np.abs(C_H - np.pad(C_H, ((0, 0), (0, 2)), mode=str('reflect'))[:, 2:]) D_V = np.abs(C_V - np.pad(C_V, ((0, 2), (0, 0)), mode=str('reflect'))[2:, :]) del h_0, h_1, CFA, C_V, C_H k = np.array( [[0, 0, 1, 0, 1], [0, 0, 0, 1, 0], [0, 0, 3, 0, 3], [0, 0, 0, 1, 0], [0, 0, 1, 0, 1]]) # yapf: disable d_H = convolve(D_H, k, mode='constant') d_V = convolve(D_V, np.transpose(k), mode='constant') del D_H, D_V mask = d_V >= d_H G = np.where(mask, G_H, G_V) M = np.where(mask, 1, 0) del d_H, d_V, G_H, G_V # Red rows. R_r = np.transpose(np.any(R_m == 1, axis=1)[np.newaxis]) * np.ones(R.shape) # Blue rows. 
B_r = np.transpose(np.any(B_m == 1, axis=1)[np.newaxis]) * np.ones(B.shape) k_b = np.array([0.5, 0, 0.5]) R = np.where( np.logical_and(G_m == 1, R_r == 1), G + _cnv_h(R, k_b) - _cnv_h(G, k_b), R, ) R = np.where( np.logical_and(G_m == 1, B_r == 1) == 1, G + _cnv_v(R, k_b) - _cnv_v(G, k_b), R, ) B = np.where( np.logical_and(G_m == 1, B_r == 1), G + _cnv_h(B, k_b) - _cnv_h(G, k_b), B, ) B = np.where( np.logical_and(G_m == 1, R_r == 1) == 1, G + _cnv_v(B, k_b) - _cnv_v(G, k_b), B, ) R = np.where( np.logical_and(B_r == 1, B_m == 1), np.where( M == 1, B + _cnv_h(R, k_b) - _cnv_h(B, k_b), B + _cnv_v(R, k_b) - _cnv_v(B, k_b), ), R, ) B = np.where( np.logical_and(R_r == 1, R_m == 1), np.where( M == 1, R + _cnv_h(B, k_b) - _cnv_h(R, k_b), R + _cnv_v(B, k_b) - _cnv_v(R, k_b), ), B, ) RGB = tstack([R, G, B]) del R, G, B, k_b, R_r, B_r if refining_step: RGB = refining_step_Menon2007(RGB, tstack([R_m, G_m, B_m]), M) del M, R_m, G_m, B_m return RGB demosaicing_CFA_Bayer_DDFAPD = demosaicing_CFA_Bayer_Menon2007 def refining_step_Menon2007(RGB, RGB_m, M): """ Performs the refining step on given *RGB* colourspace array. Parameters ---------- RGB : array_like *RGB* colourspace array. RGB_m : array_like *Bayer* CFA red, green and blue masks. M : array_like Estimation for the best directional reconstruction. Returns ------- ndarray Refined *RGB* colourspace array. Examples -------- >>> RGB = np.array( ... [[[0.30588236, 0.35686275, 0.3764706], ... [0.30980393, 0.36078432, 0.39411766], ... [0.29607844, 0.36078432, 0.40784314], ... [0.29803923, 0.37647060, 0.42352942]], ... [[0.30588236, 0.35686275, 0.3764706], ... [0.30980393, 0.36078432, 0.39411766], ... [0.29607844, 0.36078432, 0.40784314], ... [0.29803923, 0.37647060, 0.42352942]]]) >>> RGB_m = np.array( ... [[[0, 0, 1], ... [0, 1, 0], ... [0, 0, 1], ... [0, 1, 0]], ... [[0, 1, 0], ... [1, 0, 0], ... [0, 1, 0], ... [1, 0, 0]]]) >>> M = np.array( ... [[0, 1, 0, 1], ... 
[1, 0, 1, 0]]) >>> refining_step_Menon2007(RGB, RGB_m, M) array([[[ 0.30588236, 0.35686275, 0.3764706 ], [ 0.30980393, 0.36078432, 0.39411765], [ 0.29607844, 0.36078432, 0.40784314], [ 0.29803923, 0.3764706 , 0.42352942]], <BLANKLINE> [[ 0.30588236, 0.35686275, 0.3764706 ], [ 0.30980393, 0.36078432, 0.39411766], [ 0.29607844, 0.36078432, 0.40784314], [ 0.29803923, 0.3764706 , 0.42352942]]]) """ R, G, B = tsplit(RGB) R_m, G_m, B_m = tsplit(RGB_m) M = as_float_array(M) del RGB, RGB_m # Updating of the green component. R_G = R - G B_G = B - G FIR = np.ones(3) / 3 B_G_m = np.where( B_m == 1, np.where(M == 1, _cnv_h(B_G, FIR), _cnv_v(B_G, FIR)), 0, ) R_G_m = np.where( R_m == 1, np.where(M == 1, _cnv_h(R_G, FIR), _cnv_v(R_G, FIR)), 0, ) del B_G, R_G G = np.where(R_m == 1, R - R_G_m, G) G = np.where(B_m == 1, B - B_G_m, G) # Updating of the red and blue components in the green locations. # Red rows. R_r = np.transpose(np.any(R_m == 1, axis=1)[np.newaxis]) * np.ones(R.shape) # Red columns. R_c = np.any(R_m == 1, axis=0)[np.newaxis] * np.ones(R.shape) # Blue rows. B_r = np.transpose(np.any(B_m == 1, axis=1)[np.newaxis]) * np.ones(B.shape) # Blue columns. B_c = np.any(B_m == 1, axis=0)[np.newaxis] * np.ones(B.shape) R_G = R - G B_G = B - G k_b = np.array([0.5, 0, 0.5]) R_G_m = np.where( np.logical_and(G_m == 1, B_r == 1), _cnv_v(R_G, k_b), R_G_m, ) R = np.where(np.logical_and(G_m == 1, B_r == 1), G + R_G_m, R) R_G_m = np.where( np.logical_and(G_m == 1, B_c == 1), _cnv_h(R_G, k_b), R_G_m, ) R = np.where(np.logical_and(G_m == 1, B_c == 1), G + R_G_m, R) del B_r, R_G_m, B_c, R_G B_G_m = np.where( np.logical_and(G_m == 1, R_r == 1), _cnv_v(B_G, k_b), B_G_m, ) B = np.where(np.logical_and(G_m == 1, R_r == 1), G + B_G_m, B) B_G_m = np.where( np.logical_and(G_m == 1, R_c == 1), _cnv_h(B_G, k_b), B_G_m, ) B = np.where(np.logical_and(G_m == 1, R_c == 1), G + B_G_m, B) del B_G_m, R_r, R_c, G_m, B_G # Updating of the red (blue) component in the blue (red) locations. 
R_B = R - B R_B_m = np.where( B_m == 1, np.where(M == 1, _cnv_h(R_B, FIR), _cnv_v(R_B, FIR)), 0, ) R = np.where(B_m == 1, B + R_B_m, R) R_B_m = np.where( R_m == 1, np.where(M == 1, _cnv_h(R_B, FIR), _cnv_v(R_B, FIR)), 0, ) B = np.where(R_m == 1, R - R_B_m, B) del R_B, R_B_m, R_m return tstack([R, G, B])
bsd-3-clause
-7,104,833,143,231,050,000
27.03523
79
0.487385
false
2.434691
false
false
false
reubano/swutils
manage.py
1
2288
#!/usr/bin/env python # -*- coding: utf-8 -*- """ A script to manage development tasks """ from __future__ import ( absolute_import, division, print_function, with_statement, unicode_literals) from os import path as p from manager import Manager from subprocess import call manager = Manager() BASEDIR = p.dirname(__file__) @manager.command def clean(): """Remove Python file and build artifacts""" call(p.join(BASEDIR, 'helpers', 'clean'), shell=True) @manager.command def check(): """Check staged changes for lint errors""" call(p.join(BASEDIR, 'helpers', 'check-stage'), shell=True) @manager.arg('where', 'w', help='Modules to check') @manager.command def lint(where=None): """Check style with flake8""" call('flake8 %s' % (where if where else ''), shell=True) @manager.command def pipme(): """Install requirements.txt""" call('pip install -r requirements.txt', shell=True) @manager.command def require(): """Create requirements.txt""" cmd = 'pip freeze -l | grep -vxFf dev-requirements.txt > requirements.txt' call(cmd, shell=True) @manager.arg('where', 'w', help='test path', default=None) @manager.arg( 'stop', 'x', help='Stop after first error', type=bool, default=False) @manager.command def test(where=None, stop=False): """Run nose and script tests""" opts = '-xv' if stop else '-v' opts += 'w %s' % where if where else '' call([p.join(BASEDIR, 'helpers', 'test'), opts]) @manager.command def register(): """Register package with PyPI""" call('python %s register' % p.join(BASEDIR, 'setup.py'), shell=True) @manager.command def release(): """Package and upload a release""" sdist() wheel() upload() @manager.command def build(): """Create a source distribution and wheel package""" sdist() wheel() @manager.command def upload(): """Upload distribution files""" call('twine upload %s' % p.join(BASEDIR, 'dist', '*'), shell=True) @manager.command def sdist(): """Create a source distribution package""" call(p.join(BASEDIR, 'helpers', 'srcdist'), shell=True) @manager.command def wheel(): 
"""Create a wheel package""" call(p.join(BASEDIR, 'helpers', 'wheel'), shell=True) if __name__ == '__main__': manager.main()
mit
-581,464,328,525,125,900
21.653465
78
0.645542
false
3.477204
false
false
false
bstroebl/QGIS
python/plugins/GdalTools/tools/dialogSRS.py
1
2128
# -*- coding: utf-8 -*- """ *************************************************************************** dialogSRS.py --------------------- Date : June 2010 Copyright : (C) 2010 by Giuseppe Sucameli Email : brush dot tyler at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Giuseppe Sucameli' __date__ = 'June 2010' __copyright__ = '(C) 2010, Giuseppe Sucameli' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' from PyQt4.QtCore import * from PyQt4.QtGui import * from qgis.core import * from qgis.gui import * class GdalToolsSRSDialog(QDialog): def __init__(self, title, parent=None): QDialog.__init__(self, parent) self.setWindowTitle( title ) layout = QVBoxLayout() self.selector = QgsProjectionSelector(self) buttonBox = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Close) layout.addWidget(self.selector) layout.addWidget(buttonBox) self.setLayout(layout) self.connect(buttonBox, SIGNAL("accepted()"), self.accept) self.connect(buttonBox, SIGNAL("rejected()"), self.reject) def epsg(self): return "EPSG:" + str(self.selector.selectedEpsg()) def proj4string(self): return self.selector.selectedProj4String() def getProjection(self): if self.selector.selectedEpsg() != 0: return self.epsg() if not self.selector.selectedProj4String().isEmpty(): return self.proj4string() return QString()
gpl-2.0
715,995,025,839,427,600
33.885246
80
0.522086
false
4.387629
false
false
false
alaeddine10/ggrc-core
src/ggrc/fulltext/mysql.py
1
1158
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: from ggrc import db from sqlalchemy import event from sqlalchemy.sql.expression import text from sqlalchemy.schema import DDL from .sql import SqlIndexer class MysqlRecordProperty(db.Model): __tablename__ = 'fulltext_record_properties' __table_args__ = {'mysql_engine': 'myisam'} key = db.Column(db.Integer, primary_key=True) type = db.Column(db.String(64), primary_key=True) tags = db.Column(db.String) property = db.Column(db.String(64), primary_key=True) content = db.Column(db.Text) event.listen( MysqlRecordProperty.__table__, 'after_create', DDL('ALTER TABLE {tablename} ADD FULLTEXT INDEX {tablename}_text_idx ' '(content)'.format(tablename=MysqlRecordProperty.__tablename__)) ) class MysqlIndexer(SqlIndexer): record_type = MysqlRecordProperty def search(self, terms): return db.session.query(self.record_type).filter( 'match (content) against (:terms)').params(terms=terms).all() Indexer = MysqlIndexer
apache-2.0
8,088,862,217,565,668,000
30.324324
78
0.722798
false
3.436202
false
false
false
PythonMid/pymidweb
pythonmid/apps/community/migrations/0006_auto_20150624_2211.py
1
1137
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import pythonmid.apps.community.models class Migration(migrations.Migration): dependencies = [ ('community', '0005_sponsor_level'), ] operations = [ migrations.AlterField( model_name='sponsor', name='level', field=models.IntegerField(choices=[(1, 'Gold'), (2, 'Platinum')], help_text='Nivel Prioridad Sponsor'), ), migrations.AlterField( model_name='sponsor', name='logo', field=models.ImageField(help_text='imagen del patrocinador', upload_to=pythonmid.apps.community.models.image_path), ), migrations.AlterField( model_name='sponsor', name='provides', field=models.CharField(max_length=600, help_text='Recurso o actividad que aporta a la comunidad'), ), migrations.AlterField( model_name='sponsor', name='website', field=models.URLField(max_length=700, help_text='URL de sitio oficial'), ), ]
gpl-2.0
3,638,746,908,936,171,000
31.485714
127
0.593668
false
4.031915
false
false
false
anushreejangid/csmpe-main
csmpe/core_plugins/csm_install_operations/ios_xe/pre_activate.py
1
8907
# ============================================================================= # # Copyright (c) 2016, Cisco Systems # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF # THE POSSIBILITY OF SUCH DAMAGE. 
# ============================================================================= import re from csmpe.plugins import CSMPlugin from utils import available_space from utils import number_of_rsp from utils import install_folder from utils import check_issu_readiness from utils import remove_exist_subpkgs from utils import install_package_family from utils import create_folder from utils import xe_show_platform from utils import check_pkg_conf class Plugin(CSMPlugin): """This plugin performs pre-activate tasks.""" name = "Install Pre-Activate Plugin" platforms = {'ASR900'} phases = {'Pre-Activate'} os = {'XE'} def run(self): self.ctx.info("Hardware platform: {}".format(self.ctx._connection.platform)) self.ctx.info("OS Version: {}".format(self.ctx._connection.os_version)) try: packages = self.ctx.software_packages except AttributeError: self.ctx.warning("No package list provided. Skipping calculation of required free bootflash memory.") return pkg = ''.join(packages) con_platforms = ['ASR-902', 'ASR-920'] sub_platforms = ['ASR-903', 'ASR-907'] rsp_count = 1 folder = 'bootflash:' stby_folder = 'stby-bootflash:' # check the device type vs the package family supported_imgs = {} supported_imgs['asr902'] = ['asr900', 'asr903'] supported_imgs['asr903'] = ['asr900', 'asr903'] supported_imgs['asr907'] = ['asr900', 'asr903'] supported_imgs['asr920'] = ['asr920'] m = re.search('ASR-(\d+)', self.ctx._connection.platform) if m: device_family = m.group(1) device_family = 'asr' + device_family else: self.ctx.error("Unspported device: {}".format(self.ctx._connection.platform)) return pkg_family = install_package_family(pkg) if not pkg_family: self.ctx.info("Private device image: {}".format(pkg)) if pkg_family not in supported_imgs[device_family]: self.ctx.info("Private device image: {} on {}".format(pkg, self.ctx._connection.platform)) # check the RSP type between image and device: curr_rsp = None pkg_rsp = None output = self.ctx.send("show version | include RSP") if output: m = 
re.search('(RSP\d)', output) if m: curr_rsp = m.group(0).lower() m = re.search('(rsp\d)', pkg) if m: pkg_rsp = m.group(0) if curr_rsp and pkg_rsp and curr_rsp != pkg_rsp: self.ctx.info("Incompatible Route processor in {} for this device {}".format(pkg, curr_rsp)) # Determine one of the following modes: consolidated, subpackage, or issu if self.ctx._connection.platform in con_platforms: mode = 'consolidated' elif self.ctx._connection.platform in sub_platforms: mode = 'subpackage' # Determine the number of RSP's in the chassis rsp_count = number_of_rsp(self.ctx) if rsp_count == 0: self.ctx.error("No RSP is discovered") return # Determine the install folder folder = install_folder(self.ctx) stby_folder = 'stby-' + folder # Create the folder if it does not exist if not create_folder(self.ctx, folder): self.ctx.error("Install folder {} creation failed", format(folder)) return if rsp_count == 2 and not create_folder(self.ctx, stby_folder): self.ctx.error("Install folder {} creation " "failed", format(stby_folder)) return else: self.ctx.error("Unsupported platform: {}".format(self.ctx._connection.platform)) return total_size = 10000000 valid_pkg_conf = False if mode == 'subpackage': # Check if the packages.conf is valid valid_pkg_conf = check_pkg_conf(self.ctx, folder) # Remove residual image files from previous installations if valid_pkg_conf: remove_exist_subpkgs(self.ctx, folder, pkg) else: self.ctx.warning("Empty or invalid {}/packages.conf".format(folder)) self.ctx.warning("Residual packages from previous installations are not " "automatically removed from bootflash: / stby-bootflash:.") self.ctx.info("Sub-package mode will be performed to " "activate package = {}".format(pkg)) cmd = "dir bootflash: | include " + pkg output = self.ctx.send(cmd) if output: m = re.search('-rw-\s+(\d+)\s+', output) if m: total_size += int(m.group(1)) flash_free = available_space(self.ctx, 'bootflash:') self.ctx.info("Total required / bootflash " "available: {} / {} 
bytes".format(total_size, flash_free)) if flash_free < total_size: self.ctx.error("Not enough space on bootflash: to install packages. " "The install process can't proceed.\n" "Please erase unused images, crashinfo, " "core files, and tracelogs") else: self.ctx.info("There is enough space on bootflash: to install packages.") if rsp_count == 2: if valid_pkg_conf: remove_exist_subpkgs(self.ctx, stby_folder, pkg) stby_free = available_space(self.ctx, 'stby-bootflash:') self.ctx.info("Total required / stby-bootflash " "available: {} / {} bytes".format(total_size, stby_free)) if stby_free < total_size: self.ctx.error("Not enough space on stby-bootflash: to " "install packages. The install process can't proceed.\n" "Please erase unused images, crashinfo, core files, " "and tracelogs") else: self.ctx.info("There is enough space on stby-bootflash: to install packages.") # Determine if ISSU is feasible if mode == 'subpackage' and rsp_count == 2 and valid_pkg_conf: if check_issu_readiness(self.ctx, pkg, total_size): mode = 'issu' self.ctx.info("ISSU will be performed to activate package = {}".format(pkg)) # Log the status of RP and SIP platform_info = xe_show_platform(self.ctx) if not platform_info: self.ctx.error("The CLI 'show platform' is not able to determine the status of RP and SIP ") return self.ctx.info("show platform = {}".format(platform_info)) self.ctx.info("Activate number of RSP = {}".format(rsp_count)) self.ctx.info("Activate package = {}".format(pkg)) self.ctx.info("Install folder = {}".format(folder)) self.ctx.info("Activate package mode = {}".format(mode)) self.ctx.save_data('xe_rsp_count', rsp_count) self.ctx.save_data('xe_activate_pkg', pkg) self.ctx.save_data('xe_boot_mode', mode) self.ctx.save_data('xe_install_folder', folder) self.ctx.save_data('xe_show_platform', platform_info) return True
bsd-2-clause
3,346,183,386,358,608,000
41.822115
113
0.589536
false
4.223329
false
false
false
jonmorehouse/vimhub
lib/issue.py
1
8147
import utils import config import re import git import copy import comment_list import webbrowser import github try: import vim except ImportError as e: vim = False i_hash = {} # hash individual issues class Issue: defaults = { "title": "", "assignee": "", "milestone": "", "state": "open", "labels": [], "body": "", } def __init__(self, **kwargs): # set defaults for class if not Issue.defaults.get("assignee"): Issue.defaults["assignee"] = utils.github.user()["login"] self.repo = kwargs.get("repo") self.number = kwargs.get("number") self.issue_uri = "repos/%s/issues/%s" % (self.repo, self.number) self.comments = comment_list.CommentList(self.number, self.repo) self._get_data() @classmethod def open(cls, *args): i = cls._issue_from_args(*args) if not i or not i.repo: print "Not a valid repository or issue. Please try again or consult help pages" return i.post_hook() @classmethod def browse(cls, *args): i = cls._issue_from_args(*args) if hasattr(i, "url"): webbrowser.open(i.url) i.map_buffer() @classmethod def save(cls): i = cls._issue_from_buffer() if not i: print "Error has occurred. Issue was not found. 
Please report an issue on github" return # parse the uri from this issue i.position = vim.current.window.cursor i.parse() # parse the buffer i.update() # push to the server i.post_hook() @classmethod def toggle_state(cls): i = cls._issue_from_buffer() i.position = vim.current.window.cursor i.parse() # parse current buffer to correct location i.change_state() i.update() i.post_hook() @classmethod def _issue_from_args(cls, *args, **kwargs): kwargs = utils.args_to_kwargs(args, kwargs) if not kwargs.get("args") or len(kwargs.get("args")) == 0: kwargs["number"] = "new" else: kwargs["number"] = kwargs.get("args")[0] del kwargs["args"] key = "%s/%s" % (kwargs.get("repo"), kwargs.get("number")) if not i_hash.has_key(key): i_hash[key] = cls(**kwargs) return i_hash.get(key) @classmethod def _issue_from_buffer(cls): # bname corresponds to to the issue hash key bname = vim.current.buffer.name # check to make sure the correct pieces are here mg = re.match(r"(?P<user>.*)/(?P<repo>.*)/(?P<issue>.*)", bname) if not mg: return None return i_hash.get(bname) def change_state(self): if self.data["state"] == "open": self.data["state"] = "closed" else: self.data["state"] = "open" def parse(self): # reset body self.data["body"] = [] # this is messy - convert to a matchgroup in the future for index, line in enumerate(vim.current.buffer[1:]): mg = re.match(r"# (?P<label>[^:]+): (?P<value>.*)", line) # handle normal attribute if mg: value = mg.group("value") label = mg.group("label").lower() if label in self.defaults.keys(): if type(self.defaults[label]) == list: self.data[label] = value.split(",") else: self.data[label] = value # handle error elif re.search(r"^## Comments Issue #%s" % self.number, line): # pass the comments to the other section self.comments.parse(vim.current.buffer[index+1:-1]) break else: self.data["body"].append(line) self.data["body"] = utils.trim_lines(self.data["body"]) def post_hook(self): self.draw() self.map_buffer() if hasattr(self, "position"): 
vim.command(str(self.position[0])) #vim.command("|%s" % str(self.position[1])) def map_buffer(self): # autocommand to call on post save ... vim.command("map <buffer> s :python issue.Issue.save()<cr>") # uses current buffer name # toggle the state of the current issue vim.command("map <buffer> cc :python issue.Issue.toggle_state()<cr>") # uses current buffer name # hit enter to browse the current url vim.command("map <buffer> <cr> :normal! 0<cr>:python issue.Issue.browse(\"%s\", \"%s\")<cr>" % (self.repo, self.number)) # doesn't use current buffer name def draw(self): self.buffer_name = "%s/%s" % (self.repo, self.number) b = utils.get_buffer(self.buffer_name) vim.command("1,$d") vim.command("set filetype=markdown") # print out issue b.append("## %s # %s" % (self.repo, self.number)) b.append("") # iterate through all keys that aren't body keys = self.data.keys() keys.remove("body") for key in keys: value = self.data[key] if type(value) == list: value = ",".join(value) b.append("# %s: %s" % (key.capitalize(), value)) # print out body if applicable if self.data.has_key("body") and self.data["body"]: for line in self.data["body"].splitlines(): b.append(line) # now we need to print the comments self.comments.draw(b) # remove leading line vim.command("1delete _") def update(self): if self.number == "new": self._create_issue() else: self._save_issue() def _get_data(self): self.data = copy.deepcopy(self.defaults) # get issue from github api if not new if not self.number == "new": data, status = github.request(github.url(self.issue_uri)) if not status: utils.log(data) return # issue was successfully requested for key in self.defaults.keys() + ["assignee", "user"]: # github will return None if key in ("assignee", "user") and data.get(key): self.data[key] = data[key]["login"] elif key == "labels": self.data[key] = [str(label["name"]) for label in data[key]] elif key == "milestone" and data.get("milestone"): self.data[key] = data[key]["title"] elif data.get(key): self.data[key] 
= data[key] # grab the browse url self.url = data["html_url"] def _create_issue(self): # create issue on the server uri = "repos/%s/issues" % self.repo url = github.url(uri) data = utils.clean_data(copy.deepcopy(self.data), ["state"]) if not data or len(data.keys()) == 0: utils.log("New issues require title/body") return data, status = github.request(url, "post", data) if not status: utils.log(data) return # update attributes as needed for object self.number = str(data["number"]) self.data["user"] = data["user"]["login"] self.url = data["html_url"] self.issue_uri = "repos/%s/issues/%s" % (self.repo, self.number) self.comments.number = self.number # clean up hash del i_hash["%s/%s" % (self.repo, "new")] i_hash["%s/%s" % (self.repo, self.number)] = self # delete the old buffer that we don't need any more vim.command("silent new") vim.command("bdelete %s" % self.buffer_name) def _save_issue(self): # get ready for the patch operation url = github.url(self.issue_uri) data = utils.clean_data(copy.deepcopy(self.data), ["number", "user", "labels"]) data, status = github.request(url, "patch", data)
mit
8,800,810,227,783,466,000
30.824219
162
0.527924
false
3.90369
false
false
false
UofS-CTLE/Projtrack3
ctleweb/d2lstat/views.py
1
1924
from django.shortcuts import render from .d2lstat import process_file, calculateVirtualClassroomStats, facultyNotUsingD2LCalculation from .forms import UploadFileForm, VirtualClassroomUsageForm, FacultyNotUsingD2LForm def index(request): if request.method == 'POST': process_file(request.FILES['usage'].temporary_file_path(), request.FILES['full'].temporary_file_path(), request.FILES['part'].temporary_file_path(), request.POST['semester'], request.POST['total_courses']) return render(request, 'd2lstat/report.html') else: form = UploadFileForm() return render(request, 'd2lstat/index.html', {'form': form}) def virtualClassroomStats(request): if request.method == 'POST': statsList = calculateVirtualClassroomStats(request.FILES['usage'].temporary_file_path(), request.FILES['full'].temporary_file_path(), request.FILES['part'].temporary_file_path(), request.FILES['virtualClassroomData'].temporary_file_path()) return render(request, 'd2lstat/virtualClassroomStatsResults.html', {'statsList':statsList}) else: form = VirtualClassroomUsageForm() return render(request, 'd2lstat/virtualClassroomStats.html', {'form': form}) def facultyNotUsingD2L(request): if request.method == 'POST': statsList = facultyNotUsingD2LCalculation(request.FILES['usage'].temporary_file_path(), request.FILES['full'].temporary_file_path(), request.FILES['part'].temporary_file_path(), request.POST['semester']) return render(request, 'd2lstat/FacultyNotUsingD2LResults.html', {'statsList':statsList}) else: form = FacultyNotUsingD2LForm() return render(request, 'd2lstat/FacultyNotUsingD2L.html', {'form': form})
gpl-3.0
1,134,818,794,261,291,500
48.333333
100
0.650208
false
4.008333
false
false
false
bverdu/onDemand
gui/widgets.py
1
15338
# encoding: utf-8 ''' Created on 29 mai 2015 @author: Bertrand Verdu ''' from __future__ import print_function import os from kivy.clock import Clock from kivy.core.window import Window # from kivy.graphics.transformation import Matrix from kivy.loader import Loader from kivy.metrics import dp from kivy.properties import ObjectProperty,\ StringProperty, DictProperty, BooleanProperty, ListProperty from kivy.uix.boxlayout import BoxLayout from kivy.uix.bubble import Bubble from kivy.uix.button import Button from kivy.uix.filechooser import FileChooserListView from kivy.uix.gridlayout import GridLayout from kivy.uix.floatlayout import FloatLayout from kivy.uix.image import Image, AsyncImage from kivy.uix.label import Label from kivy.uix.popup import Popup from kivy.uix.scatter import Scatter from kivy.uix.screenmanager import Screen from kivy.uix.settings import SettingString, SettingSpacer, SettingPath from kivy.uix.togglebutton import ToggleButton Loader.num_workers = 4 Loader.loading_image = 'data/icons/wait.zip' def button_size(): if Window.size[0] > Window.size[1]: return Window.size[0] / 5, Window.size[1] / 8 else: return Window.size[0] / 3, Window.size[1] / 8 class Home(Screen): background = StringProperty('data/background_ebony.png') status = DictProperty() room = '' def unlock_widgets(self, state): print('unlock: %s' % state) if isinstance(self.children[0], FloatLayout): for w in self.children[0].children: w.do_translation_x = state w.do_translation_y = state class StartPage(GridLayout): devices = DictProperty() roomlist = DictProperty() format = ListProperty([i for i in button_size()]) _rooms = [] _devices = [] rooms = None lights = None medias = None first = True def on_devices(self, instance, value): # print('device: %s' % value) for uid, device in value.items(): if uid not in self._devices: for dev in self.roomlist[device['room']]['devices']: if dev['uid'] == uid: self.add_device(dev) def on_roomlist(self, instance, value): # print('room: %s' % value) # 
print(len(self.children)) if len(self.children) == 0: def later(ignored): self.on_roomlist(instance, value) # f = lambda ignored: self.on_roomlist(instance, value) Clock.schedule_once(later, 2) return # print(len(self.children)) # print(self.rooms) # print(self.ids) if not self.rooms: self._rooms = [] self._devices = [] if len(self.children) > 0: for child in self.children: if len(child.children) > 0: child = child.children[0] if child.typ: print(child.typ) setattr(self, child.typ, child) for room, values in value.items(): if room == 'Home': continue if room not in self._rooms: if self.rooms: # print('add room: %s -- %s' % (room, values['pic'])) self.add_room(room, values['pic']) if self.rooms: for device in values['devices']: if device['uid'] not in self._devices: # print('add device: %s' % device) self.add_device(device) def add_room(self, room, pic): # print('from Window: %s X %s' % Window.size) w = button_size()[0] r = RoomButton(ltext=room, pic=pic, width=w, size_hint=(None, 1)) # size_hint=((.20, 1) # if Window.size[0] >= Window.size[1] # else (.4, 1))) self.rooms.add_widget(r) self._rooms.append(room) def add_device(self, device): w = button_size()[0] print('dimensions d : %s X %s' % (self.parent.width, self.parent.height)) if device['type'] == 'Lights': if self.lights: b = LightButtonWithText( pic=type_img(device['type']), ltext=device['name'], width=w, size_hint=(None, 1)) # size_hint=((.20, 1) # if Window.size[0] >= Window.size[1] # else (.4, 1))) self.lights.add_widget(b) elif device['type'] == 'MediaPlayer': if self.medias: b = MediaButtonWithText( pic=type_img(device['type']), ltext=device['name'], device=device, width=w, size_hint=(None, 1)) self.medias.add_widget(b) else: return self._devices.append(device['uid']) class Shutters(Screen): pass class Scenarios(Screen): pass class RoomButton(Button): pic = StringProperty() ltext = StringProperty() class LightButtonWithText(ToggleButton): pic = StringProperty() ltext = StringProperty() class 
MediaButtonWithText(ToggleButton): pic = StringProperty() ltext = StringProperty() device = ObjectProperty() class Pop_device(object): def __init__(self, parent): self.parent = parent content = parent.typ(pop=self) self.popup = Popup( title=parent.name, content=content, size_hint=(.2, .3)) def display(self): self.popup.open() def dismiss(self): self.popup.dismiss() def define_size(self, size): print('Size: %s' % size) if size[0] < 120: print('resize2 !') self.popup.size_hint = self.popup.size_hint[0] * 1.5,\ self.popup.size_hint[1] * 1.5 # self.popup.size = (100, 100) # self.popup.content.size = (100, 100) class Bubble_device(Bubble): pass class Player_menu(BoxLayout): pop = ObjectProperty() class Light_menu(BoxLayout): pop = ObjectProperty() class Bubble_player(Bubble): pass # class SensorLabel(Label): class SensorPad(Scatter): sensors = ListProperty([]) def __init__(self, *args, **kwargs): super(SensorPad, self).__init__(*args, **kwargs) def on_sensors(self, instance, value): self.ids.bl.clear_widgets() for s in value: d, v, u = s.get() if u is None: l = Label(text='%s: %s' % (d, ('Oui' if v else 'Non'))) else: l = Label(text='%s: %s %s' % (d, v, u)) self.ids.bl.add_widget(l) self.size = (self.size[0], 30 * len(value)) # m = Matrix().scale(1, len(value), 1) # self.apply_transform(m, True) class DeviceButton(Scatter): pic_true = StringProperty('data/icons/lamp_1.png') pic_false = StringProperty('data/icons/lamp_0.png') state = BooleanProperty(False) play = ObjectProperty(None) config = ObjectProperty(None) open = ObjectProperty(None) name = StringProperty('Light') bubble = ObjectProperty(None) scheduled = False typ = Light_menu def pushed(self): if self.do_translation_x: # print('unlocked') return True else: Clock.schedule_once(self.show_bubble, 1) self.scheduled = True def on_touch_up(self, touch): if self.scheduled: Clock.unschedule(self.show_bubble) self.scheduled = False # print('locked') self.state = not self.state self.play(self) if self.do_translation_x: # 
print('locking') self.do_translation_x = False self.do_translation_y = False if self.config: self.config.set( self.name, 'position', str(self.pos[0]) + '*' + str(self.pos[1])) self.config.write() def unlock(self, state=True): # print('unlock') self.do_translation_x = state self.do_translation_y = state # self.unlocked = True # self.remove_widget(self.bubb) # return False def show_bubble(self, *l): self.scheduled = False # self.bubb = bubb = self.bubble() # bubb.pos = bubb.pos[0] + self.width, bubb.pos[1] if not self.bubble: self.bubble = Pop_device(self) self.bubble.display() # self.add_widget(bubb) # def on_touch_down(self, touch): # # print('touch %s - %s' % (touch.pos, self.pos)) # # '''.. versionchanged:: 1.4.0''' # if self.collide_point(*touch.pos): # self.state = not self.state # print(self.state) # return self.play(self) # if self.locked: # return True class LightButton(DeviceButton): pass class PlayerButton(DeviceButton): pic_true = StringProperty('data/icons/multimedia_playing') pic_false = StringProperty('data/icons/multimedia_stopped.png') typ = Player_menu # bubble = Pop_device(self) class ScatterCross(Scatter): pass class HVAC(Screen): pass class BgImage(AsyncImage): pass class SettingImg(SettingPath): def _create_popup(self, instance): from jnius import autoclass # SDcard Android # Get path to SD card Android try: Environment = autoclass('android.os.Environment') # print(Environment.DIRECTORY_DCIM) # print(Environment.DIRECTORY_MOVIES) # print(Environment.DIRECTORY_MUSIC) env = Environment() print('two') sdpath = env.getExternalStorageDirectory().getAbsolutePath() try: if not env.isExternalStorageRemovable(): if os.path.lexists('/storage/sdcard1'): sdpath = '/storage/sdcard1/'\ + Environment.DIRECTORY_PICTURES else: print('removable') except Exception as err: print(err) print('three') print(':)') # Not on Android except: print(':(') sdpath = os.path.expanduser('~') print('popup!') print(sdpath) # create popup layout content = 
BoxLayout(orientation='vertical', spacing=5) # popup_width = min(0.95 * Window.width, dp(500)) self.popup = popup = Popup( title=self.title, content=content, size_hint=(None, 0.9), width=dp(300)) # create the filechooser print('1') if os.path.isfile(self.value): print('file!') path = os.path.split(self.value)[0] if len(sdpath) == 0: path = os.path.expanduser('~') elif '/data/living.png' in self.value: print('living found!') path = sdpath else: path = sdpath print(path) self.textinput = textinput = FileChooserListView( path=path, size_hint=(1, 1), dirselect=True) textinput.bind(on_path=self._validate) self.textinput = textinput # construct the content content.add_widget(textinput) content.add_widget(SettingSpacer()) # 2 buttons are created for accept or cancel the current value btnlayout = BoxLayout(size_hint_y=None, height='50dp', spacing='5dp') btn = Button(text='Ok') btn.bind(on_release=self._validate) btnlayout.add_widget(btn) btn = Button(text='Cancel') btn.bind(on_release=self._dismiss) btnlayout.add_widget(btn) content.add_widget(btnlayout) # all done, open the popup ! popup.open() class SettingPos(SettingString): '''Implementation of a string setting on top of a :class:`SettingItem`. It is visualized with a :class:`~kivy.uix.label.Label` widget that, when clicked, will open a :class:`~kivy.uix.popup.Popup` with a :class:`~kivy.uix.textinput.Textinput` so the user can enter a custom value. ''' popup = ObjectProperty(None, allownone=True) '''(internal) Used to store the current popup when it's shown. :attr:`popup` is an :class:`~kivy.properties.ObjectProperty` and defaults to None. ''' # position = ObjectProperty(None) '''(internal) Used to store the current textinput from the popup and to listen for changes. :attr:`textinput` is an :class:`~kivy.properties.ObjectProperty` and defaults to None. 
''' pic = StringProperty() position = StringProperty('50*50') def __init__(self, **kwargs): super(SettingPos, self).__init__(**kwargs) self.img = Image(source=self.pic) def on_panel(self, instance, value): if value is None: return self.bind(on_release=self._create_popup) def _dismiss(self, *largs): if self.popup: self.popup.dismiss() self.popup = None def _register(self, instance, touch): if self.img.collide_point(*touch.pos): # self.position = '*'.join([str(p) for p in touch.pos]) # print(touch) # print(self.img.pos) # print(self.img.size) # print(Window.size) x, y = self.img.to_widget(touch.pos[0], touch.pos[1], True) x = x - self.img.pos[0] - 20.0 y = y + 68.0 # print('%s * %s' % (x, y)) self.position = str(x) + '*' + str(y) def _validate(self, instance): value = self.position self.value = value # print(self.value) self._dismiss() def _create_popup(self, instance): # create popup layout content = BoxLayout(orientation='vertical', spacing='5dp') # popup_width = min(0.95 * Window.width, dp(500)) self.popup = popup = Popup( title=self.title, content=content) pos = [float(c) for c in self.value.split('*')] scat = ScatterCross(size=(20, 20), size_hint=(None, None), pos=pos) scat.bind(on_touch_up=self._register) self.img.add_widget(scat) content.add_widget(self.img) content.add_widget(SettingSpacer()) # 2 buttons are created for accept or cancel the current value btnlayout = BoxLayout(size_hint_y=None, height='50dp', spacing='5dp') btn = Button(text='Ok') btn.bind(on_release=self._validate) btnlayout.add_widget(btn) btn = Button(text='Cancel') btn.bind(on_release=self._dismiss) btnlayout.add_widget(btn) content.add_widget(btnlayout) # all done, open the popup ! popup.open() def type_img(typ): if typ in ['Lights']: return 'data/icons/lamp_1.png' elif typ in ['MediaPlayer']: return 'data/icons/Multimedia.png'
agpl-3.0
1,037,562,344,166,610,300
29.923387
79
0.548703
false
3.816372
false
false
false
Salamek/reader
reader/read.py
1
1502
# -*- coding: utf-8 -*-
"""Poll an ACR122L NFC reader and display tag data on its LCD.

Runs forever: waits for a tag, authenticates block 0 with the default
MIFARE transport key, reads block 1 and shows the result (or an error
message) on the reader's LCD, then pauses before re-arming the scanner.
"""
from acr122l import acr122l
import time

acr122l = acr122l()

# Default MIFARE Classic transport key (factory setting).
DEFAULT_KEY = [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF]

# Seconds to wait after a scan before accepting the next tag.
SCAN_COOLDOWN = 5

# Main-loop flag: read_card() clears it while a scan is in progress and
# restores it once the reader shows "Ready" again.
scanning = True


def read_card():
    """Authenticate, read block 1 and report the outcome on the LCD.

    Side effects only: drives the reader's LCD, LEDs and buzzer, and
    toggles the module-level ``scanning`` flag around the cooldown.
    """
    global scanning
    scanning = False
    print(acr122l.TAG_Authenticate(0x00, DEFAULT_KEY))
    print('Starting read data')
    data = acr122l.TAG_Read(0x01)
    if data:
        acr122l.LCD_Clear()
        acr122l.LED_control('0010')
        acr122l.LCD_Text(False, 'A', 0x00, 'Done! Wait 10 s to next scan...')
        acr122l.LCD_Text(False, 'A', 0x40, data)
        acr122l.Buzzer_control(1, 1, 1)  # short confirmation beep
    else:
        acr122l.LCD_Clear()
        acr122l.LED_control('0001')
        acr122l.LCD_Text(False, 'A', 0x00, 'Error,Scan again')
        acr122l.LCD_Text(False, 'A', 0x40, 'Wait 10 s to next scan...')
        acr122l.Buzzer_control(10, 10, 1)  # longer error beep
    time.sleep(SCAN_COOLDOWN)
    # Re-arm the reader and show the idle screen again.
    acr122l.LCD_back_light(True)
    acr122l.LED_control('1000')
    scanning = True
    acr122l.LCD_Clear()
    acr122l.LCD_Text(False, 'A', 0x00, 'Ready')


# Idle state: backlight on, "ready" LED, prompt on the LCD.
acr122l.LED_control('1000')
acr122l.LCD_back_light(True)
acr122l.LCD_Clear()
acr122l.LCD_Text(False, 'A', 0x00, 'Ready')

while scanning:
    ret = acr122l.TAG_Polling()
    if ret:
        acr122l.LCD_Clear()
        acr122l.LCD_Text(False, 'A', 0x00, 'Reading...')
        acr122l.LED_control('0100')
        # Polling frame layout (offsets into ret) — as used by the
        # original code: 18 target number, 19-20 SENS_RES, 21 SEL_RES,
        # 22 UID length, 25.. UID bytes.  Only the UID is needed here.
        len_uid = ret[22]
        uid = ret[25:25 + len_uid]
        if uid:
            read_card()
gpl-3.0
-4,102,813,253,444,852,700
19.861111
68
0.654461
false
2.208824
false
false
false
leandrotoledo/python-telegram-bot
telegram/payment/precheckoutquery.py
2
5229
#!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2021
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains an object that represents a Telegram PreCheckoutQuery."""

from typing import TYPE_CHECKING, Any, Optional

from telegram import OrderInfo, TelegramObject, User
from telegram.utils.helpers import DEFAULT_NONE
from telegram.utils.types import JSONDict, ODVInput

if TYPE_CHECKING:
    from telegram import Bot


class PreCheckoutQuery(TelegramObject):
    """This object contains information about an incoming pre-checkout query.

    Objects of this class are comparable in terms of equality. Two objects of this class are
    considered equal, if their :attr:`id` is equal.

    Note:
        In Python ``from`` is a reserved word, use ``from_user`` instead.

    Args:
        id (:obj:`str`): Unique query identifier.
        from_user (:class:`telegram.User`): User who sent the query.
        currency (:obj:`str`): Three-letter ISO 4217 currency code.
        total_amount (:obj:`int`): Total price in the smallest units of the currency (integer, not
            float/double). For example, for a price of US$ 1.45 pass ``amount = 145``.
            See the :obj:`exp` parameter in
            `currencies.json <https://core.telegram.org/bots/payments/currencies.json>`_,
            it shows the number of digits past the decimal point for each currency
            (2 for the majority of currencies).
        invoice_payload (:obj:`str`): Bot specified invoice payload.
        shipping_option_id (:obj:`str`, optional): Identifier of the shipping option chosen by the
            user.
        order_info (:class:`telegram.OrderInfo`, optional): Order info provided by the user.
        bot (:class:`telegram.Bot`, optional): The Bot to use for instance methods.
        **kwargs (:obj:`dict`): Arbitrary keyword arguments.

    Attributes:
        id (:obj:`str`): Unique query identifier.
        from_user (:class:`telegram.User`): User who sent the query.
        currency (:obj:`str`): Three-letter ISO 4217 currency code.
        total_amount (:obj:`int`): Total price in the smallest units of the currency.
        invoice_payload (:obj:`str`): Bot specified invoice payload.
        shipping_option_id (:obj:`str`): Optional. Identifier of the shipping option chosen by the
            user.
        order_info (:class:`telegram.OrderInfo`): Optional. Order info provided by the user.
        bot (:class:`telegram.Bot`): Optional. The Bot to use for instance methods.

    """

    __slots__ = (
        'bot',
        'invoice_payload',
        'shipping_option_id',
        'currency',
        'order_info',
        'total_amount',
        'id',
        'from_user',
        '_id_attrs',
    )

    def __init__(
        self,
        id: str,  # pylint: disable=W0622
        from_user: User,
        currency: str,
        total_amount: int,
        invoice_payload: str,
        shipping_option_id: str = None,
        order_info: OrderInfo = None,
        bot: 'Bot' = None,
        **_kwargs: Any,
    ):
        self.id = id  # pylint: disable=C0103
        self.from_user = from_user
        self.currency = currency
        self.total_amount = total_amount
        self.invoice_payload = invoice_payload
        self.shipping_option_id = shipping_option_id
        self.order_info = order_info

        self.bot = bot

        # Attributes that define equality/hashing for this object (see the
        # class docstring: two queries are equal iff their ``id`` is equal).
        self._id_attrs = (self.id,)

    @classmethod
    def de_json(cls, data: Optional[JSONDict], bot: 'Bot') -> Optional['PreCheckoutQuery']:
        """See :meth:`telegram.TelegramObject.de_json`."""
        data = cls._parse_data(data)

        if not data:
            return None

        # ``from`` is a reserved word in Python, so the incoming JSON key is
        # renamed to ``from_user`` before being passed to the constructor.
        data['from_user'] = User.de_json(data.pop('from'), bot)
        data['order_info'] = OrderInfo.de_json(data.get('order_info'), bot)

        return cls(bot=bot, **data)

    def answer(  # pylint: disable=C0103
        self,
        ok: bool,
        error_message: str = None,
        timeout: ODVInput[float] = DEFAULT_NONE,
        api_kwargs: JSONDict = None,
    ) -> bool:
        """Shortcut for::

            bot.answer_pre_checkout_query(update.pre_checkout_query.id, *args, **kwargs)

        For the documentation of the arguments, please see
        :meth:`telegram.Bot.answer_pre_checkout_query`.

        """
        # Delegates to the bound bot instance, filling in this query's id.
        return self.bot.answer_pre_checkout_query(
            pre_checkout_query_id=self.id,
            ok=ok,
            error_message=error_message,
            timeout=timeout,
            api_kwargs=api_kwargs,
        )
lgpl-3.0
-7,302,558,104,083,430,000
36.35
98
0.634921
false
3.982483
false
false
false
googleapis/googleapis-gen
google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/enums/types/payment_mode.py
1
1182
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import proto # type: ignore __protobuf__ = proto.module( package='google.ads.googleads.v7.enums', marshal='google.ads.googleads.v7', manifest={ 'PaymentModeEnum', }, ) class PaymentModeEnum(proto.Message): r"""Container for enum describing possible payment modes. """ class PaymentMode(proto.Enum): r"""Enum describing possible payment modes.""" UNSPECIFIED = 0 UNKNOWN = 1 CLICKS = 4 CONVERSION_VALUE = 5 CONVERSIONS = 6 GUEST_STAY = 7 __all__ = tuple(sorted(__protobuf__.manifest))
apache-2.0
-7,034,423,592,130,008,000
28.55
74
0.680203
false
3.966443
false
false
false
dchaplinsky/pep.org.ua
pepdb/tasks/management/commands/export_foreign_companies.py
1
2271
# -*- coding: utf-8 -*- from __future__ import unicode_literals import xlsxwriter from django.core.management.base import BaseCommand from tasks.models import BeneficiariesMatching class Command(BaseCommand): help = ('Exports the list of foreign companies from declarations of PEPs ' 'which aren\'t yet in DB to an excel file for further processing ' 'and reconciliation with the registry') def add_arguments(self, parser): parser.add_argument( 'target_file', help='Excel file to export to', ) def handle(self, *args, **options): keys = [ "owner_name", "company_name_declaration", "company_name_en", "zip", "city", "street", "appt", "country", "company_code", "notes", "status", "company_name_orig", "link", "founder_1", "founder_2", "founder_3", "founder_4", "founder_5", "founder_6", "founder_7" ] workbook = xlsxwriter.Workbook(options["target_file"]) for kind, name in (("f", "Founders"), ("b", "Beneficiaries")): ws = workbook.add_worksheet(name) for i, f in enumerate(keys): ws.write(0, i, f) row = 1 for t in BeneficiariesMatching.objects.filter( status="n", type_of_connection=kind).nocache().iterator(): base_res = { "owner_name": t.person_json["full_name"] } for company in t.pep_company_information: res = base_res.copy() res["company_name_declaration"] = company["company_name"] res["company_name_en"] = company["en_name"] or "" res["country"] = company["country"] res["zip"] = company["address"] or "" res["company_code"] = company["beneficial_owner_company_code"] for i, f in enumerate(keys): ws.write(row, i, res.get(f, "")) row += 1 workbook.close()
mit
-8,220,127,339,190,007,000
30.541667
82
0.484808
false
4.22905
false
false
false
simplegeo/authorize
authorize/gen_xml.py
1
17930
# -*- encoding: utf-8 -*- import re import decimal from xml.etree.cElementTree import fromstring, tostring from xml.etree.cElementTree import Element, iselement from authorize import responses API_SCHEMA = 'https://api.authorize.net/xml/v1/schema/AnetApiSchema.xsd' API_SCHEMA_NS = "AnetApi/xml/v1/schema/AnetApiSchema.xsd" PREFIX = "{AnetApi/xml/v1/schema/AnetApiSchema.xsd}" INDIVIDUAL = u"individual" BUSINESS = u"business" ECHECK_CCD = u"CCD" ECHECK_PPD = u"PPD" ECHECK_TEL = u"TEL" ECHECK_WEB = u"WEB" BANK = u"bank" CREDIT_CARD = u"cc" ECHECK = u"echeck" DAYS_INTERVAL = u"days" MONTHS_INTERVAL = u"months" VALIDATION_NONE = u"none" VALIDATION_TEST = u"testMode" VALIDATION_LIVE = u"liveMode" ACCOUNT_CHECKING = u"checking" ACCOUNT_SAVINGS = u"savings" ACCOUNT_BUSINESS_CHECKING = u"businessChecking" AUTH_ONLY = u"auth_only" CAPTURE_ONLY = u"capture_only" AUTH_CAPTURE = u"auth_capture" CREDIT = u"credit" PRIOR_AUTH_CAPTURE = u"prior_auth_capture" VOID = u"void" class AuthorizeSystemError(Exception): """ I'm a serious kind of exception and I'm raised when something went really bad at a lower level than the application level, like when Authorize is down or when they return an unparseable response """ def __init__(self, *args): self.args = args def __str__(self): return "Exception: %s caused by %s" % self.args def __repr__(self): # Here we are printing a tuple, the , at the end is _required_ return "AuthorizeSystemError%s" % (self.args,) c = re.compile(r'([A-Z]+[a-z_]+)') def convert(arg): """ Convert an object to its xml representation """ if iselement(arg): return arg # the element if isinstance(arg, dict_accessor): try: return arg.text_ except: raise Exception("Cannot serialize %s, missing text_ attribute" % (arg,)) if isinstance(arg, dict): return arg # attributes of the element if isinstance(arg, unicode): return arg if isinstance(arg, decimal.Decimal): return unicode(arg) if arg is True: return 'true' if arg is False: return 'false' if isinstance(arg, float): return 
unicode(round(arg, 2)) # there's nothing less than cents anyway if isinstance(arg, (int, long)): return unicode(arg) if isinstance(arg, str): raise Exception("'%s' not unicode: can only accept unicode strings" % (arg,)) raise Exception("Cannot convert %s of type %s" % (arg, type(arg))) def utf8convert(arg): """ Further extend L{convert} to return UTF-8 strings instead of unicode. """ value = convert(arg) if isinstance(value, unicode): return value.encode('utf-8') return value class XMLBuilder(object): """ XMLBuilder tries to be slightly clever in order to be easier for the programmer. If you try to add arguments that are None they won't be added to the output because empty XML tags are not worth the bandwidth and actually mean something different than None. """ def __getattr__(self, key): def _wrapper_func(*args): converted = [convert(arg) for arg in args if arg is not None] if not converted: return None el = Element(key) settext = False setatts = False for arg in converted: if iselement(arg): el.append(arg) elif isinstance(arg, basestring): assert not settext, "cannot set text twice" el.text = arg settext = True elif isinstance(arg, dict): assert not setatts, "cannot set attributes twice" for k, v in arg.iteritems(): el.set(k, v) setatts = True else: raise TypeError("unhandled argument type: %s" % type(arg)) return el return _wrapper_func x = XMLBuilder() def flatten(tree): """ Return a flattened tree in string format encoded in utf-8 """ return tostring(tree, "utf-8") def purify(s): """ s is an etree.tag and contains also information on the namespace, if that information is present try to remove it, then convert the camelCaseTags to underscore_notation_more_python_friendly. """ if s.startswith(PREFIX): s = s[len(PREFIX):] return '_'.join(atom.lower() for atom in c.split(s) if atom) class dict_accessor(dict): """ Allow accessing a dictionary content also using dot-notation. 
""" def __getattr__(self, attr): return super(dict_accessor, self).__getitem__(attr) def __setattr__(self, attr, value): super(dict_accessor, self).__setitem__(attr, value) def parse_node(node): """ Return a dict_accessor representation of the node. """ new = dict_accessor({}) if node.text and node.text.strip(): t = node.text if isinstance(t, unicode): new['text_'] = t else: new['text_'] = t.decode('utf-8', "replace") if node.attrib: new['attrib_'] = dict_accessor(node.attrib) for child in node.getchildren(): tag = purify(child.tag) child = parse_node(child) if tag not in new: new[tag] = child else: old = new[tag] if not isinstance(old, list): new[tag] = [old] new[tag].append(child) return new def to_dict(s, error_codes, do_raise=True, delimiter=u',', encapsulator=u'', uniform=False): """ Return a dict_accessor representation of the given string, if raise_ is True an exception is raised when an error code is present. """ try: t = fromstring(s) except SyntaxError, e: raise AuthorizeSystemError(e, s) parsed = dict_accessor(parse_node(t)) # discard the root node which is useless try: if isinstance(parsed.messages.message, list): # there's more than a child return parsed code = parsed.messages.message.code.text_ if uniform: parsed.messages.message = [parsed.messages.message] except KeyError: return parsed if code in error_codes: if do_raise: raise error_codes[code] dr = None if parsed.get('direct_response') is not None: dr = parsed.direct_response.text_ elif parsed.get('validation_direct_response') is not None: dr = parsed.validation_direct_response.text_ if dr is not None: parsed.direct_response = parse_direct_response(dr, delimiter, encapsulator) return parsed m = ['code', 'subcode', 'reason_code', 'reason_text', 'auth_code', 'avs', 'trans_id', 'invoice_number', 'description', 'amount', 'method', 'trans_type', 'customer_id', 'first_name', 'last_name', 'company', 'address', 'city', 'state', 'zip', 'country', 'phone', 'fax', 'email', 'ship_first_name', 
'ship_last_name', 'ship_company', 'ship_address', 'ship_city', 'ship_state', 'ship_zip', 'ship_country', 'tax', 'duty', 'freight', 'tax_exempt', 'po_number', 'md5_hash', 'ccv', 'holder_verification'] def parse_direct_response(s, delimiter=u',', encapsulator=u''): """ Very simple format but made of many fields, the most complex ones have the following meanings: code: see L{responses.aim_codes} for all the codes avs: see L{responses.avs_codes} for all the codes method: CC or ECHECK trans_type: AUTH_CAPTURE AUTH_ONLY CAPTURE_ONLY CREDIT PRIOR_AUTH_CAPTURE VOID tax_exempt: true, false, T, F, YES, NO, Y, N, 1, 0 ccv: see L{responses.ccv_codes} for all the codes holder_verification: see L{responses.holder_verification_codes} for all the codes """ if not isinstance(s, unicode): s = s.decode('utf-8', 'replace') # being <e> the encapsulator and <d> the delimiter # this is the format of the direct response: # <e>field<e><d><e>field<e><d><e>field<e> # # Here's a regexp that would parse this: # "\<e>([^\<d>\<e>]*)\<e>\<d>?" # But it has a problem when <e> is '' and I don't # have the will to do the much harder one that actually # does it well... So let's just split and strip. e = encapsulator d = delimiter v = s.split(e+d+e) v[0] = v[0].lstrip(e) v[-1] = v[-1].rstrip(e) if not len(v) >= len(m): d = dict_accessor({'error': "Couldn't parse the direct response"}) else: d = dict_accessor(dict(zip(m, v))) d.original = s return d def macro(action, login, key, *body): """ Main XML structure re-used by every request. """ return getattr(x, action)( {'xmlns': API_SCHEMA_NS}, x.merchantAuthentication( x.name(login), x.transactionKey(key) ), *body ) def _address(pre='', kw={}, *extra): """ Basic address components with extension capability. 
""" return [ x.firstName(kw.get(pre+'first_name')), # optional x.lastName(kw.get(pre+'last_name')), # optional x.company(kw.get(pre+'company')), # optional x.address(kw.get(pre+'address')), # optional x.city(kw.get(pre+'city')), # optional x.state(kw.get(pre+'state')), # optional x.zip(kw.get(pre+'zip')), # optional x.country(kw.get(pre+'country')) # optional ] + list(extra) def address(pre='', **kw): """ Simple address with prefixing possibility """ return x.address( *_address(pre, kw) ) def address_2(pre='', **kw): """ Extended address with phoneNumber and faxNumber in the same tag """ return x.address( *_address(pre, kw, x.phoneNumber(kw.get(pre+'phone')), x.faxNumber(kw.get(pre+'fax')) ) ) def update_address(**kw): return x.address( *_address('ship_', kw, x.phoneNumber(kw.get('ship_phone')), x.faxNumber(kw.get('ship_fax')), x.customerAddressId(kw['customer_address_id']) ) ) def billTo(**kw): return x.billTo( *_address('bill_', kw, x.phoneNumber(kw.get('bill_phone')), # optional x.faxNumber(kw.get('bill_fax')) # optional )# optional ) def arbBillTo(**kw): # This is just to be sure that they were passed. 
# as the spec requires kw['bill_first_name'] kw['bill_last_name'] return x.billTo( *_address('bill_', kw) ) def _shipTo(**kw): return _address('ship_', kw, x.phoneNumber(kw.get('ship_phone')), x.faxNumber(kw.get('ship_fax')) ) def shipToList(**kw): return x.shipToList( *_shipTo(**kw) ) def shipTo(**kw): return x.shipTo( *_shipTo(**kw) ) def payment(**kw): profile_type = kw.get('profile_type', CREDIT_CARD) if profile_type == CREDIT_CARD: return x.payment( x.creditCard( x.cardNumber(kw['card_number']), x.expirationDate(kw['expiration_date']), # YYYY-MM x.cardCode(kw['csc']) ) ) elif profile_type == BANK: return x.payment( x.bankAccount( x.accountType(kw.get('account_type')), # optional: checking, savings, businessChecking x.routingNumber(kw['routing_number']), # 9 digits x.accountNumber(kw['account_number']), # 5 to 17 digits x.nameOnAccount(kw['name_on_account']), x.echeckType(kw.get('echeck_type')), # optional: CCD, PPD, TEL, WEB x.bankName(kw.get('bank_name')) # optional ) ) def transaction(**kw): assert len(kw.get('line_items', [])) <= 30 content = [ x.amount(kw['amount']), x.tax( x.amount(kw.get('tax_amount')), x.name(kw.get('tax_name')), x.description(kw.get('tax_descr')) ), x.shipping( x.amount(kw.get('ship_amount')), x.name(kw.get('ship_name')), x.name(kw.get('ship_description')) ), x.duty( x.amount(kw.get('duty_amount')), x.name(kw.get('duty_name')), x.description(kw.get('duty_description')) ) ] + list( x.lineItems( x.itemId(line.get('item_id')), x.name(line['name']), x.description(line.get('description')), x.quantity(line.get('quantity')), x.unitPrice(line.get('unit_price')), x.taxable(line.get('taxable')) ) for line in kw.get('line_items', []) ) + [ x.customerProfileId(kw['customer_profile_id']), x.customerPaymentProfileId(kw['customer_payment_profile_id']), x.customerAddressId(kw.get('customer_address_id')), ] ptype = kw.get('profile_type', AUTH_ONLY) if ptype in (AUTH_ONLY, CAPTURE_ONLY, AUTH_CAPTURE, CREDIT): content += [ x.order( 
x.invoiceNumber(kw.get('invoice_number')), x.description(kw.get('description')), x.purchaseOrderNumber(kw.get('purchase_order_number')) ) ] if ptype in (AUTH_ONLY, CAPTURE_ONLY, AUTH_CAPTURE): content += [ x.taxExempt(kw.get('tax_exempt', False)), x.recurringBilling(kw.get('recurring', False)), x.cardCode(kw.get('ccv')) ] if ptype == AUTH_ONLY: profile_type = x.profileTransAuthOnly( *content ) elif ptype == CAPTURE_ONLY: profile_type = x.profileTransCaptureOnly( *(content + [x.approvalCode(kw['approval_code'])]) ) elif ptype == AUTH_CAPTURE: profile_type = x.profileTransAuthCapture( *content ) elif ptype == PRIOR_AUTH_CAPTURE: profile_type = x.profileTransPriorAuthCapture( *(content + [x.transId(kw['trans_id'])]) ) # NOTE: It is possible to issue a refund without the customerProfileId and # the customerPaymentProfileId being supplied. However, this is not # currently supported, and requires sending the masked credit card number. elif ptype == CREDIT: profile_type = x.profileTransRefund( *(content + [x.transId(kw['trans_id'])]) ) elif ptype == VOID: profile_type = x.profileTransVoid( *(content + [x.transId(kw['trans_id'])]) ) else: raise Exception("Unsupported profile type: %r" % (ptype,)) return x.transaction(profile_type) def paymentProfiles(**kw): return x.paymentProfiles( x.customerType(kw.get('customer_type')), # optional: individual, business billTo(**kw), payment(**kw) ) def update_paymentProfile(**kw): return x.paymentProfile( x.customerType(kw.get('customer_type')), # optional billTo(**kw), payment(**kw), x.customerPaymentProfileId(kw['customer_payment_profile_id']) ) def paymentProfile(**kw): return x.paymentProfile( x.customerType(kw.get('customer_type')), # optional billTo(**kw), payment(**kw) ) def profile(**kw): content = [ x.merchantCustomerId(kw['customer_id']), x.description(kw.get('description')), x.email(kw.get('email')), ] payment_profiles = kw.get('payment_profiles', None) if payment_profiles is not None: content = content + list( 
paymentProfiles(**prof) for prof in payment_profiles ) else: if kw.get('card_number') or kw.get("routing_number"): content = content + [paymentProfiles(**kw)] return x.profile( *(content + [shipToList(**kw)]) ) def subscription(**kw): trial_occurrences = kw.get('trial_occurrences') trial_amount = None if trial_occurrences is not None: trial_amount = kw['trial_amount'] return x.subscription( x.name(kw.get('subscription_name')), x.paymentSchedule( x.interval( x.length(kw.get('interval_length')), # up to 3 digits, 1-12 for months, 7-365 days x.unit(kw.get('interval_unit')) # days or months ), x.startDate(kw.get('start_date')), # YYYY-MM-DD x.totalOccurrences(kw.get('total_occurrences', 9999)), x.trialOccurrences(trial_occurrences) ), x.amount(kw.get('amount')), x.trialAmount(trial_amount), payment(**kw), x.order( x.invoiceNumber(kw.get('invoice_number')), x.description(kw.get('description')) ), x.customer( x.type(kw.get('customer_type')), # individual, business x.id(kw.get('customer_id')), x.email(kw.get('customer_email')), x.phoneNumber(kw.get('phone')), x.faxNumber(kw.get('fax')), x.driversLicense( x.number(kw.get('driver_number')), x.state(kw.get('driver_state')), x.dateOfBirth(kw.get('driver_birth')) ), x.taxId(kw.get('tax_id')) ), arbBillTo(**kw), shipTo(**kw) ) def base(action, login, key, kw, *main): return flatten( macro(action, login, key, x.refId(kw.get('ref_id')), *main ) ) __doc__ = """\ Please refer to http://www.authorize.net/support/CIM_XML_guide.pdf for documentation on the XML protocol implemented here. """
mit
7,084,245,678,588,358,000
30.236934
102
0.566481
false
3.686266
false
false
false
singingwolfboy/flask-dance
flask_dance/contrib/gitlab.py
1
4319
from flask_dance.consumer import OAuth2ConsumerBlueprint from flask_dance.consumer.requests import OAuth2Session from functools import partial from flask.globals import LocalProxy, _lookup_app_object from flask import _app_ctx_stack as stack __maintainer__ = "Justin Georgeson <jgeorgeson@lopht.net>" class NoVerifyOAuth2Session(OAuth2Session): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.verify = False def make_gitlab_blueprint( client_id=None, client_secret=None, *, scope=None, redirect_url=None, redirect_to=None, login_url=None, authorized_url=None, session_class=None, storage=None, hostname="gitlab.com", verify_tls_certificates=True, rule_kwargs=None, ): """ Make a blueprint for authenticating with GitLab using OAuth 2. This requires a client ID and client secret from GitLab. You should either pass them to this constructor, or make sure that your Flask application config defines them, using the variables :envvar:`GITLAB_OAUTH_CLIENT_ID` and :envvar:`GITLAB_OAUTH_CLIENT_SECRET`. Args: client_id (str): The client ID for your application on GitLab. client_secret (str): The client secret for your application on GitLab scope (str, optional): comma-separated list of scopes for the OAuth token redirect_url (str): the URL to redirect to after the authentication dance is complete redirect_to (str): if ``redirect_url`` is not defined, the name of the view to redirect to after the authentication dance is complete. The actual URL will be determined by :func:`flask.url_for` login_url (str, optional): the URL path for the ``login`` view. Defaults to ``/gitlab`` authorized_url (str, optional): the URL path for the ``authorized`` view. Defaults to ``/gitlab/authorized``. session_class (class, optional): The class to use for creating a Requests session. Defaults to :class:`~flask_dance.consumer.requests.OAuth2Session`. storage: A token storage class, or an instance of a token storage class, to use for this blueprint. 
Defaults to :class:`~flask_dance.consumer.storage.session.SessionStorage`. hostname (str, optional): If using a private instance of GitLab CE/EE, specify the hostname, default is ``gitlab.com``. verify_tls_certificates (bool, optional): Specify whether TLS certificates should be verified. Set this to ``False`` if certificates fail to validate for self-hosted GitLab instances. rule_kwargs (dict, optional): Additional arguments that should be passed when adding the login and authorized routes. Defaults to ``None``. specify the hostname, default is ``gitlab.com`` :rtype: :class:`~flask_dance.consumer.OAuth2ConsumerBlueprint` :returns: A :doc:`blueprint <flask:blueprints>` to attach to your Flask app. """ if not verify_tls_certificates: if session_class: raise ValueError( "cannot override session_class and disable certificate validation" ) else: session_class = NoVerifyOAuth2Session gitlab_bp = OAuth2ConsumerBlueprint( "gitlab", __name__, client_id=client_id, client_secret=client_secret, scope=scope, base_url=f"https://{hostname}/api/v4/", authorization_url=f"https://{hostname}/oauth/authorize", token_url=f"https://{hostname}/oauth/token", redirect_url=redirect_url, redirect_to=redirect_to, login_url=login_url, authorized_url=authorized_url, session_class=session_class, storage=storage, token_url_params={"verify": verify_tls_certificates}, rule_kwargs=rule_kwargs, ) gitlab_bp.from_config["client_id"] = "GITLAB_OAUTH_CLIENT_ID" gitlab_bp.from_config["client_secret"] = "GITLAB_OAUTH_CLIENT_SECRET" @gitlab_bp.before_app_request def set_applocal_session(): ctx = stack.top ctx.gitlab_oauth = gitlab_bp.session return gitlab_bp gitlab = LocalProxy(partial(_lookup_app_object, "gitlab_oauth"))
mit
-1,558,331,981,668,643,000
38.990741
92
0.659875
false
4.148895
false
false
false
badbytes/pymeg
pdf2py/update_data_header.py
1
2127
# update_data_header.py # # Copyright 2010 dan collins <danc@badbytes.net> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. '''update_data_header.py updates the p.data.hdr header to prepare for rewrite of 4D format data. accomidates when channels read are less that in orig file, or window changes, etc''' from numpy import * def cutchannels(data): data.hdr.header_data.total_chans = array([data.channels.indexlist.__len__()], dtype=data.hdr.header_data.total_chans.dtype) channel_ref_data = arange(data.channels.indexlist.__len__()).tolist() #channel_ref_data = [] #for i in data.channels.indexlist: for i in range(0, data.channels.indexlist.__len__()): #print data.channels.reverseindex[i], channel_ref_data, data.hdr.channel_ref_data[data.channels.reverseindex[i]] try: channel_ref_data[i] = data.hdr.channel_ref_data[data.channels.indexlist[i]] #channel_ref_data.append(data.hdr.channel_ref_data[i]) #data.hdr.channel_ref_data[i].index = array([channel_ref_data.__len__()], dtype=data.hdr.channel_ref_data[i].index.dtype) channel_ref_data[i].index = array([i], dtype=data.hdr.channel_ref_data[data.channels.indexlist[i]].index.dtype) except IndexError: print 'IndexError... NEED TO FIX' #print channel_ref_data[i].index data.hdr.channel_ref_data = channel_ref_data
gpl-3.0
-5,053,035,822,500,065,000
47.340909
129
0.684532
false
3.57479
false
false
false
google/revisiting-self-supervised
self_supervision/patch_utils.py
1
12719
#!/usr/bin/python # # Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utils for patch based image processing.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import functools import struct import numpy as np import tensorflow as tf import tensorflow_hub as hub import preprocess import utils from models.utils import get_net from trainer import make_estimator FLAGS = tf.flags.FLAGS PATCH_H_COUNT = 3 PATCH_W_COUNT = 3 PATCH_COUNT = PATCH_H_COUNT * PATCH_W_COUNT # It's supposed to be in the root folder, which is also pwd when running, if the # instructions in the README are followed. Hence not a flag. PERMUTATION_PATH = 'permutations_100_max.bin' def apply_model(image_fn, is_training, num_outputs, perms, make_signature=False): """Creates the patch based model output from patches representations. Args: image_fn: function returns image tensor. is_training: is training flag used for batch norm and drop out. num_outputs: number of output classes. perms: numpy array with shape [m, k], element range [0, PATCH_COUNT). k stands for the patch numbers used in a permutation. m stands forthe number of permutations. Each permutation is used to concat the patch inputs [n*PATCH_COUNT, h, w, c] into tensor with shape [n*m, h, w, c*k]. make_signature: whether to create signature for hub module. Returns: out: output tensor with shape [n*m, 1, 1, num_outputs]. 
Raises: ValueError: An error occurred when the architecture is unknown. """ images = image_fn() net = get_net(num_classes=FLAGS.get_flag_value('embed_dim', 1000)) out, end_points = net(images, is_training, weight_decay=FLAGS.get_flag_value('weight_decay', 1e-4)) print(end_points) if not make_signature: out = permutate_and_concat_batch_patches(out, perms) out = fully_connected(out, num_outputs, is_training=is_training) out = tf.squeeze(out, [1, 2]) if make_signature: hub.add_signature(inputs={'image': images}, outputs=out) hub.add_signature( name='representation', inputs={'image': images}, outputs=end_points) return out def image_grid(images, ny, nx, padding=0): """Create a batch of image grids from a batch of images. Args: images: A batch of patches (B,N,H,W,C) ny: vertical number of images nx: horizontal number of images padding: number of zeros between images, if any. Returns: A tensor batch of image grids shaped (B,H*ny,W*nx,C), although that is a simplifying lie: if padding is used h/w will be different. """ with tf.name_scope('grid_image'): if padding: padding = [padding, padding] images = tf.pad(images, [[0, 0], [0, 0], padding, padding, [0, 0]]) return tf.concat([ tf.concat([images[:, y * nx + x] for x in range(nx)], axis=-2) for y in range(ny)], axis=-3) def creates_estimator_model(images, labels, perms, num_classes, mode): """Creates EstimatorSpec for the patch based self supervised models. 
Args: images: images labels: self supervised labels (class indices) perms: patch permutations num_classes: number of different permutations mode: model's mode: training, eval or prediction Returns: EstimatorSpec """ print(' +++ Mode: %s, images: %s, labels: %s' % (mode, images, labels)) images = tf.reshape(images, shape=[-1] + images.get_shape().as_list()[-3:]) if mode in [tf.estimator.ModeKeys.TRAIN, tf.estimator.ModeKeys.EVAL]: with tf.variable_scope('module'): image_fn = lambda: images logits = apply_model( image_fn=image_fn, is_training=(mode == tf.estimator.ModeKeys.TRAIN), num_outputs=num_classes, perms=perms, make_signature=False) else: input_shape = utils.str2intlist( FLAGS.get_flag_value('serving_input_shape', 'None,None,None,3')) image_fn = lambda: tf.placeholder( # pylint: disable=g-long-lambda shape=input_shape, dtype=tf.float32) apply_model_function = functools.partial( apply_model, image_fn=image_fn, num_outputs=num_classes, perms=perms, make_signature=True) tf_hub_module_spec = hub.create_module_spec( apply_model_function, [(utils.TAGS_IS_TRAINING, { 'is_training': True }), (set(), { 'is_training': False })], drop_collections=['summaries']) tf_hub_module = hub.Module(tf_hub_module_spec, trainable=False, tags=set()) hub.register_module_for_export(tf_hub_module, export_name='module') logits = tf_hub_module(images) return make_estimator(mode, predictions=logits) # build loss and accuracy loss = tf.nn.sparse_softmax_cross_entropy_with_logits( labels=labels, logits=logits) loss = tf.reduce_mean(loss) eval_metrics = ( lambda labels, logits: { # pylint: disable=g-long-lambda 'accuracy': tf.metrics.accuracy( labels=labels, predictions=tf.argmax(logits, axis=-1))}, [labels, logits]) return make_estimator(mode, loss, eval_metrics, logits) def fully_connected(inputs, num_classes=100, weight_decay=5e-4, keep_prob=0.5, is_training=True): """Two layers fully connected network copied from Alexnet fc7-fc8.""" net = inputs _, _, w, _ = net.get_shape().as_list() 
kernel_regularizer = tf.contrib.layers.l2_regularizer(scale=weight_decay) net = tf.layers.conv2d( net, filters=4096, kernel_size=w, padding='same', kernel_initializer=tf.truncated_normal_initializer(0.0, 0.005), bias_initializer=tf.constant_initializer(0.1), kernel_regularizer=kernel_regularizer) net = tf.layers.batch_normalization( net, momentum=0.997, epsilon=1e-5, fused=None, training=is_training) net = tf.nn.relu(net) if is_training: net = tf.nn.dropout(net, keep_prob=keep_prob) net = tf.layers.conv2d( net, filters=num_classes, kernel_size=1, padding='same', kernel_initializer=tf.truncated_normal_initializer(0.0, 0.005), bias_initializer=tf.zeros_initializer(), kernel_regularizer=kernel_regularizer) return net def generate_patch_locations(): """Generates relative patch locations.""" perms = np.array([(i, 4) for i in range(9) if i != 4]) return perms, len(perms) def load_permutations(): """Loads a set of pre-defined permutations.""" with tf.gfile.Open(PERMUTATION_PATH, 'rb') as f: int32_size = 4 s = f.read(int32_size * 2) [num_perms, c] = struct.unpack('<ll', s) perms = [] for _ in range(num_perms * c): s = f.read(int32_size) x = struct.unpack('<l', s) perms.append(x[0]) perms = np.reshape(perms, [num_perms, c]) # The bin file used index [1,9] for permutation, updated to [0, 8] for index. perms = perms - 1 assert np.min(perms) == 0 and np.max(perms) == PATCH_COUNT - 1 return perms, num_perms def permutate_and_concat_image_patches(patch_embeddings, perms): """Permutates patches from an image according to permutations. Args: patch_embeddings: input tensor with shape [PATCH_COUNT, h, w, c], where PATCH_COUNT is the patch number per image. perms: numpy array with shape [m, k], with element in range [0, PATCH_COUNT). Permutation is used to concat the patches. Returns: out: output tensor with shape [m, h, w, c*k]. 
""" _, h, w, c = patch_embeddings.get_shape().as_list() if isinstance(perms, np.ndarray): num_perms, perm_len = perms.shape else: num_perms, perm_len = perms.get_shape().as_list() def permutate_patch(perm): permed = tf.gather(patch_embeddings, perm, axis=0) concat_tensor = tf.transpose(permed, perm=[1, 2, 3, 0]) concat_tensor = tf.reshape( concat_tensor, shape=[-1, h, w, perm_len * c]) return concat_tensor permed_patches = tf.stack([ permutate_patch(perms[i]) for i in range(num_perms) ]) return permed_patches def permutate_and_concat_batch_patches(batch_patch_embeddings, perms): """Permutates patches from a mini batch according to permutations. Args: batch_patch_embeddings: input tensor with shape [n*PATCH_COUNT, h, w, c] or [n*PATCH_COUNT, c], where PATCH_COUNT is the patch number per image and n is the number of images in this mini batch. perms: numpy array with shape [m, k], with element in range [0, PATCH_COUNT). Permutation is used to concat the patches. Returns: out: output tensor with shape [n*m, h, w, c*k]. 
""" print(' +++ permutate patches input: %s' % batch_patch_embeddings) if len(batch_patch_embeddings.get_shape().as_list()) == 4: _, h, w, c = batch_patch_embeddings.get_shape().as_list() elif len(batch_patch_embeddings.get_shape().as_list()) == 2: _, c = batch_patch_embeddings.get_shape().as_list() h, w = (1, 1) else: raise ValueError('Unexpected batch_patch_embeddings shape: %s' % batch_patch_embeddings.get_shape().as_list()) patches = tf.reshape(batch_patch_embeddings, shape=[-1, PATCH_COUNT, h, w, c]) patches = tf.stack([ permutate_and_concat_image_patches(patches[i], perms) for i in range(patches.get_shape().as_list()[0]) ]) patches = tf.reshape(patches, shape=[-1, h, w, perms.shape[1] * c]) print(' +++ permutate patches output: %s' % batch_patch_embeddings) return patches def get_patch_representation( images, hub_module, patch_preprocess='crop_patches,standardization', is_training=False, target_features=9000, pooling_fn=None, combine_patches='concat', signature='representation'): """Permutates patches from a mini batch according to permutations. Args: images: input images, can be full image (NHWC) or image patchs (NPHWC). hub_module: hub module. patch_preprocess: preprocess applied to the image. Note that preprocess may require setting parameters in the FLAGS.config file. is_training: is training mode. target_features: target feature dimension. Note that the features might exceed this number if there're too many channels. pooling_fn: pooling method applied to the features. combine_patches: one of {'concat', 'max_pool', 'avg_pool'}. signature: signature for the hub module. Returns: out: output representation tensors. Raises: ValueError: unsupported combine_patches. """ if patch_preprocess: preprocess_fn = preprocess.get_preprocess_fn(patch_preprocess, is_training) images = preprocess_fn({'image': images})['image'] assert len(images.get_shape().as_list()) == 5, 'Shape must match NPHWC.' 
_, num_of_patches, h, w, c = images.get_shape().as_list() images = tf.reshape(images, shape=[-1, h, w, c]) out_tensors = hub_module( images, signature=signature, as_dict=True) if combine_patches == 'concat': target_features = target_features // num_of_patches if pooling_fn is not None: out_tensors = pooling_fn(out_tensors) for k, t in out_tensors.iteritems(): if len(t.get_shape().as_list()) == 2: t = t[:, None, None, :] assert len(t.get_shape().as_list()) == 4, 'Unsupported rank %d' % len( t.get_shape().as_list()) # Take patch-dimension out of batch-dimension: [NP]HWC -> NPHWC t = tf.reshape(t, [-1, num_of_patches] + t.get_shape().as_list()[-3:]) if combine_patches == 'concat': # [N, P, H, W, C] -> [N, H, W, P*C] _, p, h, w, c = t.get_shape().as_list() out_tensors[k] = tf.reshape( tf.transpose(t, perm=[0, 2, 3, 4, 1]), tf.stack([-1, h, w, p * c])) elif combine_patches == 'max_pool': # Reduce max on P channel of NPHWC. out_tensors[k] = tf.reduce_max(t, axis=1) elif combine_patches == 'avg_pool': # Reduce mean on P channel of NPHWC. out_tensors[k] = tf.reduce_mean(t, axis=1) else: raise ValueError( 'Unsupported combine patches method %s.' % combine_patches) return out_tensors
apache-2.0
1,947,931,036,470,123,300
33.008021
80
0.652174
false
3.483703
false
false
false
DevynCJohnson/Pybooster
pylib/code_interpreter.py
1
5388
#!/usr/bin/env python3 # -*- coding: utf-8; Mode: Python; indent-tabs-mode: nil; tab-width: 4 -*- # vim: set fileencoding=utf-8 filetype=python syntax=python.doxygen fileformat=unix tabstop=4 expandtab : # kate: encoding utf-8; bom off; syntax python; indent-mode python; eol unix; replace-tabs off; indent-width 4; tab-width 4; remove-trailing-space on; """@brief Interpret various computer languages using installed interpreters. @file code_interpreter.py @package pybooster.code_interpreter @version 2019.07.14 @author Devyn Collier Johnson <DevynCJohnson@Gmail.com> @copyright LGPLv3 @section LICENSE GNU Lesser General Public License v3 Copyright (c) Devyn Collier Johnson, All rights reserved. This software is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this software. 
""" from subprocess import getoutput # nosec from sys import stdout __all__: list = [ # CLISP # r'execclispfile', # COFFEESCRIPT # r'execcoffeescript', # JAVASCRIPT # r'execjs', r'execjsfile', # LUA # r'execlua', r'execluafile', # PERL # r'execperl', r'execperlfile', r'initperl', # PHP # r'execphp', r'execphpfile', # RUBY # r'execruby', r'execrubyfile', # SCALA # r'execscala', r'execscala', # SHELL # r'execsh', r'execshfile', r'initsh' ] # CLISP # def execclispfile(_filename: str) -> str: """Execute a CLisp file given as a str and return the output as a str.""" return getoutput(r'clisp ' + _filename) # COFFEESCRIPT # def execcoffeescript(_code: str) -> str: """Execute Coffeescript code given as a str and return the output as a str.""" return getoutput('coffeescript --eval \'' + _code.replace('\'', '\\\'') + '\'') # JAVASCRIPT # def execjs(_code: str) -> str: """Execute JavaScript code given as a str and return the output as a str.""" return getoutput('jsc -e \'' + _code.replace('\'', '\\\'') + '\'') def execjsfile(_filename: str) -> str: """Execute a JavaScript file given as a str and return the output as a str.""" return getoutput(r'jsc -e ' + _filename) # LUA # def execlua(_code: str) -> str: """Execute Lua code given as a str and return the output as a str.""" return getoutput('lua -e \'' + _code.replace('\'', '\\\'') + '\'') def execluafile(_filename: str) -> str: """Execute a Lua script given as a str and return the output as a str.""" return getoutput(r'lua ' + _filename) # PERL # def execperl(_code: str) -> str: """Execute Perl code given as a str and return the output as a str.""" return getoutput('perl -e \'' + _code.replace('\'', '\\\'') + '\'') def execperlfile(_filename: str) -> str: """Execute a Perl script given as a str and return the output as a str.""" return getoutput(r'perl ' + _filename) def initperl() -> None: """Run a Perl REP-Loop (Read-Evaluate-Print-Loop).""" _input: str = r'' while 1: _input = input(r'Perl > ').replace('\'', '\\\'') # 
nosec if _input in {r'exit', r'quit'}: break stdout.write(getoutput('perl -e \'' + _input + '\'') + '\n') # PHP # def execphp(_code: str) -> str: """Execute PHP code given as a str and return the output as a str.""" return getoutput('php -r \'' + _code.replace('\'', '\\\'') + '\'') def execphpfile(_filename: str) -> str: """Execute a PHP script given as a str and return the output as a str.""" return getoutput(r'php -f ' + _filename) # RUBY # def execruby(_code: str) -> str: """Execute Ruby code given as a str and return the output as a str.""" return getoutput('ruby -e \'' + _code.replace('\'', '\\\'') + '\'') def execrubyfile(_filename: str) -> str: """Execute a Ruby script given as a str and return the output as a str.""" return getoutput(r'ruby ' + _filename) # SCALA # def execscala(_code: str) -> str: """Execute Scala code given as a str and return the output as a str.""" return getoutput('scala -e \'' + _code.replace('\'', '\\\'') + '\'') def execscalafile(_filename: str) -> str: """Execute a Scala file given as a str and return the output as a str.""" return getoutput(r'scala ' + _filename) # SHELL # def execsh(_code: str) -> str: """Execute Shell code given as a str and return the output as a str.""" return getoutput('sh -c \'' + _code.replace('\'', '\\\'') + '\'') def execshfile(_filename: str) -> str: """Execute a Shell script given as a str and return the output as a str.""" return getoutput(r'sh ' + _filename) def initsh() -> None: """Run a shell REP-Loop (Read-Evaluate-Print-Loop).""" _input: str = r'' while 1: _input = input(r'Shell: $ ').replace('\'', '\\\'') # nosec if _input in {r'exit', r'quit'}: break stdout.write(getoutput('sh -c \'' + _input + '\'') + '\n')
lgpl-3.0
9,036,564,200,472,079,000
27.209424
150
0.627506
false
3.332096
false
false
false
dcluna/screenkey
setup.py
1
1475
#!/usr/bin/env python from setuptools import setup setup(name='screenkey', version='0.5', description='A screencast tool to display keys', author='Pablo Seminario', author_email='pabluk@gmail.com', maintainer='Yuri D\'Elia', maintainer_email='wavexx@thregr.org', license='GPLv3+', keywords='screencast keyboard keys', url='https://github.com/wavexx/screenkey', download_url='https://github.com/wavexx/screenkey/releases', classifiers=['Development Status :: 5 - Production/Stable', 'Environment :: X11 Applications :: GTK', 'Intended Audience :: Education', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', 'Operating System :: POSIX', 'Operating System :: Unix', 'Topic :: Education', 'Topic :: Multimedia :: Graphics :: Presentation', 'Topic :: Multimedia :: Video :: Capture'], long_description=""" Screenkey is a useful tool for presentations or screencasts. Inspired by ScreenFlick and initially based on the key-mon project code. """, scripts=['screenkey'], packages=['Screenkey'], data_files=[('share/applications', ['data/screenkey.desktop']), ('share/doc/screenkey', ['README.rst', 'NEWS.rst'])], )
gpl-3.0
-1,585,662,621,625,723,100
39.972222
96
0.583051
false
4.325513
false
false
false
ryfeus/lambda-packs
Opencv_pil/source36/numpy/core/tests/test_shape_base.py
1
24251
from __future__ import division, absolute_import, print_function import pytest import sys import numpy as np from numpy.core import ( array, arange, atleast_1d, atleast_2d, atleast_3d, block, vstack, hstack, newaxis, concatenate, stack ) from numpy.core.shape_base import (_block_dispatcher, _block_setup, _block_concatenate, _block_slicing) from numpy.testing import ( assert_, assert_raises, assert_array_equal, assert_equal, assert_raises_regex, assert_warns ) from numpy.compat import long class TestAtleast1d(object): def test_0D_array(self): a = array(1) b = array(2) res = [atleast_1d(a), atleast_1d(b)] desired = [array([1]), array([2])] assert_array_equal(res, desired) def test_1D_array(self): a = array([1, 2]) b = array([2, 3]) res = [atleast_1d(a), atleast_1d(b)] desired = [array([1, 2]), array([2, 3])] assert_array_equal(res, desired) def test_2D_array(self): a = array([[1, 2], [1, 2]]) b = array([[2, 3], [2, 3]]) res = [atleast_1d(a), atleast_1d(b)] desired = [a, b] assert_array_equal(res, desired) def test_3D_array(self): a = array([[1, 2], [1, 2]]) b = array([[2, 3], [2, 3]]) a = array([a, a]) b = array([b, b]) res = [atleast_1d(a), atleast_1d(b)] desired = [a, b] assert_array_equal(res, desired) def test_r1array(self): """ Test to make sure equivalent Travis O's r1array function """ assert_(atleast_1d(3).shape == (1,)) assert_(atleast_1d(3j).shape == (1,)) assert_(atleast_1d(long(3)).shape == (1,)) assert_(atleast_1d(3.0).shape == (1,)) assert_(atleast_1d([[2, 3], [4, 5]]).shape == (2, 2)) class TestAtleast2d(object): def test_0D_array(self): a = array(1) b = array(2) res = [atleast_2d(a), atleast_2d(b)] desired = [array([[1]]), array([[2]])] assert_array_equal(res, desired) def test_1D_array(self): a = array([1, 2]) b = array([2, 3]) res = [atleast_2d(a), atleast_2d(b)] desired = [array([[1, 2]]), array([[2, 3]])] assert_array_equal(res, desired) def test_2D_array(self): a = array([[1, 2], [1, 2]]) b = array([[2, 3], [2, 3]]) res = [atleast_2d(a), 
atleast_2d(b)] desired = [a, b] assert_array_equal(res, desired) def test_3D_array(self): a = array([[1, 2], [1, 2]]) b = array([[2, 3], [2, 3]]) a = array([a, a]) b = array([b, b]) res = [atleast_2d(a), atleast_2d(b)] desired = [a, b] assert_array_equal(res, desired) def test_r2array(self): """ Test to make sure equivalent Travis O's r2array function """ assert_(atleast_2d(3).shape == (1, 1)) assert_(atleast_2d([3j, 1]).shape == (1, 2)) assert_(atleast_2d([[[3, 1], [4, 5]], [[3, 5], [1, 2]]]).shape == (2, 2, 2)) class TestAtleast3d(object): def test_0D_array(self): a = array(1) b = array(2) res = [atleast_3d(a), atleast_3d(b)] desired = [array([[[1]]]), array([[[2]]])] assert_array_equal(res, desired) def test_1D_array(self): a = array([1, 2]) b = array([2, 3]) res = [atleast_3d(a), atleast_3d(b)] desired = [array([[[1], [2]]]), array([[[2], [3]]])] assert_array_equal(res, desired) def test_2D_array(self): a = array([[1, 2], [1, 2]]) b = array([[2, 3], [2, 3]]) res = [atleast_3d(a), atleast_3d(b)] desired = [a[:,:, newaxis], b[:,:, newaxis]] assert_array_equal(res, desired) def test_3D_array(self): a = array([[1, 2], [1, 2]]) b = array([[2, 3], [2, 3]]) a = array([a, a]) b = array([b, b]) res = [atleast_3d(a), atleast_3d(b)] desired = [a, b] assert_array_equal(res, desired) class TestHstack(object): def test_non_iterable(self): assert_raises(TypeError, hstack, 1) def test_empty_input(self): assert_raises(ValueError, hstack, ()) def test_0D_array(self): a = array(1) b = array(2) res = hstack([a, b]) desired = array([1, 2]) assert_array_equal(res, desired) def test_1D_array(self): a = array([1]) b = array([2]) res = hstack([a, b]) desired = array([1, 2]) assert_array_equal(res, desired) def test_2D_array(self): a = array([[1], [2]]) b = array([[1], [2]]) res = hstack([a, b]) desired = array([[1, 1], [2, 2]]) assert_array_equal(res, desired) def test_generator(self): with assert_warns(FutureWarning): hstack((np.arange(3) for _ in range(2))) if sys.version_info.major 
> 2: # map returns a list on Python 2 with assert_warns(FutureWarning): hstack(map(lambda x: x, np.ones((3, 2)))) class TestVstack(object): def test_non_iterable(self): assert_raises(TypeError, vstack, 1) def test_empty_input(self): assert_raises(ValueError, vstack, ()) def test_0D_array(self): a = array(1) b = array(2) res = vstack([a, b]) desired = array([[1], [2]]) assert_array_equal(res, desired) def test_1D_array(self): a = array([1]) b = array([2]) res = vstack([a, b]) desired = array([[1], [2]]) assert_array_equal(res, desired) def test_2D_array(self): a = array([[1], [2]]) b = array([[1], [2]]) res = vstack([a, b]) desired = array([[1], [2], [1], [2]]) assert_array_equal(res, desired) def test_2D_array2(self): a = array([1, 2]) b = array([1, 2]) res = vstack([a, b]) desired = array([[1, 2], [1, 2]]) assert_array_equal(res, desired) def test_generator(self): with assert_warns(FutureWarning): vstack((np.arange(3) for _ in range(2))) class TestConcatenate(object): def test_returns_copy(self): a = np.eye(3) b = np.concatenate([a]) b[0, 0] = 2 assert b[0, 0] != a[0, 0] def test_exceptions(self): # test axis must be in bounds for ndim in [1, 2, 3]: a = np.ones((1,)*ndim) np.concatenate((a, a), axis=0) # OK assert_raises(np.AxisError, np.concatenate, (a, a), axis=ndim) assert_raises(np.AxisError, np.concatenate, (a, a), axis=-(ndim + 1)) # Scalars cannot be concatenated assert_raises(ValueError, concatenate, (0,)) assert_raises(ValueError, concatenate, (np.array(0),)) # test shapes must match except for concatenation axis a = np.ones((1, 2, 3)) b = np.ones((2, 2, 3)) axis = list(range(3)) for i in range(3): np.concatenate((a, b), axis=axis[0]) # OK assert_raises(ValueError, np.concatenate, (a, b), axis=axis[1]) assert_raises(ValueError, np.concatenate, (a, b), axis=axis[2]) a = np.moveaxis(a, -1, 0) b = np.moveaxis(b, -1, 0) axis.append(axis.pop(0)) # No arrays to concatenate raises ValueError assert_raises(ValueError, concatenate, ()) def 
test_concatenate_axis_None(self): a = np.arange(4, dtype=np.float64).reshape((2, 2)) b = list(range(3)) c = ['x'] r = np.concatenate((a, a), axis=None) assert_equal(r.dtype, a.dtype) assert_equal(r.ndim, 1) r = np.concatenate((a, b), axis=None) assert_equal(r.size, a.size + len(b)) assert_equal(r.dtype, a.dtype) r = np.concatenate((a, b, c), axis=None) d = array(['0.0', '1.0', '2.0', '3.0', '0', '1', '2', 'x']) assert_array_equal(r, d) out = np.zeros(a.size + len(b)) r = np.concatenate((a, b), axis=None) rout = np.concatenate((a, b), axis=None, out=out) assert_(out is rout) assert_equal(r, rout) def test_large_concatenate_axis_None(self): # When no axis is given, concatenate uses flattened versions. # This also had a bug with many arrays (see gh-5979). x = np.arange(1, 100) r = np.concatenate(x, None) assert_array_equal(x, r) # This should probably be deprecated: r = np.concatenate(x, 100) # axis is >= MAXDIMS assert_array_equal(x, r) def test_concatenate(self): # Test concatenate function # One sequence returns unmodified (but as array) r4 = list(range(4)) assert_array_equal(concatenate((r4,)), r4) # Any sequence assert_array_equal(concatenate((tuple(r4),)), r4) assert_array_equal(concatenate((array(r4),)), r4) # 1D default concatenation r3 = list(range(3)) assert_array_equal(concatenate((r4, r3)), r4 + r3) # Mixed sequence types assert_array_equal(concatenate((tuple(r4), r3)), r4 + r3) assert_array_equal(concatenate((array(r4), r3)), r4 + r3) # Explicit axis specification assert_array_equal(concatenate((r4, r3), 0), r4 + r3) # Including negative assert_array_equal(concatenate((r4, r3), -1), r4 + r3) # 2D a23 = array([[10, 11, 12], [13, 14, 15]]) a13 = array([[0, 1, 2]]) res = array([[10, 11, 12], [13, 14, 15], [0, 1, 2]]) assert_array_equal(concatenate((a23, a13)), res) assert_array_equal(concatenate((a23, a13), 0), res) assert_array_equal(concatenate((a23.T, a13.T), 1), res.T) assert_array_equal(concatenate((a23.T, a13.T), -1), res.T) # Arrays much match shape 
assert_raises(ValueError, concatenate, (a23.T, a13.T), 0) # 3D res = arange(2 * 3 * 7).reshape((2, 3, 7)) a0 = res[..., :4] a1 = res[..., 4:6] a2 = res[..., 6:] assert_array_equal(concatenate((a0, a1, a2), 2), res) assert_array_equal(concatenate((a0, a1, a2), -1), res) assert_array_equal(concatenate((a0.T, a1.T, a2.T), 0), res.T) out = res.copy() rout = concatenate((a0, a1, a2), 2, out=out) assert_(out is rout) assert_equal(res, rout) def test_bad_out_shape(self): a = array([1, 2]) b = array([3, 4]) assert_raises(ValueError, concatenate, (a, b), out=np.empty(5)) assert_raises(ValueError, concatenate, (a, b), out=np.empty((4,1))) assert_raises(ValueError, concatenate, (a, b), out=np.empty((1,4))) concatenate((a, b), out=np.empty(4)) def test_out_dtype(self): out = np.empty(4, np.float32) res = concatenate((array([1, 2]), array([3, 4])), out=out) assert_(out is res) out = np.empty(4, np.complex64) res = concatenate((array([0.1, 0.2]), array([0.3, 0.4])), out=out) assert_(out is res) # invalid cast out = np.empty(4, np.int32) assert_raises(TypeError, concatenate, (array([0.1, 0.2]), array([0.3, 0.4])), out=out) def test_stack(): # non-iterable input assert_raises(TypeError, stack, 1) # 0d input for input_ in [(1, 2, 3), [np.int32(1), np.int32(2), np.int32(3)], [np.array(1), np.array(2), np.array(3)]]: assert_array_equal(stack(input_), [1, 2, 3]) # 1d input examples a = np.array([1, 2, 3]) b = np.array([4, 5, 6]) r1 = array([[1, 2, 3], [4, 5, 6]]) assert_array_equal(np.stack((a, b)), r1) assert_array_equal(np.stack((a, b), axis=1), r1.T) # all input types assert_array_equal(np.stack(list([a, b])), r1) assert_array_equal(np.stack(array([a, b])), r1) # all shapes for 1d input arrays = [np.random.randn(3) for _ in range(10)] axes = [0, 1, -1, -2] expected_shapes = [(10, 3), (3, 10), (3, 10), (10, 3)] for axis, expected_shape in zip(axes, expected_shapes): assert_equal(np.stack(arrays, axis).shape, expected_shape) assert_raises_regex(np.AxisError, 'out of bounds', stack, 
arrays, axis=2) assert_raises_regex(np.AxisError, 'out of bounds', stack, arrays, axis=-3) # all shapes for 2d input arrays = [np.random.randn(3, 4) for _ in range(10)] axes = [0, 1, 2, -1, -2, -3] expected_shapes = [(10, 3, 4), (3, 10, 4), (3, 4, 10), (3, 4, 10), (3, 10, 4), (10, 3, 4)] for axis, expected_shape in zip(axes, expected_shapes): assert_equal(np.stack(arrays, axis).shape, expected_shape) # empty arrays assert_(stack([[], [], []]).shape == (3, 0)) assert_(stack([[], [], []], axis=1).shape == (0, 3)) # edge cases assert_raises_regex(ValueError, 'need at least one array', stack, []) assert_raises_regex(ValueError, 'must have the same shape', stack, [1, np.arange(3)]) assert_raises_regex(ValueError, 'must have the same shape', stack, [np.arange(3), 1]) assert_raises_regex(ValueError, 'must have the same shape', stack, [np.arange(3), 1], axis=1) assert_raises_regex(ValueError, 'must have the same shape', stack, [np.zeros((3, 3)), np.zeros(3)], axis=1) assert_raises_regex(ValueError, 'must have the same shape', stack, [np.arange(2), np.arange(3)]) # generator is deprecated with assert_warns(FutureWarning): result = stack((x for x in range(3))) assert_array_equal(result, np.array([0, 1, 2])) class TestBlock(object): @pytest.fixture(params=['block', 'force_concatenate', 'force_slicing']) def block(self, request): # blocking small arrays and large arrays go through different paths. # the algorithm is triggered depending on the number of element # copies required. # We define a test fixture that forces most tests to go through # both code paths. # Ultimately, this should be removed if a single algorithm is found # to be faster for both small and large arrays. 
def _block_force_concatenate(arrays): arrays, list_ndim, result_ndim, _ = _block_setup(arrays) return _block_concatenate(arrays, list_ndim, result_ndim) def _block_force_slicing(arrays): arrays, list_ndim, result_ndim, _ = _block_setup(arrays) return _block_slicing(arrays, list_ndim, result_ndim) if request.param == 'force_concatenate': return _block_force_concatenate elif request.param == 'force_slicing': return _block_force_slicing elif request.param == 'block': return block else: raise ValueError('Unknown blocking request. There is a typo in the tests.') def test_returns_copy(self, block): a = np.eye(3) b = block(a) b[0, 0] = 2 assert b[0, 0] != a[0, 0] def test_block_total_size_estimate(self, block): _, _, _, total_size = _block_setup([1]) assert total_size == 1 _, _, _, total_size = _block_setup([[1]]) assert total_size == 1 _, _, _, total_size = _block_setup([[1, 1]]) assert total_size == 2 _, _, _, total_size = _block_setup([[1], [1]]) assert total_size == 2 _, _, _, total_size = _block_setup([[1, 2], [3, 4]]) assert total_size == 4 def test_block_simple_row_wise(self, block): a_2d = np.ones((2, 2)) b_2d = 2 * a_2d desired = np.array([[1, 1, 2, 2], [1, 1, 2, 2]]) result = block([a_2d, b_2d]) assert_equal(desired, result) def test_block_simple_column_wise(self, block): a_2d = np.ones((2, 2)) b_2d = 2 * a_2d expected = np.array([[1, 1], [1, 1], [2, 2], [2, 2]]) result = block([[a_2d], [b_2d]]) assert_equal(expected, result) def test_block_with_1d_arrays_row_wise(self, block): # # # 1-D vectors are treated as row arrays a = np.array([1, 2, 3]) b = np.array([2, 3, 4]) expected = np.array([1, 2, 3, 2, 3, 4]) result = block([a, b]) assert_equal(expected, result) def test_block_with_1d_arrays_multiple_rows(self, block): a = np.array([1, 2, 3]) b = np.array([2, 3, 4]) expected = np.array([[1, 2, 3, 2, 3, 4], [1, 2, 3, 2, 3, 4]]) result = block([[a, b], [a, b]]) assert_equal(expected, result) def test_block_with_1d_arrays_column_wise(self, block): # # # 1-D vectors 
are treated as row arrays a_1d = np.array([1, 2, 3]) b_1d = np.array([2, 3, 4]) expected = np.array([[1, 2, 3], [2, 3, 4]]) result = block([[a_1d], [b_1d]]) assert_equal(expected, result) def test_block_mixed_1d_and_2d(self, block): a_2d = np.ones((2, 2)) b_1d = np.array([2, 2]) result = block([[a_2d], [b_1d]]) expected = np.array([[1, 1], [1, 1], [2, 2]]) assert_equal(expected, result) def test_block_complicated(self, block): # a bit more complicated one_2d = np.array([[1, 1, 1]]) two_2d = np.array([[2, 2, 2]]) three_2d = np.array([[3, 3, 3, 3, 3, 3]]) four_1d = np.array([4, 4, 4, 4, 4, 4]) five_0d = np.array(5) six_1d = np.array([6, 6, 6, 6, 6]) zero_2d = np.zeros((2, 6)) expected = np.array([[1, 1, 1, 2, 2, 2], [3, 3, 3, 3, 3, 3], [4, 4, 4, 4, 4, 4], [5, 6, 6, 6, 6, 6], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) result = block([[one_2d, two_2d], [three_2d], [four_1d], [five_0d, six_1d], [zero_2d]]) assert_equal(result, expected) def test_nested(self, block): one = np.array([1, 1, 1]) two = np.array([[2, 2, 2], [2, 2, 2], [2, 2, 2]]) three = np.array([3, 3, 3]) four = np.array([4, 4, 4]) five = np.array(5) six = np.array([6, 6, 6, 6, 6]) zero = np.zeros((2, 6)) result = block([ [ block([ [one], [three], [four] ]), two ], [five, six], [zero] ]) expected = np.array([[1, 1, 1, 2, 2, 2], [3, 3, 3, 2, 2, 2], [4, 4, 4, 2, 2, 2], [5, 6, 6, 6, 6, 6], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]) assert_equal(result, expected) def test_3d(self, block): a000 = np.ones((2, 2, 2), int) * 1 a100 = np.ones((3, 2, 2), int) * 2 a010 = np.ones((2, 3, 2), int) * 3 a001 = np.ones((2, 2, 3), int) * 4 a011 = np.ones((2, 3, 3), int) * 5 a101 = np.ones((3, 2, 3), int) * 6 a110 = np.ones((3, 3, 2), int) * 7 a111 = np.ones((3, 3, 3), int) * 8 result = block([ [ [a000, a001], [a010, a011], ], [ [a100, a101], [a110, a111], ] ]) expected = array([[[1, 1, 4, 4, 4], [1, 1, 4, 4, 4], [3, 3, 5, 5, 5], [3, 3, 5, 5, 5], [3, 3, 5, 5, 5]], [[1, 1, 4, 4, 4], [1, 1, 4, 4, 4], [3, 3, 5, 5, 5], [3, 3, 5, 5, 
5], [3, 3, 5, 5, 5]], [[2, 2, 6, 6, 6], [2, 2, 6, 6, 6], [7, 7, 8, 8, 8], [7, 7, 8, 8, 8], [7, 7, 8, 8, 8]], [[2, 2, 6, 6, 6], [2, 2, 6, 6, 6], [7, 7, 8, 8, 8], [7, 7, 8, 8, 8], [7, 7, 8, 8, 8]], [[2, 2, 6, 6, 6], [2, 2, 6, 6, 6], [7, 7, 8, 8, 8], [7, 7, 8, 8, 8], [7, 7, 8, 8, 8]]]) assert_array_equal(result, expected) def test_block_with_mismatched_shape(self, block): a = np.array([0, 0]) b = np.eye(2) assert_raises(ValueError, block, [a, b]) assert_raises(ValueError, block, [b, a]) to_block = [[np.ones((2,3)), np.ones((2,2))], [np.ones((2,2)), np.ones((2,2))]] assert_raises(ValueError, block, to_block) def test_no_lists(self, block): assert_equal(block(1), np.array(1)) assert_equal(block(np.eye(3)), np.eye(3)) def test_invalid_nesting(self, block): msg = 'depths are mismatched' assert_raises_regex(ValueError, msg, block, [1, [2]]) assert_raises_regex(ValueError, msg, block, [1, []]) assert_raises_regex(ValueError, msg, block, [[1], 2]) assert_raises_regex(ValueError, msg, block, [[], 2]) assert_raises_regex(ValueError, msg, block, [ [[1], [2]], [[3, 4]], [5] # missing brackets ]) def test_empty_lists(self, block): assert_raises_regex(ValueError, 'empty', block, []) assert_raises_regex(ValueError, 'empty', block, [[]]) assert_raises_regex(ValueError, 'empty', block, [[1], []]) def test_tuple(self, block): assert_raises_regex(TypeError, 'tuple', block, ([1, 2], [3, 4])) assert_raises_regex(TypeError, 'tuple', block, [(1, 2), (3, 4)]) def test_different_ndims(self, block): a = 1. b = 2 * np.ones((1, 2)) c = 3 * np.ones((1, 1, 3)) result = block([a, b, c]) expected = np.array([[[1., 2., 2., 3., 3., 3.]]]) assert_equal(result, expected) def test_different_ndims_depths(self, block): a = 1. 
b = 2 * np.ones((1, 2)) c = 3 * np.ones((1, 2, 3)) result = block([[a, b], [c]]) expected = np.array([[[1., 2., 2.], [3., 3., 3.], [3., 3., 3.]]]) assert_equal(result, expected) def test_block_memory_order(self, block): # 3D arr_c = np.zeros((3,)*3, order='C') arr_f = np.zeros((3,)*3, order='F') b_c = [[[arr_c, arr_c], [arr_c, arr_c]], [[arr_c, arr_c], [arr_c, arr_c]]] b_f = [[[arr_f, arr_f], [arr_f, arr_f]], [[arr_f, arr_f], [arr_f, arr_f]]] assert block(b_c).flags['C_CONTIGUOUS'] assert block(b_f).flags['F_CONTIGUOUS'] arr_c = np.zeros((3, 3), order='C') arr_f = np.zeros((3, 3), order='F') # 2D b_c = [[arr_c, arr_c], [arr_c, arr_c]] b_f = [[arr_f, arr_f], [arr_f, arr_f]] assert block(b_c).flags['C_CONTIGUOUS'] assert block(b_f).flags['F_CONTIGUOUS'] def test_block_dispatcher(): class ArrayLike(object): pass a = ArrayLike() b = ArrayLike() c = ArrayLike() assert_equal(list(_block_dispatcher(a)), [a]) assert_equal(list(_block_dispatcher([a])), [a]) assert_equal(list(_block_dispatcher([a, b])), [a, b]) assert_equal(list(_block_dispatcher([[a], [b, [c]]])), [a, b, c]) # don't recurse into non-lists assert_equal(list(_block_dispatcher((a, b))), [(a, b)])
mit
-6,761,586,550,664,110,000
33.545584
87
0.478619
false
3.208653
true
false
false
auvsi-suas/interop
server/auvsi_suas/views/odlcs.py
1
16208
"""Odlcs view.""" from PIL import Image import io import json import logging import os import os.path import re from auvsi_suas.models.gps_position import GpsPosition from auvsi_suas.models.mission_config import MissionConfig from auvsi_suas.models.odlc import Odlc from auvsi_suas.proto import interop_admin_api_pb2 from auvsi_suas.proto import interop_api_pb2 from auvsi_suas.views.decorators import require_login from auvsi_suas.views.decorators import require_superuser from auvsi_suas.views.json import ProtoJsonEncoder from django.contrib.auth.models import User from django.core.files.images import ImageFile from django.http import HttpResponse from django.http import HttpResponseBadRequest from django.http import HttpResponseForbidden from django.http import HttpResponseNotFound from django.utils.decorators import method_decorator from django.views.generic import View from google.protobuf import json_format from sendfile import sendfile logger = logging.getLogger(__name__) ALPHANUMERIC_RE = re.compile(r"^[A-Z0-9]$") ODLC_MAX = 20 # Limit in the rules. ODLC_BUFFER = 2 # Buffer for swaps. ODLC_UPLOAD_LIMIT = (ODLC_MAX + ODLC_BUFFER) * 2 # Account for auto/not. 
def odlc_to_proto(odlc): """Converts an ODLC into protobuf format.""" odlc_proto = interop_api_pb2.Odlc() odlc_proto.id = odlc.pk odlc_proto.mission = odlc.mission.pk odlc_proto.type = odlc.odlc_type if odlc.location is not None: odlc_proto.latitude = odlc.location.latitude odlc_proto.longitude = odlc.location.longitude if odlc.orientation is not None: odlc_proto.orientation = odlc.orientation if odlc.shape is not None: odlc_proto.shape = odlc.shape if odlc.alphanumeric: odlc_proto.alphanumeric = odlc.alphanumeric if odlc.shape_color is not None: odlc_proto.shape_color = odlc.shape_color if odlc.alphanumeric_color is not None: odlc_proto.alphanumeric_color = odlc.alphanumeric_color if odlc.description: odlc_proto.description = odlc.description odlc_proto.autonomous = odlc.autonomous return odlc_proto def validate_odlc_proto(odlc_proto): """Validates ODLC proto, raising ValueError if invalid.""" if not odlc_proto.HasField('mission'): raise ValueError('ODLC mission is required.') try: MissionConfig.objects.get(pk=odlc_proto.mission) except MissionConfig.DoesNotExist: raise ValueError('Mission for ODLC does not exist.') if not odlc_proto.HasField('type'): raise ValueError('ODLC type is required.') if odlc_proto.HasField('latitude') != odlc_proto.HasField('longitude'): raise ValueError('Must specify both latitude and longitude.') if odlc_proto.HasField('latitude') and (odlc_proto.latitude < -90 or odlc_proto.latitude > 90): raise ValueError('Invalid latitude "%f", must be -90 <= lat <= 90' % odlc_proto.latitude) if odlc_proto.HasField('longitude') and (odlc_proto.longitude < -180 or odlc_proto.longitude > 180): raise ValueError('Invalid longitude "%s", must be -180 <= lat <= 180' % odlc_proto.longitude) if (odlc_proto.HasField('alphanumeric') and ALPHANUMERIC_RE.fullmatch(odlc_proto.alphanumeric) is None): raise ValueError('Alphanumeric is invalid.') def update_odlc_from_proto(odlc, odlc_proto): """Sets fields of the ODLC from the proto format.""" odlc.mission_id = 
odlc_proto.mission odlc.odlc_type = odlc_proto.type if odlc_proto.HasField('latitude') and odlc_proto.HasField('longitude'): if odlc.location is None: l = GpsPosition(latitude=odlc_proto.latitude, longitude=odlc_proto.longitude) l.save() odlc.location = l else: odlc.location.latitude = odlc_proto.latitude odlc.location.longitude = odlc_proto.longitude odlc.location.save() else: # Don't delete underlying GPS position in case it's shared by admin. # Just unreference it. odlc.location = None if odlc_proto.HasField('orientation'): odlc.orientation = odlc_proto.orientation else: odlc.orientation = None if odlc_proto.HasField('shape'): odlc.shape = odlc_proto.shape else: odlc.shape = None if odlc_proto.HasField('alphanumeric'): odlc.alphanumeric = odlc_proto.alphanumeric else: odlc.alphanumeric = '' if odlc_proto.HasField('shape_color'): odlc.shape_color = odlc_proto.shape_color else: odlc.shape_color = None if odlc_proto.HasField('alphanumeric_color'): odlc.alphanumeric_color = odlc_proto.alphanumeric_color else: odlc.alphanumeric_color = None if odlc_proto.HasField('description'): odlc.description = odlc_proto.description else: odlc.description = '' if odlc_proto.HasField('autonomous'): odlc.autonomous = odlc_proto.autonomous else: odlc.autonomous = False class Odlcs(View): """POST new odlc.""" @method_decorator(require_login) def dispatch(self, *args, **kwargs): return super(Odlcs, self).dispatch(*args, **kwargs) def get(self, request): # Restrict ODLCs to those for user, and optionally a mission. odlcs = Odlc.objects.filter(user=request.user) if 'mission' in request.GET: try: mission_id = int(request.GET['mission']) except: return HttpResponseBadRequest('Provided invalid mission ID.') odlcs = odlcs.filter(mission=mission_id) # Limit serving to 100 odlcs to prevent slowdown and isolation problems. 
odlcs = odlcs.all()[:100] odlc_protos = [odlc_to_proto(o) for o in odlcs] return HttpResponse(json.dumps(odlc_protos, cls=ProtoJsonEncoder), content_type="application/json") def post(self, request): odlc_proto = interop_api_pb2.Odlc() try: json_format.Parse(request.body, odlc_proto) except Exception as e: return HttpResponseBadRequest( 'Failed to parse request. Error: %s' % str(e)) # Validate ODLC proto fields. try: validate_odlc_proto(odlc_proto) except ValueError as e: return HttpResponseBadRequest(str(e)) # Cannot set ODLC ID on a post. if odlc_proto.HasField('id'): return HttpResponseBadRequest( 'Cannot specify ID for POST request.') # Check that there aren't too many ODLCs uploaded already. odlc_count = Odlc.objects.filter(user=request.user).filter( mission=odlc_proto.mission).count() if odlc_count >= ODLC_UPLOAD_LIMIT: return HttpResponseBadRequest( 'Reached upload limit for ODLCs for mission.') # Build the ODLC object from the request. odlc = Odlc() odlc.user = request.user update_odlc_from_proto(odlc, odlc_proto) odlc.save() return HttpResponse(json_format.MessageToJson(odlc_to_proto(odlc)), content_type="application/json") def find_odlc(request, pk): """Lookup requested Odlc model. Only the request's user's odlcs will be returned. Args: request: Request object pk: Odlc primary key Raises: Odlc.DoesNotExist: pk not found ValueError: Odlc not owned by this user. """ odlc = Odlc.objects.get(pk=pk) # We only let users get their own odlcs, unless a superuser. 
if odlc.user == request.user or request.user.is_superuser: return odlc else: raise ValueError("Accessing odlc %d not allowed" % pk) class OdlcsId(View): """Get or update a specific odlc.""" @method_decorator(require_login) def dispatch(self, *args, **kwargs): return super(OdlcsId, self).dispatch(*args, **kwargs) def get(self, request, pk): try: odlc = find_odlc(request, int(pk)) except Odlc.DoesNotExist: return HttpResponseNotFound('Odlc %s not found' % pk) except ValueError as e: return HttpResponseForbidden(str(e)) return HttpResponse(json_format.MessageToJson(odlc_to_proto(odlc)), content_type="application/json") def put(self, request, pk): try: odlc = find_odlc(request, int(pk)) except Odlc.DoesNotExist: return HttpResponseNotFound('Odlc %s not found' % pk) except ValueError as e: return HttpResponseForbidden(str(e)) odlc_proto = interop_api_pb2.Odlc() try: json_format.Parse(request.body, odlc_proto) except Exception as e: return HttpResponseBadRequest( 'Failed to parse request. Error: %s' % str(e)) # Validate ODLC proto fields. try: validate_odlc_proto(odlc_proto) except ValueError as e: return HttpResponseBadRequest(str(e)) # ID provided in proto must match object. if odlc_proto.HasField('id') and odlc_proto.id != odlc.pk: return HttpResponseBadRequest('ID in request does not match URL.') # Update the ODLC object from the request. update_odlc_from_proto(odlc, odlc_proto) odlc.update_last_modified() odlc.save() return HttpResponse(json_format.MessageToJson(odlc_to_proto(odlc)), content_type="application/json") def delete(self, request, pk): try: odlc = find_odlc(request, int(pk)) except Odlc.DoesNotExist: return HttpResponseNotFound('Odlc %s not found' % pk) except ValueError as e: return HttpResponseForbidden(str(e)) # Remember the thumbnail path so we can delete it from disk. 
thumbnail = odlc.thumbnail.path if odlc.thumbnail else None odlc.delete() if thumbnail: try: os.remove(thumbnail) except OSError as e: logger.warning("Unable to delete thumbnail: %s", e) return HttpResponse("Odlc deleted.") class OdlcsIdImage(View): """Get or add/update odlc image.""" @method_decorator(require_login) def dispatch(self, *args, **kwargs): return super(OdlcsIdImage, self).dispatch(*args, **kwargs) def get(self, request, pk): try: odlc = find_odlc(request, int(pk)) except Odlc.DoesNotExist: return HttpResponseNotFound('Odlc %s not found' % pk) except ValueError as e: return HttpResponseForbidden(str(e)) if not odlc.thumbnail or not odlc.thumbnail.name: return HttpResponseNotFound('Odlc %s has no image' % pk) # Tell sendfile to serve the thumbnail. return sendfile(request, odlc.thumbnail.path) def post(self, request, pk): try: odlc = find_odlc(request, int(pk)) except Odlc.DoesNotExist: return HttpResponseNotFound('Odlc %s not found' % pk) except ValueError as e: return HttpResponseForbidden(str(e)) # Request body is the file f = io.BytesIO(request.body) # Verify that this is a valid image try: i = Image.open(f) i.verify() except IOError as e: return HttpResponseBadRequest(str(e)) if i.format not in ['JPEG', 'PNG']: return HttpResponseBadRequest( 'Invalid image format %s, only JPEG and PNG allowed' % (i.format)) # Clear thumbnail review state. if odlc.thumbnail_approved is not None: odlc.thumbnail_approved = None # Save the thumbnail, note old path. old_path = odlc.thumbnail.path if odlc.thumbnail else None odlc.thumbnail.save('%d.%s' % (odlc.pk, i.format), ImageFile(f)) # ODLC has been modified. odlc.update_last_modified() odlc.save() # Check whether old thumbnail should be deleted. Ignore errors. 
if old_path and odlc.thumbnail.path != old_path: try: os.remove(old_path) except OSError as e: logger.warning("Unable to delete old thumbnail: %s", e) return HttpResponse("Image uploaded.") def put(self, request, pk): """We simply make PUT do the same as POST.""" return self.post(request, pk) def delete(self, request, pk): try: odlc = find_odlc(request, int(pk)) except Odlc.DoesNotExist: return HttpResponseNotFound('Odlc %s not found' % pk) except ValueError as e: return HttpResponseForbidden(str(e)) if not odlc.thumbnail or not odlc.thumbnail.path: return HttpResponseNotFound('Odlc %s has no image' % pk) # Clear thumbnail review state. if odlc.thumbnail_approved is not None: odlc.thumbnail_approved = None odlc.save() path = odlc.thumbnail.path # Remove the thumbnail from the odlc. # Note that this does not delete it from disk! odlc.thumbnail.delete() try: os.remove(path) except OSError as e: logger.warning("Unable to delete thumbnail: %s", e) return HttpResponse("Image deleted.") def odlc_to_review_proto(odlc): """Converts an ODLC into a review proto.""" review_proto = interop_admin_api_pb2.OdlcReview() review_proto.odlc.CopyFrom(odlc_to_proto(odlc)) review_proto.last_modified_timestamp = odlc.last_modified_time.isoformat() if odlc.thumbnail_approved is not None: review_proto.thumbnail_approved = odlc.thumbnail_approved if odlc.description_approved is not None: review_proto.description_approved = odlc.description_approved return review_proto def update_odlc_from_review_proto(odlc, review_proto): """Sets fields of the ODLC from the review.""" if review_proto.HasField('thumbnail_approved'): odlc.thumbnail_approved = review_proto.thumbnail_approved else: odlc.thumbnail_approved = False if review_proto.HasField('description_approved'): odlc.description_approved = review_proto.description_approved else: odlc.description_approved = False class OdlcsAdminReview(View): """Get or update review status for odlcs.""" @method_decorator(require_superuser) def dispatch(self, 
*args, **kwargs): return super(OdlcsAdminReview, self).dispatch(*args, **kwargs) def get(self, request): """Gets all of the odlcs ready for review.""" # Get all odlcs which have a thumbnail to review. odlcs = [t for t in Odlc.objects.all() if t.thumbnail] # Sort odlcs by last edit time. odlcs.sort(key=lambda t: t.last_modified_time) # Convert to review protos. odlc_review_protos = [odlc_to_review_proto(odlc) for odlc in odlcs] return HttpResponse(json.dumps(odlc_review_protos, cls=ProtoJsonEncoder), content_type="application/json") def put(self, request, pk): """Updates the review status of a odlc.""" review_proto = interop_admin_api_pb2.OdlcReview() try: json_format.Parse(request.body, review_proto) except Exception: return HttpResponseBadRequest('Failed to parse review proto.') try: odlc = find_odlc(request, int(pk)) except Odlc.DoesNotExist: return HttpResponseNotFound('Odlc %s not found' % pk) except ValueError as e: return HttpResponseForbidden(str(e)) update_odlc_from_review_proto(odlc, review_proto) odlc.save() return HttpResponse(json_format.MessageToJson( odlc_to_review_proto(odlc)), content_type="application/json")
apache-2.0
-4,131,356,739,532,436,000
34.234783
80
0.627159
false
3.744917
false
false
false
CoinEXchange/CoinX
coindb/db.py
1
2156
import peewee from peewee import * DEBUG = True db = MySQLDatabase(None) class COINModel(Model): class Meta: database = db class Order(COINModel): class Meta: db_table = 'order' ## CREATE TABLE `order` ( ## `address` varchar(40) NOT NULL DEFAULT '', ## `btc_address` varchar(40) DEFAULT NULL, ## `ltc_address` varchar(40) DEFAULT NULL, ## `order_created` timestamp NULL DEFAULT '0000-00-00 00:00:00', ## `order_type` varchar(1) DEFAULT NULL, ## `price` bigint(11) DEFAULT NULL, ## `amount` bigint(20) DEFAULT NULL, ## `amount_settled` bigint(20) DEFAULT NULL, ## `valid_until` timestamp NULL DEFAULT '0000-00-00 00:00:00', ## `confirmations` int(11) DEFAULT NULL, ## PRIMARY KEY (`address`) ## ) ENGINE=InnoDB DEFAULT CHARSET=latin1; ## CREATE INDEX created ON btcltc (order_created); ## CREATE VIEW orders AS SELECT * FROM btcltc WHERE confirmations > '5'; send_to_address = CharField(primary_key=True,max_length=40) sender_address = CharField(max_length=40, default="") receiver_address = CharField(max_length=40, default="") created = DateTimeField(default='0000-00-00 00:00:00') active = DateTimeField(default='0000-00-00 00:00:00') last_update = DateTimeField(default='0000-00-00 00:00:00') order_type = IntegerField(default=0) status = IntegerField(default=0) source = CharField(max_length=3, default=' ') target = CharField(max_length=3, default=' ') amount = BigIntegerField(default=0) amount_settled = BigIntegerField(default=0) price_ask = DecimalField(default=0) price_bought = DecimalField(default=0) amount_ask = BigIntegerField(default=0) amount_bought = BigIntegerField(default=0) amount_send = BigIntegerField(default=0) tid_send = CharField(max_length=40, default="") valid_until = DateTimeField(default='0000-00-00 00:00:00') confirmations = IntegerField(default=0) # status values STATUS_POOL_FREE = 0 STATUS_PREORDER = 100 STATUS_ACTIVE = 200 STATUS_SETTLED = 300 STATUS_TOPAY = 400 STATUS_PAYED = 500 STATUS_CONFIRMED = 550 STATUS_DELETE = 999
gpl-2.0
-7,011,038,118,931,080,000
31.179104
72
0.673469
false
3.32716
false
false
false
dbroudy/django-apollo
apollo/views.py
1
2271
from django.contrib.sites.models import get_current_site from django.http import HttpResponse, HttpResponseNotAllowed, HttpResponseBadRequest from django.shortcuts import render, get_object_or_404 import json import re from apollo.models import Page, Button, Survey, SurveyAnswer, Answer from apollo.forms import SurveyForm, SurveyAnswerFormSet _content_idx = re.compile('^(.+)\[(\d+)\]$') def page(request, slug): page = get_object_or_404(Page, slug=slug, site=get_current_site(request)) content = dict() arrays = dict() for c in page.content.all(): m = _content_idx.match(c.key) if m: base = m.group(1) idx = int(m.group(2)) if not base in arrays: arrays[base] = list() l = len(arrays[base]) if idx >= l: arrays[base] = arrays[base] + [''] * (idx-l+1) arrays[base][idx] = c.content else: content[c.key] = c.content for k,a in arrays.items(): content[k] = a #content = dict((c.key, c.content) for c in page.content.all()) return render(request, page.template, { 'content': content, 'buttons': page.buttons.all(), 'button_width': int(12 / page.buttons.count()), }) def register(request, button_id): button = Button.objects.get(id=button_id) button.clicks += 1 button.save() survey = Survey(button=button) survey.save() for q in button.questions.all(): survey.answers.create(question=q) form = SurveyForm(instance=survey) formset = SurveyAnswerFormSet(instance=survey) return render(request, 'apollo/confirm.html', { 'button': button, 'surveyform': form, 'answerform': formset }) def questions(request, survey_id): if request.method != 'POST': return HttpResponseNotAllowed(['POST']) survey = get_object_or_404(Survey, id=survey_id) form = SurveyForm(request.POST, instance=survey) if form.is_valid(): form.save() formset = SurveyAnswerFormSet(request.POST, instance=survey) formset.save() if not form.is_valid() or not formset.is_valid(): return render(request, 'apollo/forms.html', { 'surveyform': form, 'answerform': formset }, status=202) # return 200 when complete return HttpResponse(status=200)
mit
8,743,457,726,549,913,000
27.3875
84
0.646852
false
3.52093
false
false
false
spiralx/mypy
mypy/spiralx/rest.py
1
1666
#! python3 import logging import requests import re from urllib.parse import urlsplit, urlunsplit, urljoin _logger = logging.getLogger("spiralx.rest") _logger.setLevel(logging.DEBUG) # ------------------------------------------------------------- _has_scheme = lambda u: re.match(r"(?:https?:)?//", u) is not None def slack_urlsplit(url, scheme="http"): if not _has_scheme(url): url = "//" + url if url[-1] != "/": url += "/" return urlsplit(url, scheme=scheme) # ------------------------------------------------------------- class RestApi: """ Wrapper around REST API requests. >>> api = RestApi() >>> api.base http://localhost/ >>> api.get_url("photos") http://localhost/photos >>> api.get_url("photos", 1) http://localhost/photos/1 """ def __init__(self, url="http://localhost/"): self._base = urlunsplit(slack_urlsplit(url)) @property def base(self): return self._base def get_url(self, *params): path = "/".join(str(p) for p in params) return urljoin(self.base_url, path) # ------------------------------------------------------------- if __name__ == '__main__': import argparse parser = argparse.ArgumentParser(description="Make requests to a server with a REST API.") parser.add_argument("-u", "--uri", default="localhost", help="URI of the REST API") parser.add_argument("-t", "--host", default="localhost", help="") parser.add_argument("params", nargs="*", type=int, default=8000, help="Parameters to pass to the request") args = parser.parse_args() print(args)
mit
7,442,002,649,318,131,000
23.865672
110
0.528812
false
4.014458
false
false
false
chinapnr/fish_base
fishbase/fish_crypt.py
1
7173
# coding=utf-8 """ ``fish_crypt`` 包含的是一些加密、编码数据的函数,比如 MD5、SHA256 的计算。 原来这些方法属于 fish_common 模块, 因 fish_common 过于杂乱,故重新进行分类整理。 """ # 2019.01.21 v1.1.6 created by Hu Jun import hashlib import hmac import base64 # 2018.5.8 edit by David Yi, edit from Jia Chunying,#19026 # 2018.6.12 edit by Hu Jun, edit from Jia Chunying,#37 # 2018.10.28 edit by Hu Jun, #99 # 2019.01.06 edit by Hu Jun, #152 # 2019.01.21 v1.1.6 edit by Hu Jun, #200 move fish_common.FishMD5 to fish_crypt.FishMD5 class FishMD5(object): """ 计算普通字符串和一般的文件,对于大文件采取逐步读入的方式,也可以快速计算;基于 Python 的 hashlib.md5() 进行封装和扩展; 举例如下:: print('--- md5 demo ---') print('string md5:', GetMD5.string('hello world!')) file_path = get_abs_filename_with_sub_path('test_conf', 'test_conf.ini')[1]) print('file md5:', GetMD5.file(file_path) big_file_path = get_abs_filename_with_sub_path('test_conf', 'test_conf.ini')[1]) print('big file md5:', GetMD5.big_file(big_file_path) print('string hmac_md5:', GetMD5.hmac_md5('hello world!', 'salt')) print('---') 执行结果:: --- md5 demo --- string md5: fc3ff98e8c6a0d3087d515c0473f8677 file md5: fb7528c9778b2377e30b0f7e4c26fef0 big file md5: fb7528c9778b2377e30b0f7e4c26fef0 string hmac_md5: 191f82804523bfdafe0188bbbddd6587 --- """ @staticmethod def string(s, salt=None): """ 获取一个字符串的 MD5 值 :param: * s: (string) 需要进行 hash 的字符串 * salt: (string) 随机字符串,默认为 None :return: * result: (string) 32 位小写 MD5 值 """ m = hashlib.md5() s = s.encode('utf-8') + salt.encode('utf-8') if salt is not None else s.encode('utf-8') m.update(s) result = m.hexdigest() return result @staticmethod def file(filename): """ 获取一个文件的 MD5 值 :param: * filename: (string) 需要进行 hash 的文件名 :return: * result: (string) 32位小写 MD5 值 """ m = hashlib.md5() with open(filename, 'rb') as f: m.update(f.read()) result = m.hexdigest() return result @staticmethod def big_file(filename): """ 获取一个大文件的 MD5 值 :param: * filename: (string) 需要进行 hash 的大文件路径 :return: * result: (string) 32位小写 MD5 值 """ md5 = hashlib.md5() with open(filename, 'rb') as f: for chunk in 
iter(lambda: f.read(8192), b''): md5.update(chunk) result = md5.hexdigest() return result @staticmethod def hmac_md5(s, salt): """ 获取一个字符串的 使用 salt 加密的 hmac MD5 值 :param: * s: (string) 需要进行 hash 的字符串 * salt: (string) 随机字符串 :return: * result: (string) 32位小写 MD5 值 """ hmac_md5 = hmac.new(salt.encode('utf-8'), s.encode('utf-8'), digestmod=hashlib.md5).hexdigest() return hmac_md5 # v1.0.14 edit by Hu Jun, #59 # 2019.01.21 v1.1.6 edit by Hu Jun, #200 move fish_common.Base64 to fish_crypt.Base64 class FishBase64(object): """ 计算返回文件和字符串的 base64 编码字符串 举例如下:: print('--- FishBase64 demo ---') print('string base64:', FishBase64.string('hello world!')) file_path = get_abs_filename_with_sub_path('test_conf', 'test_conf.ini')[1]) print('file base64:', FishBase64.file(file_path) print('decode base64:', Base64.decode(b'aGVsbG8gd29ybGQ=')) print('---') 执行结果:: --- FishBase64 demo --- string base64: b'aGVsbG8gd29ybGQ=' file base64: (b'IyEvYmluL2Jhc2gKCmNkIC9yb290L3d3dy9zaW5nbGVfcWEKCm5vaHVwIC9yb2 90L2FwcC9weXRob24zNjIvYmluL2d1bmljb3JuIC1jIGd1bmljb3JuLmNvbmYgc2luZ2xlX3NlcnZlcjphcHAK') decode base64: b'hello world' --- """ @staticmethod def string(s): """ 获取一个字符串的 base64 值 :param: * s: (string) 需要进行 base64 编码 的字符串 :return: * (bytes) base64 编码结果 """ return base64.b64encode(s.encode('utf-8')) @staticmethod def file(filename): """ 获取一个文件的 base64 值 :param: * filename: (string) 需要进行 base64 编码 文件路径 :return: * (bytes) base64 编码结果 """ with open(filename, 'rb') as f: return base64.b64encode(f.read()) @staticmethod def decode(s): """ 获取 base64 解码结果 :param: * filename: (string) 需要进行 base64 编码 文件路径 :return: * (bytes) base64 解码结果 """ return base64.b64decode(s) # v1.1.3 edit by Hu Jun, #100 # 2019.01.06 v1.1.6 edit by Hu Jun, #152 # 2019.01.21 v1.1.6 edit by Hu Jun, #200 move fish_common.FishSha256 to fish_crypt.FishSha256 class FishSha256(object): """ 计算字符串和密钥的 sha256 算法哈希值 举例如下:: print('--- GetSha256 demo ---') # 定义哈希字符串 message = 'Hello HMAC' # 定义密钥 secret = '12345678' print('hmac_sha256:', 
GetSha256.hmac_sha256(secret, message)) print('hashlib_sha256:', GetSha256.hashlib_sha256(message)) print('---') 执行结果:: --- GetSha256 demo --- hmac_sha256: 5eb8bdabdaa43f61fb220473028e49d40728444b4322f3093decd9a356afd18f hashlib_sha256: 4a1601381dfb85d6e713853a414f6b43daa76a82956911108512202f5a1c0ce4 --- """ @staticmethod def hmac_sha256(secret, message): """ 获取一个字符串的在密钥 secret 加密下的 sha256 哈希值 :param: * secret: (string) 哈希算法的密钥 * message: (string) 需要进行哈希的字符串 :return: * hashed_str: sha256 算法哈希值 """ hashed_str = hmac.new(secret.encode('utf-8'), message.encode('utf-8'), digestmod=hashlib.sha256).hexdigest() return hashed_str @staticmethod def hashlib_sha256(message): """ 获取一个字符串的 sha256 哈希值 :param: * message: (string) 需要进行哈希的字符串 :return: * hashed_str: sha256 算法哈希值 """ hashlib_sha256 = hashlib.sha256() hashlib_sha256.update(message.encode('utf-8')) hashed_str = hashlib_sha256.hexdigest() return hashed_str
mit
3,705,349,012,075,000,300
26.469828
96
0.555625
false
2.562525
true
false
false
mirestrepo/voxels-at-lems
bvpl/bvpl_octree/PCA/release/reconstruction_error/compute_pca_error_scene.py
1
5454
# -*- coding: utf-8 -*- """ Created on Mon Feb 14, 2011 @author:Isabel Restrepo Compuets PCA reconstruction error. Each block is processed in a separate thread. This script assumes that the pca basis has been computed as gone by extract_pca_kernels.py """ import os; import bvpl_octree_batch import multiprocessing import Queue import time import random import optparse import sys #time.sleep(30); class dbvalue: def __init__(self, index, type): self.id = index # unsigned integer self.type = type # string class pca_error_job(): def __init__(self, pca_info, pca_error_scenes, block_i, block_j, block_k, dim): self.pca_info = pca_info; self.pca_error_scenes = pca_error_scenes; self.block_i = block_i; self.block_j = block_j; self.block_k = block_k; self.dim=dim; def execute_jobs(jobs, num_procs=4): work_queue=multiprocessing.Queue(); result_queue=multiprocessing.Queue(); for job in jobs: work_queue.put(job) for i in range(num_procs): worker= pca_error_worker(work_queue,result_queue) worker.start(); print("worker with name ",worker.name," started!") class pca_error_worker(multiprocessing.Process): def __init__(self,work_queue,result_queue): # base class initialization multiprocessing.Process.__init__(self) # job management stuff self.work_queue = work_queue self.result_queue = result_queue self.kill_received = False def run(self): while not self.kill_received: # get a task try: job = self.work_queue.get_nowait() except Queue.Empty: break start_time = time.time(); print("Computing Error Scene"); bvpl_octree_batch.init_process("bvplComputePCAErrorBlockProcess"); bvpl_octree_batch.set_input_from_db(0,job.pca_info); bvpl_octree_batch.set_input_from_db(1,job.pca_error_scenes); bvpl_octree_batch.set_input_int(2, job.block_i); bvpl_octree_batch.set_input_int(3, job.block_j); bvpl_octree_batch.set_input_int(4, job.block_k); bvpl_octree_batch.set_input_unsigned(5, job.dim); bvpl_octree_batch.run_process(); print ("Runing time for worker:", self.name) print(time.time() - start_time); 
#*******************The Main Algorithm ************************# if __name__=="__main__": bvpl_octree_batch.register_processes(); bvpl_octree_batch.register_datatypes(); #Parse inputs parser = optparse.OptionParser(description='Compute PCA Error Scene'); parser.add_option('--model_dir', action="store", dest="model_dir"); parser.add_option('--pca_dir', action="store", dest="pca_dir"); parser.add_option('--num_cores', action="store", dest="num_cores", type="int", default=4); parser.add_option('--nblocks_x', action="store", dest="nblocks_x", type="int"); parser.add_option('--nblocks_y', action="store", dest="nblocks_y", type="int"); parser.add_option('--nblocks_z', action="store", dest="nblocks_z", type="int"); parser.add_option('--dimension', action="store", dest="dimension", type="int"); options, args = parser.parse_args() model_dir = options.model_dir; pca_dir = options.pca_dir; nblocks_x = options.nblocks_x; nblocks_y = options.nblocks_y; nblocks_z = options.nblocks_z; num_cores = options.num_cores; dimension = options.dimension; if not os.path.isdir(model_dir +"/"): print "Invalid Model Dir" sys.exit(-1); if not os.path.isdir(pca_dir +"/"): print "Invalid PCA Dir" sys.exit(-1); print("Loading Data Scene"); bvpl_octree_batch.init_process("boxmCreateSceneProcess"); bvpl_octree_batch.set_input_string(0, model_dir +"/mean_color_scene.xml"); bvpl_octree_batch.run_process(); (scene_id, scene_type) = bvpl_octree_batch.commit_output(0); data_scene= dbvalue(scene_id, scene_type); #Load pca scenes pca_feature_dim = 125; print("Loading PCA Error Scenes"); bvpl_octree_batch.init_process("bvplLoadPCAErrorSceneProcess"); bvpl_octree_batch.set_input_from_db(0, data_scene); bvpl_octree_batch.set_input_string(1, pca_dir); bvpl_octree_batch.set_input_unsigned(2, pca_feature_dim); #dimension pca feature bvpl_octree_batch.run_process(); (id, type) = bvpl_octree_batch.commit_output(0); pca_scenes = dbvalue(id, type); print("Loading PCA Info"); 
bvpl_octree_batch.init_process("bvplLoadPCAInfoProcess"); bvpl_octree_batch.set_input_string(0, pca_dir); bvpl_octree_batch.run_process(); (id, type) = bvpl_octree_batch.commit_output(0); pca_info = dbvalue(id, type); #Begin multiprocessing work_queue=multiprocessing.Queue(); job_list=[]; #Enqueue jobs all_indeces=[] for block_i in range(0,nblocks_x): for block_j in range(0,nblocks_y): for block_k in range(0,nblocks_z): idx = [block_i, block_j, block_k]; all_indeces.append(idx); random.shuffle(all_indeces); for i in range (0, len(all_indeces)): idx = all_indeces[i]; current_job = pca_error_job(pca_info, pca_scenes, idx[0], idx[1], idx[2], dimension); job_list.append(current_job); execute_jobs(job_list, num_cores);
bsd-2-clause
-1,480,236,010,235,916,800
32.060606
92
0.623029
false
3.301453
false
false
false
SKIRT/PTS
evolve/core/utils.py
1
11501
#!/usr/bin/env python # -*- coding: utf8 -*- # ***************************************************************** # ** PTS -- Python Toolkit for working with SKIRT ** # ** © Astronomical Observatory, Ghent University ** # ***************************************************************** ## \package pts.evolve.utils This is the utility module, with some utility functions of general # use, like list item swap, random utilities and etc. # ----------------------------------------------------------------- # Import standard modules from math import sqrt as math_sqrt # Import the relevant PTS classes and modules from ...core.basics.log import log from ...core.tools.random import prng # ----------------------------------------------------------------- def randomFlipCoin(p): """Returns True with the *p* probability. If *p* is 1, the function will always return True. If *p* is 0, the function will return always False. Example: >>> randomFlipCoin(1.0) True :param p: probability, between 0.0 and 1.0 :rtype: True or False """ if p == 1.0: return True if p == 0.0: return False return prng.random_sample() <= p # ----------------------------------------------------------------- def listSwapElement(lst, indexa, indexb): """ Swaps elements A and B in a list. Example: >>> l = [1, 2, 3] >>> listSwapElement(l, 1, 2) >>> l [1, 3, 2] :param lst: the list :param indexa: the swap element A :param indexb: the swap element B :rtype: None """ lst[indexa], lst[indexb] = lst[indexb], lst[indexa] # ----------------------------------------------------------------- def list2DSwapElement(lst, indexa, indexb): """ Swaps elements A and B in a 2D list (matrix). 
Example: >>> l = [ [1,2,3], [4,5,6] ] >>> list2DSwapElement(l, (0,1), (1,1) ) >>> l [[1, 5, 3], [4, 2, 6]] :param lst: the list :param indexa: the swap element A :param indexb: the swap element B :rtype: None """ temp = lst[indexa[0]][indexa[1]] lst[indexa[0]][indexa[1]] = lst[indexb[0]][indexb[1]] lst[indexb[0]][indexb[1]] = temp # ----------------------------------------------------------------- def raiseException(message, expt=None): """ Raise an exception and logs the message. Example: >>> raiseException('The value is not an integer', ValueError) :param message: the message of exception :param expt: the exception class :rtype: None """ log.critical(message) if expt is None: raise Exception(message) else: raise (expt, message) # ----------------------------------------------------------------- def cmp_individual_raw(a, b): """ Compares two individual raw scores Example: >>> cmp_individual_raw(a, b) :param a: the A individual instance :param b: the B individual instance :rtype: 0 if the two individuals raw score are the same, -1 if the B individual raw score is greater than A and 1 if the A individual raw score is greater than B. .. note:: this function is used to sorte the population individuals """ if a.score < b.score: return -1 if a.score > b.score: return 1 return 0 # ----------------------------------------------------------------- def cmp_individual_scaled(a, b): """ Compares two individual fitness scores, used for sorting population Example: >>> cmp_individual_scaled(a, b) :param a: the A individual instance :param b: the B individual instance :rtype: 0 if the two individuals fitness score are the same, -1 if the B individual fitness score is greater than A and 1 if the A individual fitness score is greater than B. .. 
note:: this function is used to sort the population individuals """ if a.fitness < b.fitness: return -1 if a.fitness > b.fitness: return 1 return 0 # ----------------------------------------------------------------- def importSpecial(name): """ This function will import the *name* module, if fails, it will raise an ImportError exception and a message :param name: the module name :rtype: the module object .. versionadded:: 0.6 The *import_special* function """ from . import constants try: imp_mod = __import__(name) except ImportError: raiseException("Cannot import module %s: %s" % (name, constants.CDefImportList[name]), expt=ImportError) return imp_mod # ----------------------------------------------------------------- class ErrorAccumulator(object): """ An accumulator for the Root Mean Square Error (RMSE) and the Mean Square Error (MSE) """ def __init__(self): """ The constructor ... """ self.acc = 0.0 self.acc_square = 0.0 self.acc_len = 0 # ----------------------------------------------------------------- def reset(self): """ Reset the accumulator """ self.acc_square = 0.0 self.acc = 0.0 self.acc_len = 0 # ----------------------------------------------------------------- def append(self, target, evaluated): """ Add value to the accumulator :param target: the target value :param evaluated: the evaluated value """ self.acc_square += (target - evaluated) ** 2 self.acc += abs(target - evaluated) self.acc_len += 1 # ----------------------------------------------------------------- def __iadd__(self, value): """ The same as append, but you must pass a tuple """ self.append(*value) return self # ----------------------------------------------------------------- def getMean(self): """ Return the mean of the non-squared accumulator """ return self.acc / self.acc_len # ----------------------------------------------------------------- def getSquared(self): """ Returns the squared accumulator """ return self.acc_square # 
----------------------------------------------------------------- def getNonSquared(self): """ Returns the non-squared accumulator """ return self.acc # ----------------------------------------------------------------- def getAdjusted(self): """ Returns the adjusted fitness This fitness is calculated as 1 / (1 + standardized fitness) """ return 1.0 / (1.0 + self.acc) # ----------------------------------------------------------------- def getRMSE(self): """ Return the root mean square error :rtype: float RMSE """ return math_sqrt(self.acc_square / float(self.acc_len)) # ----------------------------------------------------------------- def getMSE(self): """ Return the mean square error :rtype: float MSE """ return self.acc_square / float(self.acc_len) # ----------------------------------------------------------------- class Graph(object): """ The Graph class Example: >>> g = Graph() >>> g.addEdge("a", "b") >>> g.addEdge("b", "c") >>> for node in g: ... print node a b c .. versionadded:: 0.6 The *Graph* class. 
""" def __init__(self): """ The constructor """ self.adjacent = {} # ----------------------------------------------------------------- def __iter__(self): """ Returns an iterator to the all graph elements """ return iter(self.adjacent) # ----------------------------------------------------------------- def addNode(self, node): """ Add the node :param node: the node to add """ if node not in self.adjacent: self.adjacent[node] = {} # ----------------------------------------------------------------- def __iadd__(self, node): """ Add a node using the += operator """ self.addNode(node) return self # ----------------------------------------------------------------- def addEdge(self, a, b): """ Add an edge between two nodes, if the nodes doesn't exists, they will be created :param a: the first node :param b: the second node """ if a not in self.adjacent: self.adjacent[a] = {} if b not in self.adjacent: self.adjacent[b] = {} self.adjacent[a][b] = True self.adjacent[b][a] = True # ----------------------------------------------------------------- def getNodes(self): """ Returns all the current nodes on the graph :rtype: the list of nodes """ return self.adjacent.keys() # ----------------------------------------------------------------- def reset(self): """ Deletes all nodes of the graph """ self.adjacent.clear() # ----------------------------------------------------------------- def getNeighbors(self, node): """ Returns the neighbors of the node :param node: the node """ return self.adjacent[node].keys() # ----------------------------------------------------------------- def __getitem__(self, node): """ Returns the adjacent nodes of the node """ return self.adjacent[node].keys() # ----------------------------------------------------------------- def __repr__(self): ret = "- Graph\n" ret += "\tNode list:\n" for node in self: ret += "\t\tNode [%s] = %s\n" % (node, self.getNeighbors(node)) return ret # ----------------------------------------------------------------- def 
G1DListGetEdgesComposite(mom, dad): """ Get the edges and the merge between the edges of two G1DList individuals :param mom: the mom G1DList individual :param dad: the dad G1DList individual :rtype: a tuple (mom edges, dad edges, merge) """ mom_edges = G1DListGetEdges(mom) dad_edges = G1DListGetEdges(dad) return mom_edges, dad_edges, G1DListMergeEdges(mom_edges, dad_edges) # ----------------------------------------------------------------- def G1DListGetEdges(individual): """ Get the edges of a G1DList individual :param individual: the G1DList individual :rtype: the edges dictionary """ edg = {} ind_list = individual.getInternalList() for i in xrange(len(ind_list)): a, b = ind_list[i], ind_list[i - 1] if a not in edg: edg[a] = [] else: edg[a].append(b) if b not in edg: edg[b] = [] else: edg[b].append(a) return edg # ----------------------------------------------------------------- def G1DListMergeEdges(eda, edb): """ Get the merge between the two individual edges :param eda: the edges of the first G1DList genome :param edb: the edges of the second G1DList genome :rtype: the merged dictionary """ edges = {} for value, near in eda.items(): for adj in near: if (value in edb) and (adj in edb[value]): edges.setdefault(value, []).append(adj) return edges # -----------------------------------------------------------------
agpl-3.0
-2,755,724,891,698,071,600
24.612472
112
0.453565
false
4.440154
false
false
false
aetilley/revscoring
setup.py
1
1234
import os from setuptools import find_packages, setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() def requirements(fname): return [line.strip() for line in open(os.path.join(os.path.dirname(__file__), fname))] setup( name="revscoring", version="0.4.10", # change in revscoring/__init__.py author="Aaron Halfaker", author_email="ahalfaker@wikimedia.org", description=("A set of utilities for generating quality scores for " + \ "MediaWiki revisions"), license="MIT", entry_points = { 'console_scripts': [ 'revscoring = revscoring.revscoring:main', ], }, url="https://github.com/halfak/Revision-Scores", packages=find_packages(), long_description=read('README.rst'), install_requires=requirements("requirements.txt"), classifiers=[ "Development Status :: 3 - Alpha", "Programming Language :: Python", "Programming Language :: Python :: 3", "Environment :: Other Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent" ], )
mit
-5,696,906,683,495,392,000
29.85
77
0.60859
false
4.14094
false
false
false
PaddlePaddle/Paddle
python/paddle/fluid/tests/unittests/test_conv2d_op.py
1
35379
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import unittest import numpy as np import paddle import paddle.fluid.core as core import paddle.fluid as fluid from op_test import OpTest, convert_float_to_uint16, get_numeric_gradient from paddle.fluid.tests.unittests.testsuite import create_op from paddle.fluid import Program, program_guard def conv2d_forward_naive(input, filter, group, conv_param, padding_algorithm='EXPLICIT', data_format='NCHW'): if padding_algorithm not in ["SAME", "VALID", "EXPLICIT"]: raise ValueError("Unknown Attr(padding_algorithm): '%s'. " "It can only be 'SAME' or 'VALID'." % str(padding_algorithm)) if data_format not in ["NCHW", "NHWC"]: raise ValueError("Unknown Attr(data_format): '%s' ." "It can only be 'NCHW' or 'NHWC'." 
% str(data_format)) channel_last = (data_format == "NHWC") if channel_last: input = np.transpose(input, [0, 3, 1, 2]) in_n, in_c, in_h, in_w = input.shape f_n, f_c, f_h, f_w = filter.shape out_n = in_n out_c = f_n assert f_c * group == in_c assert np.mod(out_c, group) == 0 sub_out_c = out_c // group sub_f_n = f_n // group stride, pad, dilation = conv_param['stride'], conv_param['pad'], conv_param[ 'dilation'] # update pad and dilation def _get_padding_with_SAME(input_shape, pool_size, pool_stride): padding = [] for input_size, filter_size, stride_size in zip(input_shape, pool_size, pool_stride): out_size = int((input_size + stride_size - 1) / stride_size) pad_sum = np.max(( (out_size - 1) * stride_size + filter_size - input_size, 0)) pad_0 = int(pad_sum / 2) pad_1 = int(pad_sum - pad_0) padding.append(pad_0) padding.append(pad_1) return padding ksize = filter.shape[2:4] if padding_algorithm == "VALID": pad = [0, 0, 0, 0] elif padding_algorithm == "SAME": dilation = [1, 1] input_data_shape = input.shape[2:4] pad = _get_padding_with_SAME(input_data_shape, ksize, stride) pad_h_0, pad_h_1 = pad[0], pad[0] pad_w_0, pad_w_1 = pad[1], pad[1] if len(pad) == 4: pad_h_0, pad_h_1 = pad[0], pad[1] pad_w_0, pad_w_1 = pad[2], pad[3] out_h = 1 + (in_h + pad_h_0 + pad_h_1 - (dilation[0] * (f_h - 1) + 1)) // stride[0] out_w = 1 + (in_w + pad_w_0 + pad_w_1 - (dilation[1] * (f_w - 1) + 1)) // stride[1] out = np.zeros((out_n, out_c, out_h, out_w)) d_bolck_h = (dilation[0] * (f_h - 1) + 1) d_bolck_w = (dilation[1] * (f_w - 1) + 1) input_pad = np.pad(input, ((0, 0), (0, 0), (pad_h_0, pad_h_1), (pad_w_0, pad_w_1)), mode='constant', constant_values=0) filter_dilation = np.zeros((f_n, f_c, d_bolck_h, d_bolck_w)) filter_dilation[:, :, 0:d_bolck_h:dilation[0], 0:d_bolck_w:dilation[ 1]] = filter for i in range(out_h): for j in range(out_w): for g in range(group): input_pad_masked = \ input_pad[:, g * f_c:(g + 1) * f_c, i * stride[0]:i * stride[0] + d_bolck_h, j * stride[1]:j * stride[1] + 
d_bolck_w] f_sub = filter_dilation[g * sub_f_n:(g + 1) * sub_f_n, :, :, :] # sub_f_n == sub_out_c for k in range(sub_out_c): # Multiplication of Corresponding Elements, then sum all out[:, g * sub_out_c + k, i, j] = \ np.sum(input_pad_masked * f_sub[k, :, :, :], axis=(1, 2, 3)) if channel_last: out = np.transpose(out, [0, 2, 3, 1]) return out, in_n, out_h, out_w, out_c def create_test_cudnn_class(parent): @unittest.skipIf(not core.is_compiled_with_cuda(), "core is not compiled with CUDA") class TestCUDNNCase(parent): def init_kernel_type(self): self.use_cudnn = True self.dtype = np.float32 if core.is_compiled_with_rocm( ) else np.float64 cls_name = "{0}_{1}".format(parent.__name__, "CUDNN") TestCUDNNCase.__name__ = cls_name globals()[cls_name] = TestCUDNNCase def create_test_cudnn_fp16_class(parent, grad_check=True): @unittest.skipIf(not core.is_compiled_with_cuda(), "core is not compiled with CUDA") class TestConv2DCUDNNFp16(parent): def init_kernel_type(self): self.use_cudnn = True self.dtype = np.float16 def test_check_output(self): if core.is_compiled_with_cuda(): place = core.CUDAPlace(0) if core.is_float16_supported(place): self.check_output_with_place(place, atol=2e-2) def test_check_grad_no_filter(self): place = core.CUDAPlace(0) if core.is_float16_supported(place) and grad_check: self.check_grad_with_place( place, ['Input'], 'Output', no_grad_set=set(['Filter'])) def test_check_grad_no_input(self): place = core.CUDAPlace(0) if core.is_float16_supported(place) and grad_check: self.check_grad_with_place( place, ['Filter'], 'Output', no_grad_set=set(['Input'])) cls_name = "{0}_{1}".format(parent.__name__, "CUDNNFp16") TestConv2DCUDNNFp16.__name__ = cls_name globals()[cls_name] = TestConv2DCUDNNFp16 def create_test_cudnn_bf16_class(parent): @unittest.skipIf( not core.is_compiled_with_cuda() or core.cudnn_version() < 8100, "core is not compiled with CUDA and cudnn version need larger than 8.1.0" ) class TestConv2DCUDNNBF16(parent): def get_numeric_grad(self, 
place, check_name): scope = core.Scope() self._check_grad_helper() op = create_op(scope, self.op_type, self.inputs, self.outputs, self.attrs) return get_numeric_gradient(place, scope, op, self.inputs_fp32, check_name, ['Output']) def init_kernel_type(self): self.use_cudnn = True self.no_need_check_grad = True self.dtype = np.uint16 def test_check_output(self): place = core.CUDAPlace(0) self.check_output_with_place(place, atol=1e-2) def test_check_grad_no_filter(self): place = core.CUDAPlace(0) numeric_grads = self.get_numeric_grad(place, 'Input') self.check_grad_with_place( place, ['Input'], 'Output', no_grad_set=set(['Filter']), user_defined_grads=[numeric_grads]) def test_check_grad_no_input(self): place = core.CUDAPlace(0) numeric_grads = self.get_numeric_grad(place, 'Filter') self.check_grad_with_place( place, ['Filter'], 'Output', no_grad_set=set(['Input']), user_defined_grads=[numeric_grads]) cls_name = "{0}_{1}".format(parent.__name__, "CUDNNBF16") TestConv2DCUDNNBF16.__name__ = cls_name globals()[cls_name] = TestConv2DCUDNNBF16 def create_test_channel_last_class(parent): class TestChannelLastCase(parent): def init_data_format(self): self.data_format = "NHWC" def init_test_case_2(self): N, C, H, W = self.input_size self.input_size = [N, H, W, C] cls_name = "{0}_{1}".format(parent.__name__, "ChannelLast") TestChannelLastCase.__name__ = cls_name globals()[cls_name] = TestChannelLastCase def create_test_cudnn_channel_last_class(parent): @unittest.skipIf(not core.is_compiled_with_cuda(), "core is not compiled with CUDA") class TestCudnnChannelLastCase(parent): def init_kernel_type(self): self.use_cudnn = True self.dtype = np.float32 if core.is_compiled_with_rocm( ) else np.float64 def init_data_format(self): self.data_format = "NHWC" def init_test_case_2(self): N, C, H, W = self.input_size self.input_size = [N, H, W, C] cls_name = "{0}_{1}".format(parent.__name__, "CudnnChannelLast") TestCudnnChannelLastCase.__name__ = cls_name globals()[cls_name] = 
TestCudnnChannelLastCase def create_test_cudnn_channel_last_fp16_class(parent, grad_check=True): @unittest.skipIf(not core.is_compiled_with_cuda(), "core is not compiled with CUDA") class TestCudnnChannelLastFp16(parent): def init_kernel_type(self): self.use_cudnn = True self.dtype = np.float16 def test_check_output(self): if core.is_compiled_with_cuda(): place = core.CUDAPlace(0) if core.is_float16_supported(place): self.check_output_with_place(place, atol=2e-2) def test_check_grad_no_filter(self): place = core.CUDAPlace(0) if core.is_float16_supported(place) and grad_check: self.check_grad_with_place( place, ['Input'], 'Output', no_grad_set=set(['Filter'])) def test_check_grad_no_input(self): place = core.CUDAPlace(0) if core.is_float16_supported(place) and grad_check: self.check_grad_with_place( place, ['Filter'], 'Output', no_grad_set=set(['Input'])) def init_data_format(self): self.data_format = "NHWC" def init_test_case_2(self): N, C, H, W = self.input_size self.input_size = [N, H, W, C] cls_name = "{0}_{1}".format(parent.__name__, "CudnnChannelLastFp16") TestCudnnChannelLastFp16.__name__ = cls_name globals()[cls_name] = TestCudnnChannelLastFp16 def create_test_padding_SAME_class(parent): class TestPaddingSMAECase(parent): def init_paddings(self): self.pad = [0, 0] self.padding_algorithm = "SAME" cls_name = "{0}_{1}".format(parent.__name__, "PaddingSAMEOp") TestPaddingSMAECase.__name__ = cls_name globals()[cls_name] = TestPaddingSMAECase def create_test_padding_VALID_class(parent): class TestPaddingVALIDCase(parent): def init_paddings(self): self.pad = [1, 1] self.padding_algorithm = "VALID" cls_name = "{0}_{1}".format(parent.__name__, "PaddingVALIDOp") TestPaddingVALIDCase.__name__ = cls_name globals()[cls_name] = TestPaddingVALIDCase def create_test_cudnn_padding_SAME_class(parent): @unittest.skipIf(not core.is_compiled_with_cuda(), "core is not compiled with CUDA") class TestCUDNNPaddingSMAECase(parent): def init_kernel_type(self): self.use_cudnn = True 
self.dtype = np.float32 if core.is_compiled_with_rocm( ) else np.float64 def init_paddings(self): self.pad = [1, 1] self.padding_algorithm = "SAME" cls_name = "{0}_{1}".format(parent.__name__, "CudnnPaddingSAMEOp") TestCUDNNPaddingSMAECase.__name__ = cls_name globals()[cls_name] = TestCUDNNPaddingSMAECase def create_test_cudnn_padding_VALID_class(parent): @unittest.skipIf(not core.is_compiled_with_cuda(), "core is not compiled with CUDA") class TestCUDNNPaddingVALIDCase(parent): def init_kernel_type(self): self.use_cudnn = True self.dtype = np.float32 if core.is_compiled_with_rocm( ) else np.float64 def init_paddings(self): self.pad = [1, 1] self.padding_algorithm = "VALID" cls_name = "{0}_{1}".format(parent.__name__, "CudnnPaddingVALIDOp") TestCUDNNPaddingVALIDCase.__name__ = cls_name globals()[cls_name] = TestCUDNNPaddingVALIDCase class TestConv2DOp(OpTest): def setUp(self): self.op_type = "conv2d" self.use_cudnn = False self.exhaustive_search = False self.use_cuda = False self.use_mkldnn = False self.fuse_relu_before_depthwise_conv = False self.data_format = "AnyLayout" self.dtype = np.float64 self.init_kernel_type() self.init_group() self.init_dilation() self.init_test_case() conv2d_param = { 'stride': self.stride, 'pad': self.pad, 'dilation': self.dilations } if self.is_bfloat16_op(): input = np.random.random(self.input_size).astype(np.float32) filter = np.random.uniform(-1, 1, self.filter_size).astype(np.float32) else: input = np.random.random(self.input_size).astype(self.dtype) filter = np.random.uniform(-1, 1, self.filter_size).astype(self.dtype) if not self.has_cuda(): self.fuse_relu_before_depthwise_conv = False if self.fuse_relu_before_depthwise_conv: input = input - 0.5 input -= (input < 0) * 0.1 input += (input >= 0) * 0.1 input2 = np.maximum(input, 0.0) else: input2 = input output, _, _, _, _ = conv2d_forward_naive(input2, filter, self.groups, conv2d_param) if self.is_bfloat16_op(): output = output.astype(np.float32) self.inputs = { 'Input': 
convert_float_to_uint16(input), 'Filter': convert_float_to_uint16(filter) } self.inputs_fp32 = { 'Input': OpTest.np_dtype_to_fluid_dtype(input), 'Filter': OpTest.np_dtype_to_fluid_dtype(filter) } else: output = output.astype(self.dtype) self.inputs = { 'Input': OpTest.np_dtype_to_fluid_dtype(input), 'Filter': OpTest.np_dtype_to_fluid_dtype(filter) } self.attrs = { 'strides': self.stride, 'paddings': self.pad, 'groups': self.groups, 'dilations': self.dilations, 'use_cudnn': self.use_cudnn, 'use_mkldnn': self.use_mkldnn, 'data_format': self.data_format, 'fuse_relu_before_depthwise_conv': self.fuse_relu_before_depthwise_conv, 'exhaustive_search': self.exhaustive_search } self.outputs = {'Output': output} def has_cuda(self): return core.is_compiled_with_cuda() and (self.use_cudnn or self.use_cuda) def test_check_output(self): place = core.CUDAPlace(0) if self.has_cuda() else core.CPUPlace() # TODO(wangzhongpu): support mkldnn op in dygraph mode self.check_output_with_place( place, atol=1e-5, check_dygraph=(self.use_mkldnn == False)) def test_check_grad(self): if self.dtype == np.float16 or (hasattr(self, "no_need_check_grad") and self.no_need_check_grad == True): return place = core.CUDAPlace(0) if self.has_cuda() else core.CPUPlace() # TODO(wangzhongpu): support mkldnn op in dygraph mode self.check_grad_with_place( place, {'Input', 'Filter'}, 'Output', max_relative_error=0.02, check_dygraph=(self.use_mkldnn == False)) def test_check_grad_no_filter(self): if self.dtype == np.float16 or (hasattr(self, "no_need_check_grad") and self.no_need_check_grad == True): return place = core.CUDAPlace(0) if self.has_cuda() else core.CPUPlace() # TODO(wangzhongpu): support mkldnn op in dygraph mode self.check_grad_with_place( place, ['Input'], 'Output', max_relative_error=0.02, no_grad_set=set(['Filter']), check_dygraph=(self.use_mkldnn == False)) def test_check_grad_no_input(self): if self.dtype == np.float16 or (hasattr(self, "no_need_check_grad") and self.no_need_check_grad == 
True): return place = core.CUDAPlace(0) if self.has_cuda() else core.CPUPlace() # TODO(wangzhongpu): support mkldnn op in dygraph mode self.check_grad_with_place( place, ['Filter'], 'Output', no_grad_set=set(['Input']), check_dygraph=(self.use_mkldnn == False)) def init_test_case(self): self.pad = [0, 0] self.stride = [1, 1] self.input_size = [2, 3, 5, 5] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [6, f_c, 3, 3] def init_test_case_2(self): pass def init_dilation(self): self.dilations = [1, 1] def init_group(self): self.groups = 1 def init_kernel_type(self): pass class TestWithPad(TestConv2DOp): def init_test_case(self): self.pad = [1, 1] self.stride = [1, 1] self.input_size = [2, 3, 5, 5] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [6, f_c, 3, 3] class TestWithStride(TestConv2DOp): def init_test_case(self): self.pad = [1, 1] self.stride = [2, 2] self.input_size = [2, 3, 6, 6] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [6, f_c, 3, 3] class TestWithGroup(TestConv2DOp): def init_test_case(self): self.pad = [0, 0] self.stride = [1, 1] self.input_size = [2, 3, 5, 5] # NCHW self.group = 3 assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [18, f_c, 3, 3] class TestWith1x1(TestConv2DOp): def init_test_case(self): self.pad = [0, 0] self.stride = [1, 1] self.input_size = [2, 3, 5, 5] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [120, f_c, 1, 1] def init_group(self): self.groups = 3 class TestWithDepthWise3x3(TestConv2DOp): def init_test_case(self): self.pad = [1, 1] self.stride = [1, 1] self.input_size = [3, 4, 10, 10] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups 
self.filter_size = [12, f_c, 3, 3] def init_dilation(self): self.dilations = [2, 2] def init_group(self): self.groups = 4 class TestWithDepthWise5x5(TestConv2DOp): def init_test_case(self): self.pad = [0, 0] self.stride = [1, 1] self.input_size = [2, 4, 10, 10] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [8, f_c, 5, 5] def init_group(self): self.groups = 4 class TestWithDepthWise7x7(TestConv2DOp): def init_test_case(self): self.pad = [1, 1] self.stride = [2, 2] self.input_size = [2, 8, 10, 10] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [16, f_c, 7, 7] def init_group(self): self.groups = 8 class TestWithDilation(TestConv2DOp): def init_test_case(self): self.pad = [0, 0] self.stride = [1, 1] self.input_size = [2, 3, 10, 10] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [12, f_c, 3, 3] def init_dilation(self): self.dilations = [2, 2] def init_group(self): self.groups = 3 class TestWithInput1x1Filter1x1(TestConv2DOp): def init_test_case(self): self.pad = [0, 0] self.stride = [1, 1] self.input_size = [100, 3, 1, 1] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [120, f_c, 1, 1] def init_group(self): self.groups = 3 #----------------Conv2DCUDNN---------------- create_test_cudnn_class(TestConv2DOp) create_test_cudnn_class(TestWithPad) create_test_cudnn_class(TestWithStride) create_test_cudnn_class(TestWithGroup) create_test_cudnn_class(TestWith1x1) create_test_cudnn_class(TestWithInput1x1Filter1x1) #----------------Conv2DCUDNN fp16---------------- create_test_cudnn_fp16_class(TestConv2DOp, grad_check=False) create_test_cudnn_fp16_class(TestWithPad, grad_check=False) create_test_cudnn_fp16_class(TestWithStride, grad_check=False) create_test_cudnn_fp16_class(TestWithGroup, 
grad_check=False) create_test_cudnn_fp16_class(TestWith1x1, grad_check=False) create_test_cudnn_fp16_class(TestWithInput1x1Filter1x1, grad_check=False) #----------------Conv2DCUDNN bf16---------------- create_test_cudnn_bf16_class(TestConv2DOp) create_test_cudnn_bf16_class(TestWithPad) create_test_cudnn_bf16_class(TestWithStride) create_test_cudnn_bf16_class(TestWithGroup) create_test_cudnn_bf16_class(TestWith1x1) create_test_cudnn_bf16_class(TestWithInput1x1Filter1x1) class TestCUDNNExhaustiveSearch(TestConv2DOp): def init_kernel_type(self): self.use_cudnn = True self.exhaustive_search = True self.dtype = np.float32 if core.is_compiled_with_rocm() else np.float64 class TestConv2DOpError(unittest.TestCase): def test_errors(self): with program_guard(Program(), Program()): def test_Variable(): # the input of conv2d must be Variable. x1 = fluid.create_lod_tensor( np.array([-1, 3, 5, 5]), [[1, 1, 1, 1]], fluid.CPUPlace()) fluid.layers.conv2d(x1, 1, 1) self.assertRaises(TypeError, test_Variable) def test_dtype(): # the input dtype of conv2d must be float16 or float32 or float64 # float16 only can be set on GPU place x2 = fluid.layers.data( name='x2', shape=[3, 4, 5, 6], dtype="int32") fluid.layers.conv2d(x2, 1, 1) self.assertRaises(TypeError, test_dtype) # Please Don't remove the following code. # Currently, CI use cudnn V5.0 which not support dilation conv. 
# class TestCUDNNWithDilation(TestWithDilation): # def init_op_type(self): # self.op_type = "conv_cudnn" # ---- test asymmetric padding ---- class TestConv2DOp_v2(OpTest): def setUp(self): self.op_type = "conv2d" self.use_cudnn = False self.exhaustive_search = False self.use_cuda = False self.use_mkldnn = False self.fuse_relu_before_depthwise_conv = False self.dtype = np.float64 self.init_kernel_type() self.init_group() self.init_dilation() self.init_data_format() self.init_test_case() self.init_paddings() self.init_test_case_2() conv2d_param = { 'stride': self.stride, 'pad': self.pad, 'dilation': self.dilations } input = np.random.random(self.input_size).astype(self.dtype) if not self.has_cuda(): self.fuse_relu_before_depthwise_conv = False if self.fuse_relu_before_depthwise_conv: input = input - 0.5 input -= (input < 0) * 0.1 input += (input >= 0) * 0.1 input2 = np.maximum(input, 0.0) else: input2 = input filter = np.random.uniform(-1, 1, self.filter_size).astype(self.dtype) output, _, _, _, _ = conv2d_forward_naive( input2, filter, self.groups, conv2d_param, self.padding_algorithm, self.data_format) output = output.astype(self.dtype) self.inputs = { 'Input': OpTest.np_dtype_to_fluid_dtype(input), 'Filter': OpTest.np_dtype_to_fluid_dtype(filter) } self.attrs = { 'strides': self.stride, 'paddings': self.pad, 'padding_algorithm': self.padding_algorithm, 'groups': self.groups, 'dilations': self.dilations, 'use_cudnn': self.use_cudnn, 'use_mkldnn': self.use_mkldnn, 'data_format': self.data_format, 'fuse_relu_before_depthwise_conv': self.fuse_relu_before_depthwise_conv, 'exhaustive_search': self.exhaustive_search } self.outputs = {'Output': output} def has_cuda(self): return core.is_compiled_with_cuda() and (self.use_cudnn or self.use_cuda) def test_check_output(self): # TODO(wangzhongpu): support mkldnn op in dygraph mode place = core.CUDAPlace(0) if self.has_cuda() else core.CPUPlace() self.check_output_with_place( place, atol=1e-5, check_dygraph=(self.use_mkldnn == 
False)) def test_check_grad(self): # TODO(wangzhongpu): support mkldnn op in dygraph mode if self.dtype == np.float16: return place = core.CUDAPlace(0) if self.has_cuda() else core.CPUPlace() self.check_grad_with_place( place, {'Input', 'Filter'}, 'Output', max_relative_error=0.02, check_dygraph=(self.use_mkldnn == False)) def test_check_grad_no_filter(self): # TODO(wangzhongpu): support mkldnn op in dygraph mode if self.dtype == np.float16: return place = core.CUDAPlace(0) if self.has_cuda() else core.CPUPlace() self.check_grad_with_place( place, ['Input'], 'Output', max_relative_error=0.02, no_grad_set=set(['Filter']), check_dygraph=(self.use_mkldnn == False)) def test_check_grad_no_input(self): # TODO(wangzhongpu): support mkldnn op in dygraph mode if self.dtype == np.float16: return place = core.CUDAPlace(0) if self.has_cuda() else core.CPUPlace() self.check_grad_with_place( place, ['Filter'], 'Output', no_grad_set=set(['Input']), check_dygraph=(self.use_mkldnn == False)) def init_test_case(self): self.pad = [0, 0] self.stride = [1, 2] self.input_size = [2, 3, 5, 5] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [6, f_c, 4, 3] def init_dilation(self): self.dilations = [1, 1] def init_group(self): self.groups = 1 def init_kernel_type(self): pass def init_paddings(self): self.pad = [0, 0] self.padding_algorithm = "EXPLICIT" def init_data_format(self): self.data_format = "NCHW" def init_test_case_2(self): pass class TestConv2DOp_AsyPadding(TestConv2DOp_v2): def init_paddings(self): self.pad = [0, 0, 1, 2] self.padding_algorithm = "EXPLICIT" class TestWithPad_AsyPadding(TestConv2DOp_v2): def init_test_case(self): self.stride = [1, 1] self.input_size = [2, 3, 5, 5] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [6, f_c, 3, 3] def init_paddings(self): self.pad = [2, 1, 3, 2] self.padding_algorithm = "EXPLICIT" class 
TestWithStride_AsyPadding(TestConv2DOp_v2): def init_test_case(self): self.stride = [2, 2] self.input_size = [2, 3, 6, 6] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [6, f_c, 3, 3] def init_paddings(self): self.pad = [2, 1, 3, 2] self.padding_algorithm = "EXPLICIT" class TestWithGroup_AsyPadding(TestConv2DOp_v2): def init_test_case(self): self.pad = [0, 0] self.stride = [1, 2] self.input_size = [2, 3, 5, 5] # NCHW self.group = 3 assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [24, f_c, 4, 3] class TestWith1x1_AsyPadding(TestConv2DOp_v2): def init_test_case(self): self.stride = [1, 1] self.input_size = [2, 3, 5, 5] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [120, f_c, 1, 1] def init_group(self): self.groups = 3 def init_paddings(self): self.pad = [2, 2, 4, 0] self.padding_algorithm = "EXPLICIT" class TestWithDepthWise3x3_AsyPadding(TestConv2DOp_v2): def init_test_case(self): self.stride = [1, 1] self.input_size = [3, 4, 10, 10] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [16, f_c, 3, 3] def init_dilation(self): self.dilations = [2, 2] def init_group(self): self.groups = 4 def init_paddings(self): self.pad = [1, 3, 2, 1] self.padding_algorithm = "EXPLICIT" class TestWithDepthWise5x5_AsyPadding(TestConv2DOp_v2): def init_test_case(self): self.stride = [1, 1] self.input_size = [2, 4, 10, 10] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [8, f_c, 5, 5] def init_group(self): self.groups = 4 def init_paddings(self): self.pad = [0, 1, 1, 0] self.padding_algorithm = "EXPLICIT" class TestWithDepthWise7x7_AsyPadding(TestConv2DOp_v2): def init_test_case(self): self.stride = [2, 2] self.input_size = [2, 8, 10, 10] # NCHW 
assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [16, f_c, 7, 7] def init_group(self): self.groups = 8 def init_paddings(self): self.pad = [1, 3, 4, 1] self.padding_algorithm = "EXPLICIT" class TestWithDilation_AsyPadding(TestConv2DOp_v2): def init_test_case(self): self.stride = [1, 1] self.input_size = [2, 3, 10, 10] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [24, f_c, 3, 3] def init_dilation(self): self.dilations = [2, 2] def init_group(self): self.groups = 3 def init_paddings(self): self.pad = [0, 1, 3, 0] self.padding_algorithm = "EXPLICIT" class TestWithInput1x1Filter1x1_AsyPadding(TestConv2DOp_v2): def init_test_case(self): self.stride = [1, 1] self.input_size = [40, 3, 1, 1] # NCHW assert np.mod(self.input_size[1], self.groups) == 0 f_c = self.input_size[1] // self.groups self.filter_size = [120, f_c, 1, 1] def init_group(self): self.groups = 3 def init_paddings(self): self.pad = [0, 3, 4, 0] self.padding_algorithm = "EXPLICIT" create_test_cudnn_class(TestConv2DOp_AsyPadding) create_test_cudnn_class(TestWithPad_AsyPadding) create_test_cudnn_class(TestWithStride_AsyPadding) create_test_cudnn_class(TestWithGroup_AsyPadding) create_test_cudnn_class(TestWith1x1_AsyPadding) create_test_cudnn_class(TestWithInput1x1Filter1x1_AsyPadding) #---------- test SAME VALID ----------- create_test_padding_SAME_class(TestConv2DOp_AsyPadding) create_test_padding_SAME_class(TestWithPad_AsyPadding) create_test_padding_SAME_class(TestWithStride_AsyPadding) create_test_padding_SAME_class(TestWithGroup_AsyPadding) create_test_padding_SAME_class(TestWithInput1x1Filter1x1_AsyPadding) create_test_padding_VALID_class(TestConv2DOp_AsyPadding) create_test_padding_VALID_class(TestWithPad_AsyPadding) create_test_padding_VALID_class(TestWithStride_AsyPadding) create_test_padding_VALID_class(TestWithGroup_AsyPadding) 
create_test_padding_VALID_class(TestWithInput1x1Filter1x1_AsyPadding) create_test_cudnn_padding_SAME_class(TestConv2DOp_AsyPadding) create_test_cudnn_padding_SAME_class(TestWithPad_AsyPadding) create_test_cudnn_padding_SAME_class(TestWithStride_AsyPadding) create_test_cudnn_padding_SAME_class(TestWithGroup_AsyPadding) create_test_cudnn_padding_SAME_class(TestWithInput1x1Filter1x1_AsyPadding) create_test_cudnn_padding_VALID_class(TestConv2DOp_AsyPadding) create_test_cudnn_padding_VALID_class(TestWithPad_AsyPadding) create_test_cudnn_padding_VALID_class(TestWithStride_AsyPadding) create_test_cudnn_padding_VALID_class(TestWithGroup_AsyPadding) create_test_cudnn_padding_VALID_class(TestWithInput1x1Filter1x1_AsyPadding) # ------------ test channel last --------- create_test_channel_last_class(TestConv2DOp_AsyPadding) create_test_channel_last_class(TestWithPad_AsyPadding) create_test_channel_last_class(TestWithGroup_AsyPadding) create_test_channel_last_class(TestWith1x1_AsyPadding) create_test_channel_last_class(TestWithInput1x1Filter1x1_AsyPadding) create_test_cudnn_channel_last_class(TestConv2DOp_AsyPadding) create_test_cudnn_channel_last_class(TestWithPad_AsyPadding) create_test_cudnn_channel_last_class(TestWithStride_AsyPadding) create_test_cudnn_channel_last_class(TestWithGroup_AsyPadding) create_test_cudnn_channel_last_class(TestWithDilation_AsyPadding) create_test_cudnn_channel_last_fp16_class( TestConv2DOp_AsyPadding, grad_check=False) create_test_cudnn_channel_last_fp16_class( TestWithPad_AsyPadding, grad_check=False) create_test_cudnn_channel_last_fp16_class( TestWithStride_AsyPadding, grad_check=False) create_test_cudnn_channel_last_fp16_class( TestWithGroup_AsyPadding, grad_check=False) create_test_cudnn_channel_last_fp16_class( TestWithDilation_AsyPadding, grad_check=False) if __name__ == '__main__': unittest.main()
apache-2.0
-7,622,533,692,147,087,000
34.27318
81
0.578027
false
3.215104
true
false
false
mizdebsk/javapackages
java-utils/maven_depmap.py
1
11246
# # Copyright (c) 2014, Red Hat, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the # distribution. # 3. Neither the name of Red Hat nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Stanislav Ochotnicky <sochotnicky@redhat.com> # # this script is used by add_maven_depmap rpm macro to generate # mapping between maven groupId:artifactId and jar file in our local # filesystem (i.e. 
%{_javadir}) # rpm macro expects to find this file as %{_javadir}-utils/maven_depmap.py from optparse import OptionParser import os import shutil import sys from os.path import basename, dirname import zipfile from time import gmtime, strftime from copy import deepcopy from javapackages.maven.pom import POM from javapackages.metadata.artifact import MetadataArtifact from javapackages.metadata.alias import MetadataAlias from javapackages.metadata.metadata import Metadata from javapackages.common.exception import JavaPackagesToolsException class PackagingTypeMissingFile(JavaPackagesToolsException): def __init__(self, pom_path): self.args=("Packaging type is not 'pom' and no artifact path has been provided for POM %s" % pom_path,) class IncompatibleFilenames(JavaPackagesToolsException): def __init__(self, pom_path, jar_path): self.args=("Filenames of POM %s and JAR %s does not match properly. Check that JAR subdirectories matches '.' in pom name." % (pom_path, jar_path),) class ExtensionsDontMatch(JavaPackagesToolsException): def __init__(self, coordinates_ext, file_ext): self.args=("Extensions don't match: '%s' != '%s'" % (coordinates_ext, file_ext),) class MissingJarFile(JavaPackagesToolsException): def __init__(self): self.args=("JAR seems to be missing in standard directories. 
Make sure you have installed it",) class UnknownFileExtension(JavaPackagesToolsException): def __init__(self, jar_path): self.args=("Unknown file extension: %s" % (jar_path),) def _print_path_with_dirs(path, base): print(path) path = dirname(path) while path != base and path != '/': print("%dir " + path) path = dirname(path) def _make_files_versioned(versions, pom_path, jar_path, pom_base, jar_base): """Make pom and jar file versioned""" versions = list(set(versions.split(','))) vpom_path = pom_path vjar_path = jar_path ret_pom_path = pom_path ret_jar_path = jar_path # pom if ':' not in vpom_path: root, ext = os.path.splitext(vpom_path) symlink = False for ver in sorted(versions): dest = "%s-%s%s" % (root, ver, ext) if not symlink: shutil.copy(os.path.realpath(vpom_path), dest) symlink = True vpom_path = dest ret_pom_path = dest else: os.symlink(basename(vpom_path), dest) # output file path for file lists _print_path_with_dirs(dest, pom_base) # remove unversioned pom os.remove(pom_path) # jar if vjar_path: root, ext = os.path.splitext(vjar_path) symlink = False for ver in sorted(versions): dest = "%s-%s%s" % (root, ver, ext) if not symlink: shutil.copy(os.path.realpath(vjar_path), dest) symlink = True vjar_path = dest ret_jar_path = dest else: os.symlink(basename(vjar_path), dest) # output file path for file lists _print_path_with_dirs(dest, jar_base) # remove unversioned jar os.remove(jar_path) # return paths to versioned, but regular files (not symlinks) return ret_pom_path, ret_jar_path # Add a file to a ZIP archive (or JAR, WAR, ...) unless the file # already exists in the archive. Provided by Tomas Radej. def append_if_missing(archive_name, file_name, file_contents): archive = zipfile.ZipFile(archive_name, 'a') try: if file_name not in archive.namelist(): archive.writestr(file_name, file_contents) finally: archive.close() # Inject pom.properties if JAR doesn't have one. This is necessary to # identify the origin of JAR files that are present in the repository. 
def inject_pom_properties(jar_path, artifact): if not zipfile.is_zipfile(jar_path): return props_path = "META-INF/maven/{a.groupId}/{a.artifactId}/pom.properties".format(a=artifact) properties = """#Generated by Java Packages Tools version={a.version} groupId={a.groupId} artifactId={a.artifactId} """.format(a=artifact) if artifact.extension: properties = properties + \ "extension={ext}\n".format(ext=artifact.extension) if artifact.classifier: properties = properties + \ "classifier={clas}\n".format(clas=artifact.classifier) append_if_missing(jar_path, props_path, properties) def add_compat_versions(artifact, versions): if not versions: return artifact artifact.compatVersions = versions.split(',') return artifact def add_aliases(artifact, additions): if not additions: return artifact aliases = additions.split(',') result = set() for a in aliases: alias = MetadataAlias.from_mvn_str(a) alias.extension = artifact.extension result.add(alias) artifact.aliases = result return artifact def write_metadata(metadata_file, artifacts): if os.path.exists(metadata_file): metadata = Metadata.create_from_file(metadata_file) else: metadata = Metadata() # pylint:disable=E1103 metadata.artifacts += deepcopy(artifacts) metadata.write_to_file(metadata_file) def _main(): usage="usage: %prog [options] metadata_path pom_path|<MVN spec> [jar_path]" parser = OptionParser(usage=usage) parser.add_option("-a","--append",type="str", help="Additional depmaps to add (gid:aid) [default: %default]") parser.add_option('-r', '--versions', type="str", help='Additional versions to add for each depmap') parser.add_option('-n', '--namespace', type="str", help='Namespace to use for generated fragments', default="") parser.add_option('--pom-base', type="str", help='Base path under which POM files are installed', default="") parser.add_option('--jar-base', type="str", help='Base path under which JAR files are installed', default="") parser.set_defaults(append=None) (options, args) = parser.parse_args() 
append_deps = options.append add_versions = options.versions namespace = options.namespace pom_base = options.pom_base jar_base = options.jar_base if len(args) < 2: parser.error("Incorrect number of arguments") # These will fail when incorrect number of arguments is given. metadata_path = args[0].strip() pom_path = args[1].strip() jar_path = None artifact = None have_pom = False if len(args) == 3: jar_path = args[2].strip() if ':' in pom_path: pom_str = pom_path.rsplit('/')[-1] artifact = MetadataArtifact.from_mvn_str(pom_str) artifact_ext = artifact.extension or "jar" file_ext = os.path.splitext(jar_path)[1][1:] if artifact_ext != file_ext: raise ExtensionsDontMatch(artifact_ext, file_ext) if artifact.extension == 'jar': artifact.extension = '' if not artifact.version: parser.error("Artifact definition has to include version") else: artifact = MetadataArtifact.from_pom(pom_path) ext = os.path.splitext(jar_path)[1][1:] if ext != "jar": artifact.extension = ext have_pom = True if artifact: inject_pom_properties(jar_path, artifact) else: # looks like POM only artifact if ':' not in pom_path: artifact = MetadataArtifact.from_pom(pom_path) have_pom = True if POM(pom_path).packaging != "pom": raise PackagingTypeMissingFile(pom_path) else: sys.exit("JAR file path must be specified when using artifact coordinates") # output file path for file lists print(metadata_path) artifact = add_compat_versions(artifact, add_versions) if add_versions: pom_path, jar_path = _make_files_versioned(add_versions, pom_path, jar_path, pom_base, jar_base) if namespace: artifact.namespace = namespace artifact.properties["xmvn.resolver.disableEffectivePom"] = "true" buildroot = os.environ.get('RPM_BUILD_ROOT') am = [] if jar_path: metadata_jar_path = os.path.abspath(jar_path) artifact.path = metadata_jar_path.replace(buildroot, "") if buildroot else metadata_jar_path artifact = add_aliases(artifact, append_deps) if artifact.extension == "jar": artifact.extension = "" am.append(artifact.copy()) 
# output file path for file list (if it's not versioned) if not add_versions: _print_path_with_dirs(jar_path, jar_base) if have_pom: metadata_pom_path = os.path.abspath(pom_path) artifact.path = metadata_pom_path.replace(buildroot, "") if buildroot else metadata_pom_path artifact.extension = "pom" artifact.aliases = None artifact = add_aliases(artifact, append_deps) am.append(artifact.copy()) # output file path for file list (if it's not versioned) if not add_versions: _print_path_with_dirs(pom_path, pom_base) write_metadata(metadata_path, am) if __name__ == "__main__": try: _main() except JavaPackagesToolsException as e: sys.exit(e)
bsd-3-clause
-2,782,331,628,701,944,000
34.701587
156
0.649475
false
3.87259
false
false
false
jonathonwalz/ansible
lib/ansible/modules/cloud/amazon/ec2_group.py
2
23019
#!/usr/bin/python # -*- coding: utf-8 -*- # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['stableinterface'], 'supported_by': 'curated'} DOCUMENTATION = ''' --- module: ec2_group author: "Andrew de Quincey (@adq)" version_added: "1.3" short_description: maintain an ec2 VPC security group. description: - maintains ec2 security groups. This module has a dependency on python-boto >= 2.5 options: name: description: - Name of the security group. - One of and only one of I(name) or I(group_id) is required. - Required if I(state=present). required: false group_id: description: - Id of group to delete (works only with absent). - One of and only one of I(name) or I(group_id) is required. required: false version_added: "2.4" description: description: - Description of the security group. Required when C(state) is C(present). required: false vpc_id: description: - ID of the VPC to create the group in. required: false rules: description: - List of firewall inbound rules to enforce in this group (see example). If none are supplied, no inbound rules will be enabled. Rules list may include its own name in `group_name`. This allows idempotent loopback additions (e.g. allow group to access itself). Rule sources list support was added in version 2.4. 
This allows to define multiple sources per source type as well as multiple source types per rule. Prior to 2.4 an individual source is allowed. required: false rules_egress: description: - List of firewall outbound rules to enforce in this group (see example). If none are supplied, a default all-out rule is assumed. If an empty list is supplied, no outbound rules will be enabled. Rule Egress sources list support was added in version 2.4. required: false version_added: "1.6" state: version_added: "1.4" description: - Create or delete a security group required: false default: 'present' choices: [ "present", "absent" ] aliases: [] purge_rules: version_added: "1.8" description: - Purge existing rules on security group that are not found in rules required: false default: 'true' aliases: [] purge_rules_egress: version_added: "1.8" description: - Purge existing rules_egress on security group that are not found in rules_egress required: false default: 'true' aliases: [] extends_documentation_fragment: - aws - ec2 notes: - If a rule declares a group_name and that group doesn't exist, it will be automatically created. In that case, group_desc should be provided as well. The module will refuse to create a depended-on group without a description. 
''' EXAMPLES = ''' - name: example ec2 group ec2_group: name: example description: an example EC2 group vpc_id: 12345 region: eu-west-1 aws_secret_key: SECRET aws_access_key: ACCESS rules: - proto: tcp from_port: 80 to_port: 80 cidr_ip: 0.0.0.0/0 - proto: tcp from_port: 22 to_port: 22 cidr_ip: 10.0.0.0/8 - proto: tcp from_port: 443 to_port: 443 group_id: amazon-elb/sg-87654321/amazon-elb-sg - proto: tcp from_port: 3306 to_port: 3306 group_id: 123412341234/sg-87654321/exact-name-of-sg - proto: udp from_port: 10050 to_port: 10050 cidr_ip: 10.0.0.0/8 - proto: udp from_port: 10051 to_port: 10051 group_id: sg-12345678 - proto: icmp from_port: 8 # icmp type, -1 = any type to_port: -1 # icmp subtype, -1 = any subtype cidr_ip: 10.0.0.0/8 - proto: all # the containing group name may be specified here group_name: example rules_egress: - proto: tcp from_port: 80 to_port: 80 cidr_ip: 0.0.0.0/0 group_name: example-other # description to use if example-other needs to be created group_desc: other example EC2 group - name: example2 ec2 group ec2_group: name: example2 description: an example2 EC2 group vpc_id: 12345 region: eu-west-1 rules: # 'ports' rule keyword was introduced in version 2.4. It accepts a single port value or a list of values including ranges (from_port-to_port). - proto: tcp ports: 22 group_name: example-vpn - proto: tcp ports: - 80 - 443 - 8080-8099 cidr_ip: 0.0.0.0/0 # Rule sources list support was added in version 2.4. This allows to define multiple sources per source type as well as multiple source types per rule. 
- proto: tcp ports: - 6379 - 26379 group_name: - example-vpn - example-redis - proto: tcp ports: 5665 group_name: example-vpn cidr_ip: - 172.16.1.0/24 - 172.16.17.0/24 group_id: - sg-edcd9784 - name: "Delete group by its id" ec2_group: group_id: sg-33b4ee5b state: absent ''' import json import re import time from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ec2 import ec2_connect, ec2_argument_spec try: import boto.ec2 from boto.ec2.securitygroup import SecurityGroup from boto.exception import BotoServerError HAS_BOTO = True except ImportError: HAS_BOTO = False import traceback def deduplicate_rules_args(rules): """Returns unique rules""" if rules is None: return None return list(dict(zip((json.dumps(r, sort_keys=True) for r in rules), rules)).values()) def make_rule_key(prefix, rule, group_id, cidr_ip): """Creates a unique key for an individual group rule""" if isinstance(rule, dict): proto, from_port, to_port = [rule.get(x, None) for x in ('proto', 'from_port', 'to_port')] # fix for 11177 if proto not in ['icmp', 'tcp', 'udp'] and from_port == -1 and to_port == -1: from_port = 'none' to_port = 'none' else: # isinstance boto.ec2.securitygroup.IPPermissions proto, from_port, to_port = [getattr(rule, x, None) for x in ('ip_protocol', 'from_port', 'to_port')] key = "%s-%s-%s-%s-%s-%s" % (prefix, proto, from_port, to_port, group_id, cidr_ip) return key.lower().replace('-none', '-None') def addRulesToLookup(rules, prefix, rules_dict): for rule in rules: for grant in rule.grants: rules_dict[make_rule_key(prefix, rule, grant.group_id, grant.cidr_ip)] = (rule, grant) def validate_rule(module, rule): VALID_PARAMS = ('cidr_ip', 'group_id', 'group_name', 'group_desc', 'proto', 'from_port', 'to_port') if not isinstance(rule, dict): module.fail_json(msg='Invalid rule parameter type [%s].' 
% type(rule)) for k in rule: if k not in VALID_PARAMS: module.fail_json(msg='Invalid rule parameter \'{}\''.format(k)) if 'group_id' in rule and 'cidr_ip' in rule: module.fail_json(msg='Specify group_id OR cidr_ip, not both') elif 'group_name' in rule and 'cidr_ip' in rule: module.fail_json(msg='Specify group_name OR cidr_ip, not both') elif 'group_id' in rule and 'group_name' in rule: module.fail_json(msg='Specify group_id OR group_name, not both') def get_target_from_rule(module, ec2, rule, name, group, groups, vpc_id): """ Returns tuple of (group_id, ip) after validating rule params. rule: Dict describing a rule. name: Name of the security group being managed. groups: Dict of all available security groups. AWS accepts an ip range or a security group as target of a rule. This function validate the rule specification and return either a non-None group_id or a non-None ip range. """ FOREIGN_SECURITY_GROUP_REGEX = '^(\S+)/(sg-\S+)/(\S+)' group_id = None group_name = None ip = None target_group_created = False if 'group_id' in rule and 'cidr_ip' in rule: module.fail_json(msg="Specify group_id OR cidr_ip, not both") elif 'group_name' in rule and 'cidr_ip' in rule: module.fail_json(msg="Specify group_name OR cidr_ip, not both") elif 'group_id' in rule and 'group_name' in rule: module.fail_json(msg="Specify group_id OR group_name, not both") elif 'group_id' in rule and re.match(FOREIGN_SECURITY_GROUP_REGEX, rule['group_id']): # this is a foreign Security Group. 
Since you can't fetch it you must create an instance of it owner_id, group_id, group_name = re.match(FOREIGN_SECURITY_GROUP_REGEX, rule['group_id']).groups() group_instance = SecurityGroup(owner_id=owner_id, name=group_name, id=group_id) groups[group_id] = group_instance groups[group_name] = group_instance elif 'group_id' in rule: group_id = rule['group_id'] elif 'group_name' in rule: group_name = rule['group_name'] if group_name == name: group_id = group.id groups[group_id] = group groups[group_name] = group elif group_name in groups and (vpc_id is None or groups[group_name].vpc_id == vpc_id): group_id = groups[group_name].id else: if not rule.get('group_desc', '').strip(): module.fail_json(msg="group %s will be automatically created by rule %s and no description was provided" % (group_name, rule)) if not module.check_mode: auto_group = ec2.create_security_group(group_name, rule['group_desc'], vpc_id=vpc_id) group_id = auto_group.id groups[group_id] = auto_group groups[group_name] = auto_group target_group_created = True elif 'cidr_ip' in rule: ip = rule['cidr_ip'] return group_id, ip, target_group_created def ports_expand(ports): # takes a list of ports and returns a list of (port_from, port_to) ports_expanded = [] for port in ports: if not isinstance(port, str): ports_expanded.append((port,) * 2) elif '-' in port: ports_expanded.append(tuple(p.strip() for p in port.split('-', 1))) else: ports_expanded.append((port.strip(),) * 2) return ports_expanded def rule_expand_ports(rule): # takes a rule dict and returns a list of expanded rule dicts if 'ports' not in rule: return [rule] ports = rule['ports'] if isinstance(rule['ports'], list) else [rule['ports']] rule_expanded = [] for from_to in ports_expand(ports): temp_rule = rule.copy() del temp_rule['ports'] temp_rule['from_port'], temp_rule['to_port'] = from_to rule_expanded.append(temp_rule) return rule_expanded def rules_expand_ports(rules): # takes a list of rules and expands it based on 'ports' if not rules: 
return rules return [rule for rule_complex in rules for rule in rule_expand_ports(rule_complex)] def rule_expand_source(rule, source_type): # takes a rule dict and returns a list of expanded rule dicts for specified source_type sources = rule[source_type] if isinstance(rule[source_type], list) else [rule[source_type]] source_types_all = ('cidr_ip', 'group_id', 'group_name') rule_expanded = [] for source in sources: temp_rule = rule.copy() for s in source_types_all: temp_rule.pop(s, None) temp_rule[source_type] = source rule_expanded.append(temp_rule) return rule_expanded def rule_expand_sources(rule): # takes a rule dict and returns a list of expanded rule discts source_types = (stype for stype in ('cidr_ip', 'group_id', 'group_name') if stype in rule) return [r for stype in source_types for r in rule_expand_source(rule, stype)] def rules_expand_sources(rules): # takes a list of rules and expands it based on 'cidr_ip', 'group_id', 'group_name' if not rules: return rules return [rule for rule_complex in rules for rule in rule_expand_sources(rule_complex)] def main(): argument_spec = ec2_argument_spec() argument_spec.update(dict( name=dict(), group_id=dict(), description=dict(), vpc_id=dict(), rules=dict(type='list'), rules_egress=dict(type='list'), state=dict(default='present', type='str', choices=['present', 'absent']), purge_rules=dict(default=True, required=False, type='bool'), purge_rules_egress=dict(default=True, required=False, type='bool'), ) ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, required_one_of=[['name', 'group_id']], required_if=[['state', 'present', ['name']]], ) if not HAS_BOTO: module.fail_json(msg='boto required for this module') name = module.params['name'] group_id = module.params['group_id'] description = module.params['description'] vpc_id = module.params['vpc_id'] rules = deduplicate_rules_args(rules_expand_sources(rules_expand_ports(module.params['rules']))) rules_egress = 
deduplicate_rules_args(rules_expand_sources(rules_expand_ports(module.params['rules_egress']))) state = module.params.get('state') purge_rules = module.params['purge_rules'] purge_rules_egress = module.params['purge_rules_egress'] if state == 'present' and not description: module.fail_json(msg='Must provide description when state is present.') changed = False ec2 = ec2_connect(module) # find the group if present group = None groups = {} try: security_groups = ec2.get_all_security_groups() except BotoServerError as e: module.fail_json(msg="Error in get_all_security_groups: %s" % e.message, exception=traceback.format_exc()) for curGroup in security_groups: groups[curGroup.id] = curGroup if curGroup.name in groups: # Prioritise groups from the current VPC if vpc_id is None or curGroup.vpc_id == vpc_id: groups[curGroup.name] = curGroup else: groups[curGroup.name] = curGroup if group_id: if curGroup.id == group_id: group = curGroup else: if curGroup.name == name and (vpc_id is None or curGroup.vpc_id == vpc_id): group = curGroup # Ensure requested group is absent if state == 'absent': if group: # found a match, delete it try: if not module.check_mode: group.delete() except BotoServerError as e: module.fail_json(msg="Unable to delete security group '%s' - %s" % (group, e.message), exception=traceback.format_exc()) else: group = None changed = True else: # no match found, no changes required pass # Ensure requested group is present elif state == 'present': if group: # existing group if group.description != description: module.fail_json(msg="Group description does not match existing group. ec2_group does not support this case.") # if the group doesn't exist, create it now else: # no match found, create it if not module.check_mode: group = ec2.create_security_group(name, description, vpc_id=vpc_id) # When a group is created, an egress_rule ALLOW ALL # to 0.0.0.0/0 is added automatically but it's not # reflected in the object returned by the AWS API # call. 
We re-read the group for getting an updated object # amazon sometimes takes a couple seconds to update the security group so wait till it exists while len(ec2.get_all_security_groups(filters={'group_id': group.id})) == 0: time.sleep(0.1) group = ec2.get_all_security_groups(group_ids=(group.id,))[0] changed = True else: module.fail_json(msg="Unsupported state requested: %s" % state) # create a lookup for all existing rules on the group if group: # Manage ingress rules groupRules = {} addRulesToLookup(group.rules, 'in', groupRules) # Now, go through all provided rules and ensure they are there. if rules is not None: for rule in rules: validate_rule(module, rule) group_id, ip, target_group_created = get_target_from_rule(module, ec2, rule, name, group, groups, vpc_id) if target_group_created: changed = True if rule['proto'] in ('all', '-1', -1): rule['proto'] = -1 rule['from_port'] = None rule['to_port'] = None # Convert ip to list we can iterate over if not isinstance(ip, list): ip = [ip] # If rule already exists, don't later delete it for thisip in ip: ruleId = make_rule_key('in', rule, group_id, thisip) if ruleId not in groupRules: grantGroup = None if group_id: grantGroup = groups[group_id] if not module.check_mode: group.authorize(rule['proto'], rule['from_port'], rule['to_port'], thisip, grantGroup) changed = True else: del groupRules[ruleId] # Finally, remove anything left in the groupRules -- these will be defunct rules if purge_rules: for (rule, grant) in groupRules.values(): grantGroup = None if grant.group_id: if grant.owner_id != group.owner_id: # this is a foreign Security Group. 
Since you can't fetch it you must create an instance of it group_instance = SecurityGroup(owner_id=grant.owner_id, name=grant.name, id=grant.group_id) groups[grant.group_id] = group_instance groups[grant.name] = group_instance grantGroup = groups[grant.group_id] if not module.check_mode: group.revoke(rule.ip_protocol, rule.from_port, rule.to_port, grant.cidr_ip, grantGroup) changed = True # Manage egress rules groupRules = {} addRulesToLookup(group.rules_egress, 'out', groupRules) # Now, go through all provided rules and ensure they are there. if rules_egress is not None: for rule in rules_egress: validate_rule(module, rule) group_id, ip, target_group_created = get_target_from_rule(module, ec2, rule, name, group, groups, vpc_id) if target_group_created: changed = True if rule['proto'] in ('all', '-1', -1): rule['proto'] = -1 rule['from_port'] = None rule['to_port'] = None # Convert ip to list we can iterate over if not isinstance(ip, list): ip = [ip] # If rule already exists, don't later delete it for thisip in ip: ruleId = make_rule_key('out', rule, group_id, thisip) if ruleId in groupRules: del groupRules[ruleId] # Otherwise, add new rule else: grantGroup = None if group_id: grantGroup = groups[group_id].id if not module.check_mode: ec2.authorize_security_group_egress( group_id=group.id, ip_protocol=rule['proto'], from_port=rule['from_port'], to_port=rule['to_port'], src_group_id=grantGroup, cidr_ip=thisip) changed = True else: # when no egress rules are specified, # we add in a default allow all out rule, which was the # default behavior before egress rules were added default_egress_rule = 'out--1-None-None-None-0.0.0.0/0' if default_egress_rule not in groupRules: if not module.check_mode: ec2.authorize_security_group_egress( group_id=group.id, ip_protocol=-1, from_port=None, to_port=None, src_group_id=None, cidr_ip='0.0.0.0/0' ) changed = True else: # make sure the default egress rule is not removed del groupRules[default_egress_rule] # Finally, remove anything 
left in the groupRules -- these will be defunct rules if purge_rules_egress: for (rule, grant) in groupRules.values(): grantGroup = None if grant.group_id: grantGroup = groups[grant.group_id].id if not module.check_mode: ec2.revoke_security_group_egress( group_id=group.id, ip_protocol=rule.ip_protocol, from_port=rule.from_port, to_port=rule.to_port, src_group_id=grantGroup, cidr_ip=grant.cidr_ip) changed = True if group: module.exit_json(changed=changed, group_id=group.id) else: module.exit_json(changed=changed, group_id=None) if __name__ == '__main__': main()
gpl-3.0
-2,607,996,531,438,057,500
35.422468
157
0.58039
false
3.937564
false
false
false
Torrib/gradestats
grades/migrations/0001_initial.py
1
2914
# -*- coding: utf-8 -*- # Generated by Django 1.10 on 2017-03-28 17:10 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Course', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('norwegian_name', models.CharField(max_length=255, verbose_name='Norwegian Name')), ('short_name', models.CharField(max_length=50, verbose_name='Short name')), ('code', models.CharField(max_length=15, verbose_name='Code')), ('faculty_code', models.IntegerField(default=0, verbose_name='Faculty Code')), ('english_name', models.CharField(max_length=255, verbose_name='English name')), ('credit', models.FloatField(default=7.5, verbose_name='Credit')), ('study_level', models.SmallIntegerField()), ('taught_in_spring', models.BooleanField(default=False)), ('taught_in_autumn', models.BooleanField(default=False)), ('taught_from', models.IntegerField()), ('taught_in_english', models.BooleanField(default=False)), ('last_year_taught', models.IntegerField(default=0)), ('content', models.TextField()), ('learning_form', models.TextField()), ('learning_goal', models.TextField()), ], ), migrations.CreateModel( name='Grade', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('semester_code', models.CharField(max_length=10, verbose_name='Semester')), ('average_grade', models.FloatField()), ('passed', models.IntegerField(default=0)), ('a', models.SmallIntegerField(default=0)), ('b', models.SmallIntegerField(default=0)), ('c', models.SmallIntegerField(default=0)), ('d', models.SmallIntegerField(default=0)), ('e', models.SmallIntegerField(default=0)), ('f', models.SmallIntegerField(default=0)), ('course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='grades.Course')), ], ), migrations.CreateModel( 
name='Tag', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tag', models.CharField(max_length=32, verbose_name='Tag text')), ('courses', models.ManyToManyField(to='grades.Course')), ], ), ]
apache-2.0
7,684,794,871,829,306,000
46
114
0.563143
false
4.408472
false
false
false
chslion/raspy
raspytasks/rcsocket/socket.py
1
8753
# -*- coding: utf-8 -*- import random from raspysystem.raspycollection import RasPyCollection from raspysystem.raspysamplelogger import RasPySampleLogger from raspysystem.raspyenergymeter import RasPyEnergyMeter from raspysystem.raspytask import RasPySimpleTask from rcswitch import SwitchTypeB class Socket(object): AUTOMAT_UNKNOWN = "?" # 2% noise NOISE_FULLSCALE = 0.02 MODE_USER = 0 MODE_AUTO = 1 def __init__( self, location, name, prms, address, period, maxlogs, db_path, db_prefix ): self._automat = self.AUTOMAT_UNKNOWN self._automat_msg = self.AUTOMAT_UNKNOWN self._mode = self.MODE_USER self._name = name self._location = location self._address = address self._state_user = False self._state_auto = False self._last_state = False self._state_changed = False self._prms = prms self._meter = RasPyEnergyMeter( period, db_path, db_prefix ) self._log = RasPySampleLogger(maxlogs) def get_mode(self): return self._mode def get_state_user(self): return self._state_user def get_state_auto(self): return self._state_auto def get_state_changed(self): return self._state_changed def get_name(self): return self._name def get_location(self): return self._location def get_address(self): return self._address def get_meter(self): return self._meter def switch_auto(self, state, automat, msg): self._automat = automat self._automat_msg = msg self._state_auto = state def mode_auto(self): self._mode = self.MODE_AUTO def mode_user(self, newstate): self._mode = self.MODE_USER self._state_user = newstate def get_state(self): if self._mode == self.MODE_AUTO: return self._state_auto return self._state_user def update(self, time): # change event generation self._state_changed = self.get_state() != self._last_state self._last_state = self.get_state() state = self.get_state() if state: noise = self._prms * self.NOISE_FULLSCALE power = self._prms + random.uniform(-noise, +noise) else: power = 0 self._meter.update(time, power) self._log.log(time.jstimestamp(), 1 if state else 0) def serialize(self): 
return dict( name=self._name, location=self._location, state=self.get_state(), automat=self._automat, automat_msg=self._automat_msg, mode=self._mode, prms=self._prms, log=self._log.serialize(), energy=self._meter.serialize() ) class SocketCollection(RasPyCollection): def __init__(self, collection): RasPyCollection.__init__(self, collection) def find_name(self, name, invert=False): socks = filter(lambda s: (s.get_name() == name) ^ invert, self._items) return SocketCollection(socks) def find_location(self, location, invert=False): socks = filter(lambda s: (s.get_location() == location) ^ invert, self._items) return SocketCollection(socks) def get(self, address): for socket in self._items: if socket.get_address() == address: return socket return None def get_index(self, address): for index in range(len(self._items)): socket = self._items[index] if socket.get_address() == address: return index return -1 class SocketControllerTask(RasPySimpleTask): RF_TX_PIN = 4 def __init__(self, parent): RasPySimpleTask.__init__(self, parent, "socketctrl") self._rcswitch = SwitchTypeB(self.RF_TX_PIN, True) self._last_switch_count = 0 self._sockets = list() # if user wants to switch all self._force_all = False def get_rcswitch(self): return self._rcswitch def get_sockets(self): return SocketCollection(self._sockets) # switch socket only if new state is not old state # so we dont spam rf every minute def _switch_all_sockets(self, time, force): for socket in self._sockets: if not force and not socket.get_state_changed(): continue self._rcswitch.switch( time, socket.get_address(), socket.get_state() ) self._last_switch_count += 1 def _req_force_all(self, args, update): if not update: return self.req_statecheck( "force_all", self._force_all == True ) self._force_all = True return self.REQ_PASS def _req_mode_user(self, args, update): address = args["address"] state = args["state"] socket = self.get_sockets().get(address) if not update: if socket is None: self.loge("Socket was not found: 
{}".format(address)) return self.REQ_FAIL index = self.get_sockets().get_index(address) return self.req_statecheck( "socket{}mode".format(index), ( socket.get_mode() == Socket.MODE_USER and socket.get_state_user() == state ) ) socket.mode_user(state) return self.REQ_PASS def _req_mode_auto(self, args, update): address = args["address"] socket = self.get_sockets().get(address) if not update: if socket is None: self.loge("Socket was not found: {}".format(address)) return self.REQ_FAIL index = self.get_sockets().get_index(address) return self.req_statecheck( "socket{}mode".format(index), socket.get_mode() == Socket.MODE_AUTO ) socket.mode_auto() return self.REQ_PASS def startup_event(self, db, cfg): maxlogs = self.kernel().get_updates24h() # 1) init switch self._rcswitch.hw_init() # 2) load from database # create tables if not exist db.execute( "CREATE TABLE IF NOT EXISTS '{}' ({}, {}, {}, {})".format( "rcsocket_sockets", "'address' TEXT PRIMARY KEY", "'location' TEXT", "'name' TEXT", "'prms' REAL" ) ) db.execute("SELECT * FROM rcsocket_sockets") for r in db.fetchall(): loc = str(r["location"]) name = str(r["name"]) address = str(r["address"]) # check address if self.get_sockets().get(address) is not None: self._logger.loge("Socket address is already taken: {}".format(name)) return False if not self._rcswitch.is_valid_address(address): self.loge("Socket address is invalid: {}".format(address)) return False socket = Socket( loc, name, float(r["prms"]), address, self.period(), maxlogs, self.kernel().get_databasepath(), "socket{}".format(len(self._sockets)) ) self._sockets.append(socket) # 3) register requests if not self.add_requests([ ["force_all", dict()], ["mode_user", dict(address="string",state="bool")], ["mode_auto", dict(address="string")] ]): return False return True def run_event(self): time = self.time() # update state, energy and log for socket in self._sockets: socket.update(time) self._last_switch_count = 0 self._force_all = False if time.new_hour(): # 
force new state every hour self._switch_all_sockets(time, True) else: # switch sockets if needed self._switch_all_sockets(time, self._force_all) return True def report_event(self): return dict( sockets=[so.serialize() for so in self._sockets], switch=dict( count=self._rcswitch.get_txcount(), last_count=self._last_switch_count, code=self._rcswitch.get_txcode(), timestamp=self._rcswitch.get_txtimestamp() ) )
gpl-3.0
6,210,325,488,518,129,000
27.235484
86
0.534902
false
4.146376
false
false
false
haje01/mersoz
mersoz/merge.py
1
1768
import os import re import codecs from optparse import OptionParser import ConfigParser from StringIO import StringIO def main(): parser = OptionParser("Usage: %prog [options] cfgpath cfgsect " "catalog-path") (options, args) = parser.parse_args() if len(args) < 3: parser.print_usage() return cfg = ConfigParser.RawConfigParser(dict(sep=' ', merge_charset='utf8', merge_skip_head=0)) cfgpath = os.path.expanduser(args[0]) cfg.read(cfgpath) cfgsect = args[1] catalog = args[2] path_ptrn = re.compile(cfg.get(cfgsect, 'path_ptrn')) charset = cfg.get(cfgsect, 'merge_charset') sep = cfg.get(cfgsect, 'seperator') sep = '\t' if sep == '\\t' else sep line_head = cfg.get(cfgsect, 'merge_line_head') merge_skip_head = int(cfg.get(cfgsect, 'merge_skip_head')) with open(catalog, 'r') as cf: for cline in cf: afile = cline.rstrip().split('\t')[0] match = path_ptrn.search(afile) if match is None: continue ginfo = match.groupdict() lhead = sep.join(line_head.format(**ginfo).split(',')) buf = StringIO() with codecs.open(afile, 'r', charset, errors='ignore') as f: for i, line in enumerate(f.readlines()): if i < merge_skip_head: continue line = line.rstrip() if len(line) > 0: buf.write(u'{}{}{}\n'.format(lhead, sep, line)) print buf.getvalue().rstrip().encode('utf8') buf.close() if __name__ == "__main__": main()
mit
-994,023,040,797,049,200
31.740741
72
0.516968
false
3.860262
false
false
false
karstenw/nodebox-pyobjc
examples/Extended Application/matplotlib/examples/misc/tight_bbox_test.py
1
1357
""" =============== Tight Bbox Test =============== """ from __future__ import print_function import matplotlib.pyplot as plt import numpy as np # nodebox section if __name__ == '__builtin__': # were in nodebox import os import tempfile W = 800 inset = 20 size(W, 600) plt.cla() plt.clf() plt.close('all') def tempimage(): fob = tempfile.NamedTemporaryFile(mode='w+b', suffix='.png', delete=False) fname = fob.name fob.close() return fname imgx = 20 imgy = 0 def pltshow(plt, dpi=150): global imgx, imgy temppath = tempimage() plt.savefig(temppath, dpi=dpi) dx,dy = imagesize(temppath) w = min(W,dx) image(temppath,imgx,imgy,width=w) imgy = imgy + dy + 20 os.remove(temppath) size(W, HEIGHT+dy+40) else: def pltshow(mplpyplot): mplpyplot.show() # nodebox section end ax = plt.axes([0.1, 0.3, 0.5, 0.5]) ax.pcolormesh(np.array([[1, 2], [3, 4]])) plt.yticks([0.5, 1.5], ["long long tick label", "tick label"]) plt.ylabel("My y-label") plt.title("Check saved figures for their bboxes") for ext in ["png", "pdf", "svg", "svgz", "eps"]: print("saving tight_bbox_test.%s" % (ext,)) plt.savefig("tight_bbox_test.%s" % (ext,), bbox_inches="tight") pltshow(plt)
mit
-3,163,077,550,626,046,000
24.12963
82
0.560796
false
3.098174
false
false
false
harshavardhana/minio-py
setup.py
1
2454
# Minio Python Library for Amazon S3 Compatible Cloud Storage, (C) 2015 Minio, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import re import sys from codecs import open try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit() version = '' with open('minio/__init__.py', 'r') as fd: version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) with open('README.rst', 'r', 'utf-8') as f: readme = f.read() packages = [ 'minio', ] requires = [ 'urllib3', 'pytz', 'certifi<=2015.4.28', ] tests_requires = [ 'nose', 'mock', 'fake-factory', ] setup( name='minio', description='Minio Python Library for Amazon S3 Compatible Cloud Storage for Python', author='Minio, Inc.', url='https://github.com/minio/minio-py', download_url='https://github.com/minio/minio-py', author_email='dev@minio.io', version=version, package_dir={'minio': 'minio'}, packages=packages, install_requires=requires, tests_require=tests_requires, setup_requires=['nose>=1.0'], license='Apache License 2.0', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Natural Language :: English', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 
'Programming Language :: Python :: 3.5', 'Topic :: Software Development :: Libraries :: Python Modules', ], long_description=readme, package_data={'': ['LICENSE', 'README.rst']}, include_package_data=True, )
apache-2.0
3,117,691,991,688,107,500
28.214286
89
0.639364
false
3.729483
false
false
false
nicolacimmino/LoP-RAN
LoPAccessPoint/MacroIP.py
1
2793
# MacroIP is part of MacroIP Core. Provides Access to IP services through simple # textual macros. # Copyright (C) 2014 Nicola Cimmino # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see http://www.gnu.org/licenses/. # # This service expects a LoPNode connected on serial port ttyUSB0 and set # to access point mode already (ATAP1). In due time autodiscovery and # configuration will be built. import MacroIP_DHCP import MacroIP_UDP import MacroIP_ICMP import MacroIP_STUN import MacroIP_HTTP import MacroIP_MSGP2P outputMacrosQueue = [] # Processes one macro def processMacroIPMacro(clientid, macro): if macro.startswith("dhcp."): MacroIP_DHCP.processMacro(clientid, macro) if macro.startswith("udp."): MacroIP_UDP.processMacro(clientid, macro) if macro.startswith("stun."): MacroIP_STUN.processMacro(clientid, macro) if macro.startswith("icmp."): MacroIP_ICMP.processMacro(clientid, macro) if macro.startswith("http."): MacroIP_HTTP.processMacro(clientid, macro) if macro.startswith("msgp2p."): MacroIP_MSGP2P.processMacro(clientid, macro) # Fetches a macro to be sent to a client of the # host application. 
def getOutputMacroIPMacro(): (clientid, macro) = MacroIP_DHCP.getOutputMacroIPMacro() if(clientid != None): outputMacrosQueue.append((clientid, macro)) (clientid, macro) = MacroIP_UDP.getOutputMacroIPMacro() if(clientid != None): outputMacrosQueue.append((clientid, macro)) (clientid, macro) = MacroIP_ICMP.getOutputMacroIPMacro() if(clientid != None): outputMacrosQueue.append((clientid, macro)) (clientid, macro) = MacroIP_STUN.getOutputMacroIPMacro() if(clientid != None): outputMacrosQueue.append((clientid, macro)) (clientid, macro) = MacroIP_HTTP.getOutputMacroIPMacro() if(clientid != None): outputMacrosQueue.append((clientid, macro)) (clientid, macro) = MacroIP_MSGP2P.getOutputMacroIPMacro() if(clientid != None): outputMacrosQueue.append((clientid, macro)) if len(outputMacrosQueue) > 0: return outputMacrosQueue.pop(0) else: return (None, None) def startActivity(): MacroIP_UDP.startActivity() MacroIP_ICMP.startActivity()
gpl-3.0
-7,017,552,991,264,904,000
32.662651
80
0.723595
false
3.385455
false
false
false
MicroPyramid/Django-CRM
teams/api_views.py
1
8866
from django.contrib.sites.shortcuts import get_current_site from django.core.exceptions import PermissionDenied from django.db.models import Q from teams import swagger_params from teams.models import Teams from teams.tasks import update_team_users, remove_users from teams.serializer import TeamsSerializer, TeamCreateSerializer from common.models import User from common.custom_auth import JSONWebTokenAuthentication from common.serializer import UserSerializer from rest_framework import status from rest_framework.views import APIView from rest_framework.response import Response from rest_framework.permissions import IsAuthenticated from rest_framework.pagination import LimitOffsetPagination from drf_yasg.utils import swagger_auto_schema import json class TeamsListView(APIView, LimitOffsetPagination): model = Teams authentication_classes = (JSONWebTokenAuthentication,) permission_classes = (IsAuthenticated,) def get_context_data(self, **kwargs): params = ( self.request.query_params if len(self.request.data) == 0 else self.request.data ) queryset = self.model.objects.all() request_post = params if request_post: if request_post.get("team_name"): queryset = queryset.filter( name__icontains=request_post.get("team_name") ) if request_post.get("created_by"): queryset = queryset.filter(created_by=request_post.get("created_by")) if request_post.get("assigned_users"): queryset = queryset.filter( users__id__in=json.loads(request_post.get("assigned_users")) ) context = {} search = False if ( params.get("team_name") or params.get("created_by") or params.get("assigned_users") ): search = True context["search"] = search results_teams = self.paginate_queryset( queryset.distinct(), self.request, view=self ) teams = TeamsSerializer(results_teams, many=True).data context["per_page"] = 10 context.update( { "teams_count": self.count, "next": self.get_next_link(), "previous": self.get_previous_link(), "page_number": int(self.offset / 10) + 1, } ) context["teams"] = teams users = 
User.objects.filter( is_active=True, ).order_by("id") context["users"] = UserSerializer(users, many=True).data return context @swagger_auto_schema( tags=["Teams"], manual_parameters=swagger_params.teams_list_get_params ) def get(self, request, *args, **kwargs): if self.request.user.role != "ADMIN" and not self.request.user.is_superuser: return Response( { "error": True, "errors": "You don't have permission to perform this action.", }, status=status.HTTP_403_FORBIDDEN, ) context = self.get_context_data(**kwargs) return Response(context) @swagger_auto_schema( tags=["Teams"], manual_parameters=swagger_params.teams_create_post_params ) def post(self, request, *args, **kwargs): if self.request.user.role != "ADMIN" and not self.request.user.is_superuser: return Response( { "error": True, "errors": "You don't have permission to perform this action.", }, status=status.HTTP_403_FORBIDDEN, ) params = ( self.request.query_params if len(self.request.data) == 0 else self.request.data ) serializer = TeamCreateSerializer(data=params, request_obj=request) data = {} if serializer.is_valid(): team_obj = serializer.save(created_by=request.user) if params.get("assign_users"): assinged_to_users_ids = json.loads(params.get("assign_users")) for user_id in assinged_to_users_ids: user = User.objects.filter(id=user_id) if user.exists(): team_obj.users.add(user_id) else: team_obj.delete() data["users"] = "Please enter valid user" return Response( {"error": True, "errors": data}, status=status.HTTP_400_BAD_REQUEST, ) return Response( {"error": False, "message": "Team Created Successfully"}, status=status.HTTP_200_OK, ) return Response( {"error": True, "errors": serializer.errors}, status=status.HTTP_400_BAD_REQUEST, ) class TeamsDetailView(APIView): model = Teams authentication_classes = (JSONWebTokenAuthentication,) permission_classes = (IsAuthenticated,) def get_object(self, pk): return self.model.objects.get(pk=pk) @swagger_auto_schema( tags=["Teams"], ) def get(self, request, pk, 
**kwargs): if self.request.user.role != "ADMIN" and not self.request.user.is_superuser: return Response( { "error": True, "errors": "You don't have permission to perform this action.", }, status=status.HTTP_403_FORBIDDEN, ) self.team_obj = self.get_object(pk) context = {} context["team"] = TeamsSerializer(self.team_obj).data context["users"] = UserSerializer( User.objects.filter(is_active=True).order_by("email"), many=True, ).data return Response(context) @swagger_auto_schema( tags=["Teams"], manual_parameters=swagger_params.teams_create_post_params ) def put(self, request, pk, *args, **kwargs): if self.request.user.role != "ADMIN" and not self.request.user.is_superuser: return Response( { "error": True, "errors": "You don't have permission to perform this action.", }, status=status.HTTP_403_FORBIDDEN, ) params = ( self.request.query_params if len(self.request.data) == 0 else self.request.data ) self.team = self.get_object(pk) actual_users = self.team.get_users() removed_users = [] serializer = TeamCreateSerializer( data=params, instance=self.team, request_obj=request ) data = {} if serializer.is_valid(): team_obj = serializer.save() team_obj.users.clear() if params.get("assign_users"): assinged_to_users_ids = json.loads(params.get("assign_users")) for user_id in assinged_to_users_ids: user = User.objects.filter(id=user_id) if user.exists(): team_obj.users.add(user_id) else: data["users"] = "Please enter valid user" return Response( {"error": True, "errors": data}, status=status.HTTP_400_BAD_REQUEST, ) update_team_users.delay(pk) latest_users = team_obj.get_users() for user in actual_users: if user in latest_users: pass else: removed_users.append(user) remove_users.delay(removed_users, pk) return Response( {"error": False, "message": "Team Updated Successfully"}, status=status.HTTP_200_OK, ) return Response( {"error": True, "errors": serializer.errors}, status=status.HTTP_400_BAD_REQUEST, ) @swagger_auto_schema( tags=["Teams"], ) def delete(self, request, pk, 
**kwargs): if self.request.user.role != "ADMIN" and not self.request.user.is_superuser: return Response( { "error": True, "errors": "You don't have permission to perform this action.", }, status=status.HTTP_403_FORBIDDEN, ) self.team_obj = self.get_object(pk) self.team_obj.delete() return Response( {"error": False, "message": "Team Deleted Successfully"}, status=status.HTTP_200_OK, )
mit
-466,532,188,539,851,400
35.636364
85
0.538236
false
4.395637
false
false
false
CaliOpen/CaliOpen
src/backend/interfaces/REST/py.server/caliopen_api/message/message.py
1
4304
# -*- coding: utf-8 -*- from __future__ import absolute_import, print_function, unicode_literals import logging from cornice.resource import resource, view from pyramid.response import Response from caliopen_main.message.objects.message import Message as ObjectMessage from caliopen_main.message.core import RawMessage from caliopen_storage.exception import NotFound from ..base import Api from ..base.exception import (ResourceNotFound, MergePatchError) from pyramid.httpexceptions import HTTPServerError, HTTPMovedPermanently from caliopen_pi.features import marshal_features log = logging.getLogger(__name__) @resource(collection_path='/messages', path='/messages/{message_id}') class Message(Api): def __init__(self, request): self.request = request self.user = request.authenticated_userid @view(renderer='json', permission='authenticated') def collection_post(self): data = self.request.json if 'privacy_features' in data: features = marshal_features(data['privacy_features']) data['privacy_features'] = features # ^ json payload should have been validated by swagger module try: message = ObjectMessage.create_draft(user=self.user, **data) except Exception as exc: log.exception(exc) raise MergePatchError(error=exc) message_url = self.request.route_path('message', message_id=str( message.message_id)) message_url = message_url.replace("/v1/", "/v2/") self.request.response.location = message_url.encode('utf-8') return {'location': message_url} @view(renderer='json', permission='authenticated') def patch(self): """Update a message with payload. method follows the rfc5789 PATCH and rfc7396 Merge patch specifications, + 'current_state' caliopen's specs. stored messages are modified according to the fields within the payload, ie payload fields squash existing db fields, no other modification done. If message doesn't existing, response is 404. If payload fields are not conform to the message db schema, response is 422 (Unprocessable Entity). 
Successful response is 204, without a body. """ message_id = self.request.swagger_data["message_id"] patch = self.request.json if 'privacy_features' in patch: features = marshal_features(patch['privacy_features']) patch['privacy_features'] = features if 'privacy_features' in patch.get('current_state', {}): current = patch['current_state']['privacy_features'] features = marshal_features(current) patch['current_state']['privacy_features'] = features message = ObjectMessage(user=self.user, message_id=message_id) try: message.patch_draft(self.user, patch, db=True, index=True, with_validation=True) except Exception as exc: raise MergePatchError(exc) return Response(None, 204) @view(renderer='json', permission='authenticated') def delete(self): message_id = self.request.swagger_data["message_id"] message = ObjectMessage(user=self.user, message_id=message_id) try: message.get_db() message.get_index() except NotFound: raise ResourceNotFound try: message.delete_db() message.delete_index() except Exception as exc: raise HTTPServerError(exc) return Response(None, 204) @resource(path='/raws/{raw_msg_id}') class Raw(Api): """returns a raw message""" def __init__(self, request): self.request = request self.user = request.authenticated_userid @view(renderer='text_plain', permission='authenticated') def get(self): # XXX how to check privacy_index ? raw_msg_id = self.request.matchdict.get('raw_msg_id') raw = RawMessage.get_for_user(self.user.user_id, raw_msg_id) if raw: return raw.raw_data raise ResourceNotFound('No such message')
gpl-3.0
-7,047,381,982,959,401,000
35.474576
80
0.62895
false
4.369543
false
false
false
cloud-ark/cloudark
server/gcloud_handler.py
1
4809
import ast from os.path import expanduser from stevedore import extension from common import common_functions from common import fm_logger from dbmodule.objects import app as app_db from dbmodule.objects import environment as env_db from server.server_plugins.gcloud import gcloud_helper home_dir = expanduser("~") APP_AND_ENV_STORE_PATH = ("{home_dir}/.cld/data/deployments/").format(home_dir=home_dir) fmlogger = fm_logger.Logging() class GCloudHandler(object): res_mgr = extension.ExtensionManager( namespace='server.server_plugins.gcloud.resource', invoke_on_load=True, ) coe_mgr = extension.ExtensionManager( namespace='server.server_plugins.gcloud.coe', invoke_on_load=True, ) app_mgr = extension.ExtensionManager( namespace='server.server_plugins.gcloud.app', invoke_on_load=True, ) gcloudhelper = gcloud_helper.GCloudHelper() def create_resources(self, env_id, resource_list): fmlogger.debug("GCloudHandler create_resources") resource_details = '' ret_status_list = [] for resource_defs in resource_list: resource_details = resource_defs['resource'] type = resource_details['type'] env_db.Environment().update(env_id, {'status': 'creating_' + type}) for name, ext in GCloudHandler.res_mgr.items(): if name == type: status = ext.obj.create(env_id, resource_details) if status: ret_status_list.append(status) return ret_status_list def delete_resource(self, env_id, resource): fmlogger.debug("GCloudHandler delete_resource") type = resource.type env_db.Environment().update(env_id, {'status': 'deleting_' + type}) for name, ext in GCloudHandler.res_mgr.items(): if name == type: ext.obj.delete(resource) def run_command(self, env_id, env_name, resource, command_string): fmlogger.debug("GCloudHandler run_command") type = resource.type command_type = GCloudHandler.gcloudhelper.resource_type_for_command(command_string) command_output_all = [] for name, ext in GCloudHandler.res_mgr.items(): if name == type: if name == command_type or command_string == 'help': command_output = 
ext.obj.run_command(env_id, env_name, resource, command_string) command_output_all.extend(command_output) coe_type = common_functions.get_coe_type(env_id) for name, ext in GCloudHandler.coe_mgr.items(): if name == coe_type: if name == command_type or command_string == 'help': command_output = ext.obj.run_command(env_id, env_name, resource, command_string) command_output_all.extend(command_output) return command_output_all def create_cluster(self, env_id, env_info): coe_type = common_functions.get_coe_type(env_id) for name, ext in GCloudHandler.coe_mgr.items(): if name == coe_type: status = ext.obj.create_cluster(env_id, env_info) return status def delete_cluster(self, env_id, env_info, resource): coe_type = common_functions.get_coe_type(env_id) for name, ext in GCloudHandler.coe_mgr.items(): if name == coe_type: ext.obj.delete_cluster(env_id, env_info, resource) def create_container(self, cont_name, cont_info): repo_type = cont_info['dep_target'] for name, ext in GCloudHandler.res_mgr.items(): if name == repo_type: ext.obj.create(cont_name, cont_info) def delete_container(self, cont_name, cont_info): repo_type = cont_info['dep_target'] for name, ext in GCloudHandler.res_mgr.items(): if name == repo_type: ext.obj.delete(cont_name, cont_info) # App functions def deploy_application(self, app_id, app_info): app_type = common_functions.get_app_type(app_id) for name, ext in GCloudHandler.app_mgr.items(): if name == app_type: ext.obj.deploy_application(app_id, app_info) def delete_application(self, app_id, app_info): app_type = common_functions.get_app_type(app_id) for name, ext in GCloudHandler.app_mgr.items(): if name == app_type: ext.obj.delete_application(app_id, app_info) def get_logs(self, app_id, app_info): log_lines = '' app_type = common_functions.get_app_type(app_id) for name, ext in GCloudHandler.app_mgr.items(): if name == app_type: log_lines = ext.obj.get_logs(app_id, app_info) return log_lines
apache-2.0
-7,426,348,535,844,958,000
36.578125
100
0.614681
false
3.657034
false
false
false
kiruto/Weixin-Article-Spider
storage/sqlite_storage.py
1
6856
# -*- coding: utf-8 -*- import hashlib import json import re import sqlite3 import time import datetime import common from storage import db version = '1.0' class SQLiteStorage: def __init__(self): self._connect = sqlite3.connect(db) self._connect.text_factory = str self._create_table() def subscribe(self, wxid): c = self._connect.cursor() c.execute("INSERT INTO wxid(name) VALUES (?)", [wxid]) self._connect.commit() c.close() def unsubscribe(self, wxid): c = self._connect.cursor() c.execute("DELETE FROM wxid WHERE name=?", [wxid]) self._connect.commit() c.close() def batch_subscribe(self, id_list): c = self._connect.cursor() data = [] for i in id_list: i = i.strip() if len(i) == 0: continue if not common.is_wxid(i): continue p = (i, ) data.append(p) try: c.executemany("INSERT OR REPLACE INTO wxid(name) VALUES (?)", data) self._connect.commit() except Exception as e: print(e) common.save_raw_error_log(exception=e) c.close() def edit_extra(self, wxid, extra_dict): """ :param wxid: string 微信id :type extra_dict: dict name: 公众号名称 wechatid: 公众号id jieshao: 介绍 renzhen: 认证,为空表示未认证 qrcode: 二维码 img: 头像图片 url: 最近文章地址 """ if not wxid or not extra_dict: return if isinstance(extra_dict, dict): extra_dict['version'] = version extra = json.dumps(extra_dict) c = self._connect.cursor() c.execute("UPDATE wxid SET extra=? 
WHERE name=?", [extra, wxid]) self._connect.commit() c.close() def get_wxid_list(self): c = self._connect.cursor() result = c.execute("SELECT * FROM wxid").fetchall() c.close() result_list = list() for r in result: result_list.append(WXIDRecord(r)) result_list.reverse() return result_list def insert_article(self, article, local_url, author_name=''): c = self._connect.cursor() m = hashlib.md5() m.update(article['title']) hash_id = m.hexdigest() date_time = time.localtime(int(article['datetime'])) date_time = time.strftime("%Y-%m-%d", date_time) extra = json.dumps(article) data = (hash_id, date_time, article['title'], "", extra, local_url, version, author_name) c.execute("""INSERT INTO article(hash_id, date_time, title, info, extra, content, version, author) VALUES (?, ?, ?, ?, ?, ?, ?, ?)""", data) self._connect.commit() c.close() def get_article(self, hash_id): c = self._connect.cursor() result = c.execute("SELECT * FROM article WHERE hash_id=?", [hash_id]).fetchone() c.close() if not result: return None else: return ArticleRecord(result) def get_articles_by_date_created(self, date): c = self._connect.cursor() result = c.execute("SELECT * FROM article" " WHERE created_at BETWEEN date(?) 
AND date(?, '+1 day')", [date, date]).fetchall() articles = list() for r in result: articles.append(ArticleRecord(r)) c.close() return articles def get_articles_by_date_written(self, date): c = self._connect.cursor() result = c.execute("SELECT * FROM article WHERE date_time=?", [date]).fetchall() articles = list() for r in result: articles.append(ArticleRecord(r)) c.close() return articles def get_articles_by_author(self, author): c = self._connect.cursor() result = c.execute("SELECT * FROM article WHERE author=?", [author]).fetchall() articles = list() for r in result: articles.append(ArticleRecord(r)) c.close() return articles def get_date_by_created(self): d = datetime.datetime.now() offset = datetime.timedelta(days=7) day = d - offset date_from = datetime.datetime(day.year, day.month, day.day, 0, 0, 0) date = str(date_from) c = self._connect.cursor() result = c.execute("SELECT strftime('%Y-%m-%d', created_at) FROM article" " WHERE datetime(created_at)>=datetime(?)" " GROUP BY strftime('%Y-%m-%d', created_at)", [date]).fetchall() return result def get_date_by_written(self): d = datetime.datetime.now() offset = datetime.timedelta(days=7) day = d - offset date_from = datetime.datetime(day.year, day.month, day.day, 0, 0, 0) date = str(date_from) c = self._connect.cursor() result = c.execute("SELECT strftime('%Y-%m-%d', date_time) FROM article" " WHERE datetime(date_time)>=datetime(?)" " GROUP BY strftime('%Y-%m-%d', date_time)", [date]).fetchall() return result def close(self): self._connect.close() def _create_table(self): c = self._connect.cursor() create_table_article = """CREATE TABLE IF NOT EXISTS article ( hash_id text PRIMARY KEY, date_time text, created_at text NOT NULL DEFAULT (datetime('now', 'localtime')), title text, info text, extra text, content text, version text, author text)""" create_table_wxid = "CREATE TABLE IF NOT EXISTS wxid (name text PRIMARY KEY, extra text)" c.execute(create_table_article) c.execute(create_table_wxid) self._connect.commit() 
c.close() class WXIDRecord(dict): def __init__(self, row, **kwargs): super(WXIDRecord, self).__init__(name=row[0], extra=row[1], **kwargs) if self['extra']: try: self['extra'] = json.loads(self['extra']) except Exception as e: print(e) class ArticleRecord(dict): def __init__(self, row, **kwargs): """ :param row: 从SELECT * FROM articles中出来的原始结果 :param kwargs: """ super(ArticleRecord, self).__init__( hash_id=row[0], date_time=row[1], created_at=row[2], title=row[3], info=row[4], extra=row[5], content=row[6], version=row[7], author=row[8], **kwargs) self['extra'] = json.loads(self['extra'])
gpl-3.0
-1,672,044,169,058,816,300
30.774648
110
0.531324
false
3.787353
false
false
false
havencruise/django-utils
templatetags/fieldset_form.py
1
1863
from django import template register = template.Library() @register.filter('get_form_field') def get_form_field(form, field): return form[field] @register.inclusion_tag('form_as_fieldset.html') def form_as_fieldset_fields(form, fieldsets=None): """ Render the form as a fieldset form. Example usage in template with 'myform' and 'myfieldsets as context attributes: {% form_as_fieldset_fields myform myfieldsets %} Sample fieldset: MY_FIELDSETS = ( ( 'info', ('first_name', 'middle_name', 'last_name', 'is_published') ), ( 'image', ('profile_image', 'avatar_image', 'profile_image_crop') ), ( 'profile', ('title', 'location', 'profile_full', 'profile_brief', 'website_url', 'average_artwork_cost', 'born_year', 'deceased_year') ), ( 'focus area', ('styles', 'mediums') ) ) """ if not fieldsets: fieldsets = ( ( '', tuple(form.fields.keys()), ), ) return {'form': form, 'fieldsets' : fieldsets} @register.filter('field_type') def field_type(field): """ Get the name of the field class. """ if hasattr(field, 'field'): field = field.field s = (type(field.widget).__name__).replace('Input', '').lower() return s @register.filter('strongify') def strongify(name): """ Takes a string and returns formatted strong version as in the example: Input: "My name is" Output: "My <strong> name is </strong>" """ names = name.split(' ') if names[1:]: strong_string = "<strong>" + " ".join(names[1:]) + "</strong>" return names[0] +" " + strong_string else: return name
mit
-6,858,266,544,679,391,000
24.534247
83
0.531938
false
3.825462
false
false
false
codycollier/booster
test/test_appserver_create_webdav.py
1
5138
#!/usr/bin/env python import time import unittest import boostertest class TestAppserverCreateWebdav(boostertest.BoosterTestCase): """ Test the appserver-create-webdav action """ def setUp(self): """ Set the action and other commonly used fixture data """ self.params = {} self.params['action'] = "appserver-create-webdav" self.params['appserver-name'] = "some-web-app" self.params['group-name'] = "Default" self.params['database-name'] = "Documents" self.params['root'] = "/Docs" self.params['port'] = "8801" # collect app server names for later teardown self.teardown_appservers = [] def tearDown(self): """ Remove items from server created during tests """ params = {} params['action'] = "appserver-delete" params['group-name'] = "Default" for appserver in self.teardown_appservers: params['appserver-name'] = appserver response, body = self.booster.request(params) self.assertTrue(response.status in (404, 200)) time.sleep(3) def test_basic_webdav_appserver_creation_results_in_201(self): """ A successful webdav appserver creation should result in a 201 """ params = self.params params['appserver-name'] = "webdav-loops" self.teardown_appservers.append("webdav-loops") response, body = self.booster.request(params) err = response.get("x-booster-error", "none") self.assertEqual(response.status, 201) self.assertEqual(err, "none") time.sleep(3) def test_create_webdav_appserver_with_existing_name_results_in_409(self): """ Attempting to create a pre-existing webdav appserver should result in 409 """ params = self.params params['appserver-name'] = "grape-nuts" self.teardown_appservers.append("grape-nuts") # create the appserver response, body = self.booster.request(params) self.assertEqual(response.status, 201) time.sleep(3) # second create should result in 409 response, body = self.booster.request(params) err = response.get("x-booster-error", "none") self.assertEqual(response.status, 409) self.assertTrue(err.find("already exists") != -1) def 
test_create_webdav_appserver_in_nonexistent_group_results_in_500(self): """ An appserver-create-webdav should fail with 500 if group does not exist """ params = self.params params['appserver-name'] = "webdav-crunch" params['group-name'] = "there-is-no-such-group" self.teardown_appservers.append("webdav-crunch") response, body = self.booster.request(params) err = response.get("x-booster-error", "none") self.assertEqual(response.status, 500) self.assertTrue(err.find("Error running action 'appserver-create-webdav'. Error: No such group") > -1) def test_create_webdav_appserver_with_invalid_name_results_in_500(self): """ An appserver-create-webdav with invalid appserver-name should be rejected by api and result in 500 """ badnames = ("%%zxcggg", "$fbbhhjh$") for badname in badnames: params = self.params params['appserver-name'] = badname # create should result in 500 response, body = self.booster.request(params) err = response.get("x-booster-error", "none") self.assertEqual(response.status, 500) self.assertTrue(err.find("Error running action 'appserver-create-webdav'") != -1) self.assertTrue(err.find("Error: Invalid lexical value") != -1) def test_create_webdav_appserver_with_missing_required_parameter_results_in_400(self): """ A missing but required parameters should result in 400 """ required_parameters = ("appserver-name", "group-name", "database-name", "root", "port") for rp in required_parameters: params = self.params.copy() del params[rp] response, body = self.booster.request(params) err = response.get("x-booster-error", "") self.assertEqual(response.status, 400) self.assertTrue(err.find("valid set of arguments was not provided") != 1) def test_create_webdav_appserver_with_empty_required_parameter_results_in_500(self): """ An empty but required parameters should result in 500 """ required_parameters = ("appserver-name", "group-name", "database-name", "root", "port") for rp in required_parameters: params = self.params.copy() params[rp] = "" # create should result 
in 500 response, body = self.booster.request(params) err = response.get("x-booster-error", "none") self.assertEqual(response.status, 500) self.assertTrue(err.find("Error running action 'appserver-create-webdav'") != -1) self.assertTrue(err.find("Error: ") != -1) if __name__=="__main__": unittest.main()
apache-2.0
-315,975,134,436,366,100
43.293103
114
0.619502
false
3.910198
true
false
false
rickerc/cinder_audit
cinder/tests/keymgr/mock_key_mgr.py
1
4301
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ A mock implementation of a key manager that stores keys in a dictionary. This key manager implementation is primarily intended for testing. In particular, it does not store keys persistently. Lack of a centralized key store also makes this implementation unsuitable for use among different services. Note: Instantiating this class multiple times will create separate key stores. Keys created in one instance will not be accessible from other instances of this class. """ import array from cinder import exception from cinder.keymgr import key from cinder.keymgr import key_mgr from cinder.openstack.common import uuidutils from cinder import utils class MockKeyManager(key_mgr.KeyManager): """ This mock key manager implementation supports all the methods specified by the key manager interface. This implementation stores keys within a dictionary, and as a result, it is not acceptable for use across different services. Side effects (e.g., raising exceptions) for each method are handled as specified by the key manager interface. This key manager is not suitable for use in production deployments. 
""" def __init__(self): self.keys = {} def _generate_hex_key(self, **kwargs): key_length = kwargs.get('key_length', 256) # hex digit => 4 bits hex_encoded = utils.generate_password(length=key_length / 4, symbolgroups='0123456789ABCDEF') return hex_encoded def _generate_key(self, **kwargs): _hex = self._generate_hex_key(**kwargs) return key.SymmetricKey('AES', array.array('B', _hex.decode('hex')).tolist()) def create_key(self, ctxt, **kwargs): """Creates a key. This implementation returns a UUID for the created key. A NotAuthorized exception is raised if the specified context is None. """ if ctxt is None: raise exception.NotAuthorized() key = self._generate_key(**kwargs) return self.store_key(ctxt, key) def _generate_key_id(self): key_id = uuidutils.generate_uuid() while key_id in self.keys: key_id = uuidutils.generate_uuid() return key_id def store_key(self, ctxt, key, **kwargs): """Stores (i.e., registers) a key with the key manager.""" if ctxt is None: raise exception.NotAuthorized() key_id = self._generate_key_id() self.keys[key_id] = key return key_id def copy_key(self, ctxt, key_id, **kwargs): if ctxt is None: raise exception.NotAuthorized() copied_key_id = self._generate_key_id() self.keys[copied_key_id] = self.keys[key_id] return copied_key_id def get_key(self, ctxt, key_id, **kwargs): """Retrieves the key identified by the specified id. This implementation returns the key that is associated with the specified UUID. A NotAuthorized exception is raised if the specified context is None; a KeyError is raised if the UUID is invalid. """ if ctxt is None: raise exception.NotAuthorized() return self.keys[key_id] def delete_key(self, ctxt, key_id, **kwargs): """Deletes the key identified by the specified id. A NotAuthorized exception is raised if the context is None and a KeyError is raised if the UUID is invalid. """ if ctxt is None: raise exception.NotAuthorized() del self.keys[key_id]
apache-2.0
2,807,719,877,161,916,400
33.685484
78
0.662404
false
4.271102
false
false
false
lingtools/lingtools
lingtools/util/datamanager.py
1
1638
""" Functions for managing data. """ # Copyright 2011-2013 Constantine Lignos # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import urllib2 import posixpath import zipfile def download(url, path=None): """Download a url, save under the same filename or the specified path, and return the path.""" print "Downloading %s..." % url try: url_file = urllib2.urlopen(url) except urllib2.HTTPError: raise IOError("Couldn't open URL %s." % repr(url)) # Use the provided path, or default to the basename filename = path if path else posixpath.basename(url) try: local_file = open(filename, 'wb') local_file.write(url_file.read()) local_file.close() except IOError: raise IOError("Couldn't write filename %s." % repr(filename)) return filename def unzip(filepath, destpath='.'): """Unzip a file.""" print "Unzipping %s..." % repr(filepath) try: zfile = zipfile.ZipFile(filepath, 'r') except (IOError, zipfile.BadZipfile): raise IOError("The zip file %s could not be opened." % repr(filepath)) zfile.extractall(destpath)
apache-2.0
6,173,010,345,642,779,000
30.5
98
0.686813
false
3.956522
false
false
false
AmritaLonkar/trunk
SU2_PY/SU2/run/adaptation.py
2
1063
import os, sys, shutil, copy from .. import io as su2io from .. import mesh as su2mesh from decompose import decompose as su2decomp def adaptation ( config , kind='' ): # local copy konfig = copy.deepcopy(config) # check kind if kind: konfig['KIND_ADAPT'] = kind kind = konfig.get('KIND_ADAPT','NONE') if kind == 'NONE': return {} # check adapted? # decompose su2decomp(konfig) # get adaptation function adapt_function = su2mesh.adapt.name_map[kind] # setup problem suffix = 'adapt' meshname_orig = konfig['MESH_FILENAME'] meshname_new = su2io.add_suffix( konfig['MESH_FILENAME'], suffix ) konfig['MESH_OUT_FILENAME'] = meshname_new # Run Adaptation info = adapt_function(konfig) # update super config config['MESH_FILENAME'] = meshname_new config['KIND_ADAPT'] = kind # files out files = { 'MESH' : meshname_new } # info out append_nestdict( info, { 'FILES' : files } ) return info
gpl-2.0
-1,794,196,554,823,683,300
20.714286
71
0.597366
false
3.396166
false
false
false
jmgc/myhdl-numeric
myhdl/_extractHierarchy.py
1
15467
# This file is part of the myhdl library, a Python package for using # Python as a Hardware Description Language. # # Copyright (C) 2003-2008 Jan Decaluwe # # The myhdl library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation; either version 2.1 of the # License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA """ myhdl _extractHierarchy module. """ from __future__ import absolute_import import inspect import re import string import sys import ast from ._errors import ExtractHierarchyError, ToVerilogError, ToVHDLError from ._enum import EnumItemType from ._Signal import _Signal, _isListOfSigs from ._compat import integer_types from ._getcellvars import _getCellVars from ._misc import _isGenSeq from ._resolverefs import _resolveRefs from ._util import _flatten, _genfunc, _isTupleOfInts, _isTupleOfFloats _profileFunc = None class _error: pass _error.NoInstances = "No instances found" _error.InconsistentHierarchy = "Inconsistent hierarchy - are all" \ " instances returned ?" 
_error.InconsistentToplevel = "Inconsistent top level %s for %s - should be 1" class _Constant(object): def __init__(self, orig_name, value): self.name = None self.orig_name = orig_name self.instance = None self.value = value self.used = False class _Instance(object): __slots__ = ['level', 'obj', 'subs', 'constdict', 'sigdict', 'memdict', 'romdict', 'name', 'func', 'frame', ] def __init__(self, level, obj, subs, constdict, sigdict, memdict, romdict, func, frame): self.level = level self.obj = obj self.subs = subs self.constdict = constdict self.sigdict = sigdict self.memdict = memdict self.romdict = romdict self.func = func self.frame = frame self.name = None _memInfoMap = {} class _MemInfo(object): __slots__ = ['mem', 'name', 'elObj', 'depth', 'type', '_used', '_driven', '_read'] def __init__(self, mem): self.mem = mem self.name = None self.depth = len(mem) self.elObj = mem[0] self.type = None self._used = False self._driven = None self._read = False @property def used(self): return self._used @used.setter def used(self, val): self._used = bool(val) for s in self.mem: s._used = bool(val) def _clear(self): self._driven = None self._read = False for el in self.mem: el._clear() def _getMemInfo(mem): return _memInfoMap[id(mem)] def _makeMemInfo(mem): key = id(mem) if key not in _memInfoMap: _memInfoMap[key] = _MemInfo(mem) return _memInfoMap[key] def _isMem(mem): return id(mem) in _memInfoMap _romInfoMap = {} class _RomInfo(object): __slots__ = ['mem', 'orig_name', 'name', 'elObj', 'depth', 'type', '_used'] def __init__(self, orig_name, mem): self.mem = mem self.orig_name = orig_name self.name = None self.depth = len(mem) if (self.depth > 0): if isinstance(mem[0], integer_types): for elObj in mem: if elObj < 0: break else: elObj = mem[0] self.elObj = elObj else: self.elObj = None self.type = None self._used = False @property def used(self): return self._used @used.setter def used(self, val): self._used = bool(val) def _getRomInfo(mem): return _romInfoMap[id(mem)] def 
_makeRomInfo(n, mem): key = id(mem) if key not in _romInfoMap: _romInfoMap[key] = _RomInfo(n, mem) return _romInfoMap[key] def _isRom(mem): return id(mem) in _romInfoMap _userCodeMap = {'verilog': {}, 'vhdl': {} } class _UserCode(object): __slots__ = ['code', 'namespace', 'funcname', 'func', 'sourcefile', 'sourceline'] def __init__(self, code, namespace, funcname, func, sourcefile, sourceline): self.code = code self.namespace = namespace self.sourcefile = sourcefile self.func = func self.funcname = funcname self.sourceline = sourceline def __str__(self): try: code = self._interpolate() except: tipe, value, _ = sys.exc_info() info = "in file %s, function %s starting on line %s:\n " % \ (self.sourcefile, self.funcname, self.sourceline) msg = "%s: %s" % (tipe, value) self.raiseError(msg, info) code = "\n%s\n" % code return code def _interpolate(self): return string.Template(self.code).substitute(self.namespace) class _UserCodeDepr(_UserCode): def _interpolate(self): return self.code % self.namespace class _UserVerilogCode(_UserCode): def raiseError(self, msg, info): raise ToVerilogError("Error in user defined Verilog code", msg, info) class _UserVhdlCode(_UserCode): def raiseError(self, msg, info): raise ToVHDLError("Error in user defined VHDL code", msg, info) class _UserVerilogCodeDepr(_UserVerilogCode, _UserCodeDepr): pass class _UserVhdlCodeDepr(_UserVhdlCode, _UserCodeDepr): pass class _UserVerilogInstance(_UserVerilogCode): def __str__(self): args = inspect.getargspec(self.func)[0] s = "%s %s(" % (self.funcname, self.code) sep = '' for arg in args: if arg in self.namespace and isinstance(self.namespace[arg], _Signal): signame = self.namespace[arg]._name s += sep sep = ',' s += "\n .%s(%s)" % (arg, signame) s += "\n);\n\n" return s class _UserVhdlInstance(_UserVhdlCode): def __str__(self): args = inspect.getargspec(self.func)[0] s = "%s: entity work.%s(MyHDL)\n" % (self.code, self.funcname) s += " port map (" sep = '' for arg in args: if arg in self.namespace and 
isinstance(self.namespace[arg], _Signal): signame = self.namespace[arg]._name s += sep sep = ',' s += "\n %s=>%s" % (arg, signame) s += "\n );\n\n" return s def _addUserCode(specs, arg, funcname, func, frame): classMap = { '__verilog__': _UserVerilogCodeDepr, '__vhdl__': _UserVhdlCodeDepr, 'verilog_code': _UserVerilogCode, 'vhdl_code': _UserVhdlCode, 'verilog_instance': _UserVerilogInstance, 'vhdl_instance': _UserVhdlInstance, } namespace = frame.f_globals.copy() namespace.update(frame.f_locals) sourcefile = inspect.getsourcefile(frame) sourceline = inspect.getsourcelines(frame)[1] for hdl in _userCodeMap: oldspec = "__%s__" % hdl codespec = "%s_code" % hdl instancespec = "%s_instance" % hdl spec = None # XXX add warning logic if instancespec in specs: spec = instancespec elif codespec in specs: spec = codespec elif oldspec in specs: spec = oldspec if spec: assert id(arg) not in _userCodeMap[hdl] code = specs[spec] _userCodeMap[hdl][id(arg)] = classMap[spec](code, namespace, funcname, func, sourcefile, sourceline) class _CallFuncVisitor(object): def __init__(self): self.linemap = {} def visitAssign(self, node): if isinstance(node.expr, ast.Call): self.lineno = None self.visit(node.expr) self.linemap[self.lineno] = node.lineno def visitName(self, node): self.lineno = node.lineno class _HierExtr(object): def __init__(self, name, dut, *args, **kwargs): global _profileFunc _memInfoMap.clear() for hdl in _userCodeMap: _userCodeMap[hdl].clear() self.skipNames = ('always_comb', 'instance', 'always_seq', '_always_seq_decorator', 'always', '_always_decorator', 'instances', 'processes', 'posedge', 'negedge') self.skip = 0 self.hierarchy = hierarchy = [] self.absnames = absnames = {} self.level = 0 _profileFunc = self.extractor sys.setprofile(_profileFunc) _top = dut(*args, **kwargs) sys.setprofile(None) if not hierarchy: raise ExtractHierarchyError(_error.NoInstances) self.top = _top # streamline hierarchy hierarchy.reverse() # walk the hierarchy to define relative and 
absolute names names = {} top_inst = hierarchy[0] obj, subs = top_inst.obj, top_inst.subs names[id(obj)] = name absnames[id(obj)] = name if not top_inst.level == 1: raise ExtractHierarchyError(_error.InconsistentToplevel % (top_inst.level, name)) for inst in hierarchy: obj, subs = inst.obj, inst.subs if id(obj) not in names: raise ExtractHierarchyError(_error.InconsistentHierarchy) inst.name = names[id(obj)] tn = absnames[id(obj)] for sn, so in subs: names[id(so)] = sn absnames[id(so)] = "%s_%s" % (tn, sn) if isinstance(so, (tuple, list)): for i, soi in enumerate(so): sni = "%s_%s" % (sn, i) names[id(soi)] = sni absnames[id(soi)] = "%s_%s_%s" % (tn, sn, i) def extractor(self, frame, event, arg): if event == "call": funcname = frame.f_code.co_name # skip certain functions if funcname in self.skipNames: self.skip += 1 if not self.skip: self.level += 1 elif event == "return": funcname = frame.f_code.co_name func = frame.f_globals.get(funcname) if func is None: # Didn't find a func in the global space, try the local "self" # argument and see if it has a method called *funcname* obj = frame.f_locals.get('self') if hasattr(obj, funcname): func = getattr(obj, funcname) if not self.skip: isGenSeq = _isGenSeq(arg) if isGenSeq: specs = {} for hdl in _userCodeMap: spec = "__%s__" % hdl if spec in frame.f_locals and frame.f_locals[spec]: specs[spec] = frame.f_locals[spec] spec = "%s_code" % hdl if func and hasattr(func, spec) and \ getattr(func, spec): specs[spec] = getattr(func, spec) spec = "%s_instance" % hdl if func and hasattr(func, spec) and \ getattr(func, spec): specs[spec] = getattr(func, spec) if specs: _addUserCode(specs, arg, funcname, func, frame) # building hierarchy only makes sense if there are generators if isGenSeq and arg: constdict = {} sigdict = {} memdict = {} romdict = {} symdict = frame.f_globals.copy() symdict.update(frame.f_locals) cellvars = [] # All nested functions will be in co_consts if func: local_gens = [] consts = func.__code__.co_consts for 
item in _flatten(arg): genfunc = _genfunc(item) if genfunc.__code__ in consts: local_gens.append(item) if local_gens: cellvarlist = _getCellVars(symdict, local_gens) cellvars.extend(cellvarlist) objlist = _resolveRefs(symdict, local_gens) cellvars.extend(objlist) for n, v in symdict.items(): # extract signals and memories # also keep track of whether they are used in # generators only include objects that are used in # generators if isinstance(v, _Signal): sigdict[n] = v if n in cellvars: v._markUsed() elif isinstance(v, (integer_types, float, EnumItemType)): constdict[n] = _Constant(n, v) elif _isListOfSigs(v): m = _makeMemInfo(v) memdict[n] = m if n in cellvars: m._used = True elif _isTupleOfInts(v): m = _makeRomInfo(n, v) romdict[n] = m if n in cellvars: m._used = True elif _isTupleOfFloats(v): m = _makeRomInfo(n, v) romdict[n] = m if n in cellvars: m._used = True subs = [] for n, sub in frame.f_locals.items(): for elt in _inferArgs(arg): if elt is sub: subs.append((n, sub)) inst = _Instance(self.level, arg, subs, constdict, sigdict, memdict, romdict, func, frame) self.hierarchy.append(inst) self.level -= 1 if funcname in self.skipNames: self.skip -= 1 def _inferArgs(arg): c = [arg] if isinstance(arg, (tuple, list)): c += list(arg) return c
lgpl-2.1
2,900,433,588,535,602,000
31.022774
79
0.498222
false
4.115753
false
false
false
tseaver/google-cloud-python
bigquery/samples/query_script.py
1
2249
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. def query_script(client): # [START bigquery_query_script] # TODO(developer): Import the client library. # from google.cloud import bigquery # TODO(developer): Construct a BigQuery client object. # client = bigquery.Client() # Run a SQL script. sql_script = """ -- Declare a variable to hold names as an array. DECLARE top_names ARRAY<STRING>; -- Build an array of the top 100 names from the year 2017. SET top_names = ( SELECT ARRAY_AGG(name ORDER BY number DESC LIMIT 100) FROM `bigquery-public-data.usa_names.usa_1910_2013` WHERE year = 2000 ); -- Which names appear as words in Shakespeare's plays? SELECT name AS shakespeare_name FROM UNNEST(top_names) AS name WHERE name IN ( SELECT word FROM `bigquery-public-data.samples.shakespeare` ); """ parent_job = client.query(sql_script) # Wait for the whole script to finish. rows_iterable = parent_job.result() print("Script created {} child jobs.".format(parent_job.num_child_jobs)) # Fetch result rows for the final sub-job in the script. rows = list(rows_iterable) print( "{} of the top 100 names from year 2000 also appear in Shakespeare's works.".format( len(rows) ) ) # Fetch jobs created by the SQL script. child_jobs_iterable = client.list_jobs(parent_job=parent_job) for child_job in child_jobs_iterable: child_rows = list(child_job.result()) print( "Child job with ID {} produced {} row(s).".format( child_job.job_id, len(child_rows) ) ) # [END bigquery_query_script]
apache-2.0
-3,223,353,715,176,114,000
31.594203
92
0.665185
false
3.805415
false
false
false
fhinkel/TweetOptimizer
python/flask_API.py
1
3540
# -*- coding: utf-8 -*- ''' Created on Sep 29, 2014 @author: tim ''' from flask import Flask from flask import request from flask import Response from flask.ext.cors import CORS from relation_calculator import Relation_Calculator import sys import json import re from crossdomain import crossdomain http_regex = re.compile(r'''(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:'".,<>?«»“”‘’]))''', re.DOTALL) app = Flask(__name__) cors = CORS(app, resources={r"/api/*": {"origins": "*"}}) base_path = sys.argv[0].replace('flask_API.py','') rel = Relation_Calculator() def replacePunctuation(text): text = http_regex.sub('',text) text = text.replace(',','').replace('.','').replace(';','').replace('(','').replace(')','') text = text.replace(':','').replace('!','').replace('?','').replace('RT','') return text def getRelatedTerms(search_term, level): keywords = rel.get_keywords(search_term.lower(), searchtype = level) data = [] print keywords for items in keywords: dictValue = {} dictValue['tag'] = items[0] dictValue['ratio'] = items[1] dictValue['confidence'] = items[2] data.append(dictValue) return data @app.route('/relatedHashtags', methods=['OPTIONS', 'GET', 'POST']) @crossdomain(origin='*') def getRelatedHashtags(): global rel jsondata = request.get_json(force=True) hashtag = jsondata['term'] return Response(json.dumps(getRelatedTerms(hashtag, 0)), mimetype='application/json') @app.route('/tweetToKeywordList', methods=['OPTIONS', 'GET', 'POST']) @crossdomain(origin='*') def tweetToRelatedWords(): global rel jsondata = request.get_json(force=True) tweet = jsondata['tweet'] tweet = replacePunctuation(tweet) print tweet keywordsList = [] for word in tweet.split(' '): if any(x.isupper() for x in word): keywordsList.append(rel.get_keywords(word.lower(), searchtype = 2)[0:10]) dictKeywords = {'keywordList' : keywordsList} return Response(json.dumps(dictKeywords), 
mimetype='application/json') @app.route('/relatedUsers', methods=['OPTIONS', 'GET', 'POST']) @crossdomain(origin='*') def getRelatedUser(): global rel jsondata = request.get_json(force=True) hashtag = jsondata['term'] return Response(json.dumps(getRelatedTerms(hashtag, 1)), mimetype='application/json') @app.route('/relatedWords', methods=['OPTIONS', 'GET', 'POST']) @crossdomain(origin='*') def getRelatedWords(): global rel jsondata = request.get_json(force=True) hashtag = jsondata['term'] return Response(json.dumps(getRelatedTerms(hashtag, 3)), mimetype='application/json') @app.route('/relatedAll', methods=['OPTIONS', 'GET', 'POST']) @crossdomain(origin='*') def getRelatedAll(): global rel jsondata = request.get_json(force=True) hashtag = jsondata['term'] return Response(json.dumps(getRelatedTerms(hashtag, 4)), mimetype='application/json') @app.route('/wordCount', methods=['OPTIONS', 'GET', 'POST']) @crossdomain(origin='*') def getWordCount(): global rel jsondata = request.get_json(force=True) word = jsondata['term'] return Response(json.dumps({'count' : rel.get_word_count(replacePunctuation(word))}), mimetype='application/json') if __name__ == "__main__": app.debug = True app.run(threaded=True)
mit
6,966,130,194,898,723,000
30.247788
224
0.622096
false
3.39097
false
false
false
TzuChieh/Photon-v2
BlenderAddon/PhotonBlend/bmodule/material.py
1
5390
from ..utility import settings from . import ui from . import common from . import node import sys import bpy class PhMaterialHeaderPanel(bpy.types.Panel): bl_label = "" bl_context = "material" bl_space_type = "PROPERTIES" bl_region_type = "WINDOW" bl_options = {"HIDE_HEADER"} COMPATIBLE_ENGINES = {settings.renderer_id_name} @classmethod def poll(cls, b_context): render_settings = b_context.scene.render return (render_settings.engine in cls.COMPATIBLE_ENGINES and (b_context.material or b_context.object)) def draw(self, b_context): layout = self.layout mat = b_context.material obj = b_context.object mat_slot = b_context.material_slot space = b_context.space_data if obj: is_sortable = len(obj.material_slots) > 1 rows = 1 if is_sortable: rows = 4 row = layout.row() row.template_list("MATERIAL_UL_matslots", "", obj, "material_slots", obj, "active_material_index", rows = rows) col = row.column(align = True) col.operator("object.material_slot_add", icon = "ZOOMIN", text = "") col.operator("object.material_slot_remove", icon = "ZOOMOUT", text = "") col.menu("MATERIAL_MT_specials", icon = "DOWNARROW_HLT", text = "") if is_sortable: col.separator() col.operator("object.material_slot_move", icon = "TRIA_UP", text = "").direction = "UP" col.operator("object.material_slot_move", icon = "TRIA_DOWN", text = "").direction = "DOWN" if obj.mode == 'EDIT': row = layout.row(align = True) row.operator("object.material_slot_assign", text = "Assign") row.operator("object.material_slot_select", text = "Select") row.operator("object.material_slot_deselect", text = "Deselect") split = layout.split(percentage = 0.65) if obj: split.template_ID(obj, "active_material", new = "material.new") row = split.row() if mat_slot: row.prop(mat_slot, "link", text = "") else: row.label() elif mat: split.template_ID(space, "pin_id") split.separator() class PhAddMaterialNodesOperator(bpy.types.Operator): """Adds a node tree for a material.""" bl_idname = "photon.add_material_nodes" bl_label = "Add Material 
Nodes" @classmethod def poll(cls, b_context): b_material = getattr(b_context, "material", None) node_tree = cls.__get_node_tree(b_material) return b_material is not None and node_tree is None def execute(self, b_context): b_material = b_context.material node_tree_name = common.mangled_node_tree_name(b_material) node_tree = bpy.data.node_groups.new(node_tree_name, type = "PH_MATERIAL_NODE_TREE") # Since we use node tree name to remember which node tree is used by a material, # the node tree's use count will not be increased, resulting in data not being # stored in .blend file sometimes. Use fake user is sort of hacked. node_tree.use_fake_user = True b_material.ph_node_tree_name = node_tree_name return {"FINISHED"} @classmethod def __get_node_tree(cls, b_material): if b_material is None: return None else: return bpy.data.node_groups.get(b_material.ph_node_tree_name, None) class PhMaterialPanel(bpy.types.Panel): bl_space_type = "PROPERTIES" bl_region_type = "WINDOW" bl_context = "material" COMPATIBLE_ENGINES = {settings.renderer_id_name, settings.cycles_id_name} @classmethod def poll(cls, b_context): render_settings = b_context.scene.render return (render_settings.engine in cls.COMPATIBLE_ENGINES and b_context.material) class PhMainPropertyPanel(PhMaterialPanel): """ Setting up primary material properties. """ bl_label = "PR - Material" def draw(self, context): layout = self.layout layout.operator(PhAddMaterialNodesOperator.bl_idname) node_tree = node.find_node_tree(context.material) output_node = node.find_output_node(node_tree) if output_node is not None: for input_socket in output_node.inputs: layout.template_node_view(node_tree, output_node, input_socket) # ui.material.display_blender_props(layout, material) class PhOptionPanel(PhMaterialPanel): """ Additional options for tweaking the material. 
""" bl_label = "PR - Options" bpy.types.Material.ph_is_emissive = bpy.props.BoolProperty( name = "Emissive", description = "whether consider current material's emissivity or not", default = False ) bpy.types.Material.ph_emitted_radiance = bpy.props.FloatVectorProperty( name = "Radiance", description = "radiance emitted by the surface", default = [0.0, 0.0, 0.0], min = 0.0, max = sys.float_info.max, subtype = "COLOR", size = 3 ) def draw(self, context): material = context.material layout = self.layout row = layout.row() row.prop(material, "ph_is_emissive") row.prop(material, "ph_emitted_radiance") MATERIAL_PANEL_TYPES = [ PhMaterialHeaderPanel, PhMainPropertyPanel, PhOptionPanel ] MATERIAL_OPERATOR_TYPES = [ PhAddMaterialNodesOperator ] def register(): ui.material.define_blender_props() class_types = MATERIAL_PANEL_TYPES + MATERIAL_OPERATOR_TYPES for class_type in class_types: bpy.utils.register_class(class_type) def unregister(): class_types = MATERIAL_PANEL_TYPES + MATERIAL_OPERATOR_TYPES for class_type in class_types: bpy.utils.unregister_class(class_type) if __name__ == "__main__": register()
mit
7,535,028,260,935,148,000
25.165049
114
0.682004
false
2.98615
false
false
false
inyaka/catalog
context_processors.py
1
1986
from django.utils.text import capfirst from django.db.models import get_models from django.utils.safestring import mark_safe from django.contrib.admin import ModelAdmin # get_models returns all the models, but there are # some which we would like to ignore IGNORE_MODELS = ( "sites", "sessions", "admin", "permission", "contenttypes", "thumbnail", "products_image", "auth_permission", "static_precompiler", ) def app_list(request): ''' Get all models and add them to the context apps variable. ''' user = request.user app_dict = {} admin_class = ModelAdmin for model in get_models(): model_admin = admin_class(model, None) app_label = model._meta.app_label db_table= model._meta.db_table if (app_label in IGNORE_MODELS) or (db_table in IGNORE_MODELS): continue has_module_perms = user.has_module_perms(app_label) if has_module_perms: perms = model_admin.get_model_perms(request) # Check whether user has any perm for this module. # If so, add the module to the model_list. if True in perms.values(): model_dict = { 'name': capfirst(model._meta.verbose_name_plural), 'admin_url': mark_safe('%s/%s/' % (app_label, model.__name__.lower())), } if app_label in app_dict: app_dict[app_label]['models'].append(model_dict) else: app_dict[app_label] = { 'name': app_label.title(), 'app_url': app_label + '/', 'has_module_perms': has_module_perms, 'models': [model_dict], } app_list = app_dict.values() app_list.sort(key=lambda x: x['name']) for app in app_list: app['models'].sort(key=lambda x: x['name']) return {'apps': app_list}
gpl-3.0
3,297,654,345,102,705,000
34.482143
91
0.549345
false
3.91716
false
false
false
comodojo/rpcserver
docs/source/conf.py
1
5364
# -*- coding: utf-8 -*- # # Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/master/config # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # # import os # import sys # sys.path.insert(0, os.path.abspath('.')) # -- Project information ----------------------------------------------------- project = u'comodojo/rpcserver' copyright = u'2018, Marco Giovinazzi' author = u'Marco Giovinazzi' # The short X.Y version version = u'' # The full version, including alpha/beta/rc tags release = u'2.0.0' # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The master toctree document. master_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
# This pattern also affects html_static_path and html_extra_path . exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} html_favicon = '_static/favicon.ico' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = 'comodojorpcserverdoc' # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ (master_doc, 'comodojorpcserver.tex', u'comodojo/rpcserver documentation', u'Marco Giovinazzi', 'manual'), ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'comodojorpcserver', u'comodojo/rpcserver documentation', [author], 1) ] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'comodojorpcserver', u'comodojo/rpcserver documentation', author, 'comodojorpcserver', 'Framework-independent XML and JSON(2.0) RPC server', 'Miscellaneous'), ] # -- Options for Epub output ------------------------------------------------- # Bibliographic Dublin Core info. epub_title = project epub_author = author epub_publisher = author epub_copyright = copyright # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html'] epub_tocdepth = 2
mit
525,443,802,569,360,100
28.96648
87
0.655108
false
3.909621
true
false
false
motord/elbowgrease
settings.py
1
2021
import logging import tornado import tornado.template import os from tornado.options import define, options # import environment # import logconfig # Make filepaths relative to settings. path = lambda root,*a: os.path.join(root, *a) ROOT = os.path.dirname(os.path.abspath(__file__)) define("port", default=8888, help="run on the given port", type=int) define("config", default=None, help="tornado config file") define("debug", default=False, help="debug mode") tornado.options.parse_command_line() MEDIA_ROOT = path(ROOT, 'media') TEMPLATE_ROOT = path(ROOT, 'templates') # Deployment Configuration class DeploymentType: PRODUCTION = "PRODUCTION" DEV = "DEV" SOLO = "SOLO" STAGING = "STAGING" dict = { SOLO: 1, PRODUCTION: 2, DEV: 3, STAGING: 4 } if 'DEPLOYMENT_TYPE' in os.environ: DEPLOYMENT = os.environ['DEPLOYMENT_TYPE'].upper() else: DEPLOYMENT = DeploymentType.SOLO settings = {} settings['debug'] = DEPLOYMENT != DeploymentType.PRODUCTION or options.debug # settings['static_path'] = MEDIA_ROOT settings['cookie_secret'] = "your-cookie-secret" # settings['xsrf_cookies'] = True settings['template_loader'] = tornado.template.Loader(TEMPLATE_ROOT) SYSLOG_TAG = "boilerplate" SYSLOG_FACILITY = logging.handlers.SysLogHandler.LOG_LOCAL2 # See PEP 391 and logconfig for formatting help. Each section of LOGGERS # will get merged into the corresponding section of log_settings.py. # Handlers and log levels are set up automatically based on LOG_LEVEL and DEBUG # unless you set them here. Messages will not propagate through a logger # unless propagate: True is set. LOGGERS = { 'loggers': { 'boilerplate': {}, }, } if settings['debug']: LOG_LEVEL = logging.DEBUG else: LOG_LEVEL = logging.INFO USE_SYSLOG = DEPLOYMENT != DeploymentType.SOLO # logconfig.initialize_logging(SYSLOG_TAG, SYSLOG_FACILITY, LOGGERS, # LOG_LEVEL, USE_SYSLOG) if options.config: tornado.options.parse_config_file(options.config)
mit
4,620,776,737,570,405,000
27.069444
79
0.711529
false
3.346026
true
false
false
iosonofabio/haplotree
src/treeviewer.py
1
16230
# vim: fdm=marker ''' author: Fabio Zanini date: 08/12/14 content: Plot tree of haplotypes. ''' # Modules from __future__ import print_function import os import argparse import numpy as np import seaborn as sns import matplotlib.pyplot as plt # Functions def tree_from_json(json_file): '''Convert JSON into a Biopython tree''' from Bio import Phylo import json def node_from_json(json_data, node): '''Biopython Clade from json (for recursive call)''' for attr in json_data: val = json_data[attr] if attr == 'children': for sub_json in val: child = Phylo.BaseTree.Clade() node.clades.append(child) node_from_json(sub_json, child) else: if attr == 'name': node.__setattr__(attr, str(val)) continue try: node.__setattr__(attr, float(val)) except: node.__setattr__(attr, val) try: with open(json_file, 'r') as infile: json_data = json.load(infile) except IOError: raise IOError("Cannot open "+json_file) tree = Phylo.BaseTree.Tree() node_from_json(json_data, tree.root) tree.root.branch_length=0.01 return tree def draw_tree(tree, label_func=str, do_show=True, show_confidence=True, # For power users x_offset=0, y_offset=0, axes=None, branch_labels=None, *args, **kwargs): """Plot the given tree using matplotlib (or pylab). The graphic is a rooted tree, drawn with roughly the same algorithm as draw_ascii. Additional keyword arguments passed into this function are used as pyplot options. The input format should be in the form of: pyplot_option_name=(tuple), pyplot_option_name=(tuple, dict), or pyplot_option_name=(dict). Example using the pyplot options 'axhspan' and 'axvline': >>> Phylo.draw(tree, axhspan=((0.25, 7.75), {'facecolor':'0.5'}), ... axvline={'x':'0', 'ymin':'0', 'ymax':'1'}) Visual aspects of the plot can also be modified using pyplot's own functions and objects (via pylab or matplotlib). In particular, the pyplot.rcParams object can be used to scale the font size (rcParams["font.size"]) and line width (rcParams["lines.linewidth"]). 
:Parameters: label_func : callable A function to extract a label from a node. By default this is str(), but you can use a different function to select another string associated with each node. If this function returns None for a node, no label will be shown for that node. do_show : bool Whether to show() the plot automatically. show_confidence : bool Whether to display confidence values, if present on the tree. axes : matplotlib/pylab axes If a valid matplotlib.axes.Axes instance, the phylogram is plotted in that Axes. By default (None), a new figure is created. branch_labels : dict or callable A mapping of each clade to the label that will be shown along the branch leading to it. By default this is the confidence value(s) of the clade, taken from the ``confidence`` attribute, and can be easily toggled off with this function's ``show_confidence`` option. But if you would like to alter the formatting of confidence values, or label the branches with something other than confidence, then use this option. 
""" try: import matplotlib.pyplot as plt except ImportError: try: import pylab as plt except ImportError: from Bio import MissingPythonDependencyError raise MissingPythonDependencyError( "Install matplotlib or pylab if you want to use draw.") import matplotlib.collections as mpcollections # Arrays that store lines for the plot of clades horizontal_linecollections = [] vertical_linecollections = [] # Options for displaying branch labels / confidence def conf2str(conf): if int(conf) == conf: return str(int(conf)) return str(conf) if not branch_labels: if show_confidence: def format_branch_label(clade): if hasattr(clade, 'confidences'): # phyloXML supports multiple confidences return '/'.join(conf2str(cnf.value) for cnf in clade.confidences) if clade.confidence: return conf2str(clade.confidence) return None else: def format_branch_label(clade): return None elif isinstance(branch_labels, dict): def format_branch_label(clade): return branch_labels.get(clade) else: assert callable(branch_labels), \ "branch_labels must be either a dict or a callable (function)" format_branch_label = branch_labels # Layout def get_x_positions(tree): """Create a mapping of each clade to its horizontal position. Dict of {clade: x-coord} """ depths = tree.depths() # If there are no branch lengths, assume unit branch lengths if not max(depths.values()): depths = [x_offset + depth for depth in tree.depths(unit_branch_lengths=True)] return depths def get_y_positions(tree): """Create a mapping of each clade to its vertical position. Dict of {clade: y-coord}. Coordinates are negative, and integers for tips. 
""" maxheight = tree.count_terminals() # Rows are defined by the tips heights = dict((tip, maxheight - i + y_offset) for i, tip in enumerate(reversed(tree.get_terminals()))) # Internal nodes: place at midpoint of children def calc_row(clade): for subclade in clade: if subclade not in heights: calc_row(subclade) # Closure over heights heights[clade] = (heights[clade.clades[0]] + heights[clade.clades[-1]]) / 2.0 if tree.root.clades: calc_row(tree.root) return heights x_posns = get_x_positions(tree) y_posns = get_y_positions(tree) # The function draw_clade closes over the axes object if axes is None: fig = plt.figure() axes = fig.add_subplot(1, 1, 1) elif not isinstance(axes, plt.matplotlib.axes.Axes): raise ValueError("Invalid argument for axes: %s" % axes) def draw_clade_lines(use_linecollection=False, orientation='horizontal', y_here=0, x_start=0, x_here=0, y_bot=0, y_top=0, color='black', lw='.1'): """Create a line with or without a line collection object. Graphical formatting of the lines representing clades in the plot can be customized by altering this function. 
""" if (use_linecollection is False and orientation == 'horizontal'): axes.hlines(y_here, x_start, x_here, color=color, lw=lw) elif (use_linecollection is True and orientation == 'horizontal'): horizontal_linecollections.append(mpcollections.LineCollection( [[(x_start, y_here), (x_here, y_here)]], color=color, lw=lw),) elif (use_linecollection is False and orientation == 'vertical'): axes.vlines(x_here, y_bot, y_top, color=color) elif (use_linecollection is True and orientation == 'vertical'): vertical_linecollections.append(mpcollections.LineCollection( [[(x_here, y_bot), (x_here, y_top)]], color=color, lw=lw),) def draw_clade(clade, x_start, color, lw): """Recursively draw a tree, down from the given clade.""" x_here = x_posns[clade] y_here = y_posns[clade] # phyloXML-only graphics annotations if hasattr(clade, 'color') and clade.color is not None: color = clade.color.to_hex() if hasattr(clade, 'width') and clade.width is not None: lw = clade.width * plt.rcParams['lines.linewidth'] # Draw a horizontal line from start to here draw_clade_lines(use_linecollection=True, orientation='horizontal', y_here=y_here, x_start=x_start, x_here=x_here, color=color, lw=lw) # Add node/taxon labels label = label_func(clade) if label not in (None, clade.__class__.__name__): axes.text(x_here, y_here, ' %s' % label, verticalalignment='center') # Add label above the branch (optional) conf_label = format_branch_label(clade) if conf_label: axes.text(0.5 * (x_start + x_here), y_here, conf_label, fontsize='small', horizontalalignment='center') if clade.clades: # Draw a vertical line connecting all children y_top = y_posns[clade.clades[0]] y_bot = y_posns[clade.clades[-1]] # Only apply widths to horizontal lines, like Archaeopteryx draw_clade_lines(use_linecollection=True, orientation='vertical', x_here=x_here, y_bot=y_bot, y_top=y_top, color=color, lw=lw) # Draw descendents for child in clade: draw_clade(child, x_here, color, lw) draw_clade(tree.root, 0, 'k', 
plt.rcParams['lines.linewidth']) # If line collections were used to create clade lines, here they are added # to the pyplot plot. for i in horizontal_linecollections: axes.add_collection(i) for i in vertical_linecollections: axes.add_collection(i) # Aesthetics if hasattr(tree, 'name') and tree.name: axes.set_title(tree.name) axes.set_xlabel('branch length') axes.set_ylabel('taxa') # Add margins around the tree to prevent overlapping the axes xmax = max(x_posns.values()) axes.set_xlim(-0.05 * xmax, 1.25 * xmax) # Also invert the y-axis (origin at the top) # Add a small vertical margin, but avoid including 0 and N+1 on the y axis axes.set_ylim(max(y_posns.values()) + 0.8, 0.2) # Parse and process key word arguments as pyplot options for key, value in kwargs.items(): try: # Check that the pyplot option input is iterable, as required [i for i in value] except TypeError: raise ValueError('Keyword argument "%s=%s" is not in the format ' 'pyplot_option_name=(tuple), pyplot_option_name=(tuple, dict),' ' or pyplot_option_name=(dict) ' % (key, value)) if isinstance(value, dict): getattr(plt, str(key))(**dict(value)) elif not (isinstance(value[0], tuple)): getattr(plt, str(key))(*value) elif (isinstance(value[0], tuple)): getattr(plt, str(key))(*value[0], **dict(value[1])) if do_show: plt.show() def load_tree(filename, fmt=None): '''Load a tree from file''' from Bio import Phylo if fmt is None: fmt = filename.split('.')[-1].lower() if fmt == 'json': tree = tree_from_json(filename) elif fmt == 'newick': def set_frequency(node): if node.name is not None: try: frequency = float(node.name.split(':')[-1]) except ValueError: pass else: node.frequency = frequency for child in node.clades: set_frequency(child) tree = Phylo.read(filename, 'newick') set_frequency(tree.root) else: raise NotImplemented return tree def plot_haplotype_trees(datum, VERBOSE=0, tree_label='root', draw_legend_sizes=True, draw_scale_bar=True, fig_filename=None): '''Plot tree of minor haplotypes in a typical 
patient''' from operator import attrgetter import seaborn as sns from matplotlib import pyplot as plt plt.ioff() if VERBOSE: print('Plot haplotype tree') fig, ax = plt.subplots(1, 1, figsize=(7, 5)) sns.set_style('white') ax.grid(False) x_offset = 0 y_offset = 35 y_padding = 15 tree = getattr(datum, tree_label) tree.root.branch_length = 0.01 depths = tree.depths() maxdepth = max(depths.values()) mindepth = min(depths.values()) # Normalize frequencies freqsum = sum(leaf.frequency for leaf in tree.get_terminals()) for leaf in tree.get_terminals(): leaf.frequency = 1.0 * leaf.frequency / freqsum # Collect data for circle plot rmin = 5 rmax = 150 rfun = lambda hf: rmin + (rmax - rmin) * (hf**(0.5)) data_circles = [] for il, leaf in enumerate(tree.get_terminals(), 1): hf = leaf.frequency r = rfun(hf) y = il + y_offset x = depths[leaf] + x_offset data_circles.append((x, y, 2 * r, 'grey', 'black')) # Draw the tree draw_tree(tree, show_confidence=False, label_func=lambda x: '', axes=ax, x_offset=x_offset, y_offset=y_offset, do_show=False) # Add circles to the leaves (x, y, s, c,cs) = zip(*data_circles) ax.scatter(x, y, s=s, c=c, edgecolor=cs, zorder=2) ax.set_xlim(-0.04 * maxdepth, 1.04 * maxdepth) y_offset += tree.count_terminals() + y_padding ax.set_ylim((y_offset + y_padding, 0)) ax.set_ylabel('') ax.set_yticklabels([]) ax.set_axis_off() ax.xaxis.set_tick_params(labelsize=16) ax.set_xlabel('Genetic distance [changes / site]', fontsize=16, labelpad=10) # Draw a "legend" for sizes if draw_legend_sizes: datal = [{'hf': 0.05, 'label': '5%'}, {'hf': 0.20, 'label': '20%'}, {'hf': 1.00, 'label': '100%'}] ax.text(0.98 * maxdepth, 0.03 * ax.get_ylim()[0], 'Haplotype frequency:', fontsize=16, ha='right') for idl, datuml in enumerate(datal): r = rfun(datuml['hf']) y = (0.07 + 0.07 * idl) * ax.get_ylim()[0] ax.scatter(0.85 * maxdepth, y, s=r, facecolor='grey', edgecolor='black') ax.text(0.98 * maxdepth, y + 0.02 * ax.get_ylim()[0], datuml['label'], ha='right', fontsize=14) # Draw 
scale bar if draw_scale_bar: xbar = (0.01 + 0.3 * (len(datal) >= 9)) * maxdepth ybar = 0.02 * ax.get_ylim()[0] lbar = 0.05 * maxdepth lbar_label = '{:.1G}'.format(lbar) lbar = float(lbar_label) ax.plot([xbar, xbar + lbar], [ybar, ybar], lw=4, c='k') ax.text(xbar + 0.5 * lbar, ybar + 0.08 * ax.get_ylim()[0], lbar_label, fontsize=14, ha='center') plt.tight_layout(rect=(0, -0.32, 1, 1)) if fig_filename: fig.savefig(fig_filename) plt.close(fig) else: plt.show() # Script if __name__ == '__main__': parser = argparse.ArgumentParser(description='Plot a haplotype tree', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('filename', help='Filename with the tree in JSON format') parser.add_argument('--verbose', type=int, default=2, help='Verbosity level [0-4]') parser.add_argument('--outputfile', default='', help='Output file for the figure') args = parser.parse_args() tree = load_tree(args.filename) plot_haplotype_trees(tree, VERBOSE=args.verbose, fig_filename=args.outputfile)
mit
-1,316,847,662,179,091,000
34.986696
92
0.567344
false
3.867048
false
false
false
GuLinux/PlanetaryImager
scripting_client/planetaryimager/network/driver_protocol.py
1
3043
from .protocol import *
import PyQt5


class Camera:
    """Lightweight value object for one camera advertised by the driver."""

    def __init__(self, camera_dict):
        # Wire format uses single-letter keys: 'n' = display name, 'a' = address.
        self.name = camera_dict['n']
        self.address = camera_dict['a']

    def __str__(self):
        return '{} [{}]'.format(self.name, self.address)

    def __repr__(self):
        return self.__str__()


# NOTE(review): the methods below reference `self.client` and `self.packet_*`
# attributes that are not defined in this file — presumably they are generated
# by the @protocol decorator from the packet names listed here; verify against
# .protocol before renaming anything.
@protocol(area='Driver', packets=['CameraList', 'CameraListReply', 'GetCameraName', 'GetCameraNameReply', 'ConnectCamera', 'ConnectCameraReply',
                                  'CloseCamera', 'signalDisconnected', 'signalCameraConnected', 'signalFPS', 'signalTemperature', 'signalControlChanged',
                                  'GetControls', 'GetControlsReply', 'GetProperties', 'GetPropertiesReply', 'StartLive', 'StartLiveReply', 'SetControl',
                                  'SetROI', 'ClearROI'])
class DriverProtocol:
    """Client-side wrappers for the 'Driver' packet area of the protocol."""

    def camera_list(self):
        """Round-trip a CameraList request; returns a list of Camera objects."""
        return [Camera(x) for x in self.client.round_trip(self.packet_cameralist.packet(), self.packet_cameralistreply).variant]

    def connect_camera(self, camera):
        # Fire-and-forget; success is reported via the signalCameraConnected
        # packet (see on_camera_connected below).
        self.client.send(self.packet_connectcamera.packet(variant=camera.address))

    def close_camera(self):
        self.client.send(self.packet_closecamera.packet())

    def get_camera_name(self):
        """Return the currently connected camera's name (blocking round trip)."""
        return self.client.round_trip(self.packet_getcameraname.packet(), self.packet_getcameranamereply).variant

    def get_controls(self):
        """Return the camera's control descriptors (blocking round trip)."""
        return self.client.round_trip(self.packet_getcontrols.packet(), self.packet_getcontrolsreply).variant

    def set_control(self, control):
        self.client.send(self.packet_setcontrol.packet(variant=control))

    def set_roi(self, x, y, width, height):
        # The region of interest travels as a serialized QRect in the variant.
        self.client.send(self.packet_setroi.packet(variant=PyQt5.QtCore.QRect(x, y, width, height)))

    def clear_roi(self):
        self.client.send(self.packet_clearroi.packet())

    def get_properties(self):
        """Return the camera's static properties (blocking round trip)."""
        return self.client.round_trip(self.packet_getproperties.packet(), self.packet_getpropertiesreply).variant

    def start_live(self):
        """Start live capture; returns the StartLiveReply packet itself."""
        return self.client.round_trip(self.packet_startlive.packet(), self.packet_startlivereply)

    def on_signal_fps(self, callback):
        """Register *callback(fps)* for FPS update signals."""
        def dispatch(packet):
            callback(packet.variant)
        Protocol.register_packet_handler(self.client, self.packet_signalfps, dispatch)

    def on_camera_connected(self, callback):
        """Register a no-argument *callback* fired when a camera connects."""
        def dispatch(_):
            callback()
        Protocol.register_packet_handler(self.client, self.packet_signalcameraconnected, dispatch)

    def on_camera_disconnected(self, callback):
        """Register a no-argument *callback* fired when the camera disconnects."""
        def dispatch(_):
            callback()
        Protocol.register_packet_handler(self.client, self.packet_signaldisconnected, dispatch)

    def on_signal_temperature(self, callback):
        """Register *callback(temperature)* for temperature update signals."""
        def dispatch(packet):
            callback(packet.variant)
        Protocol.register_packet_handler(self.client, self.packet_signaltemperature, dispatch)

    def on_control_changed(self, callback):
        """Register *callback(control)* fired when a camera control changes."""
        def dispatch(packet):
            callback(packet.variant)
        Protocol.register_packet_handler(self.client, self.packet_signalcontrolchanged, dispatch)
gpl-3.0
8,043,753,285,607,263,000
41.263889
155
0.683536
false
3.891304
false
false
false
bioinf-jku/SNNs
figure1/utils.py
1
4576
# -*- coding: utf-8 -*-
'''
Tensorflow Implementation of the Scaled ELU function and Dropout

NOTE(review): written against the TF 1.x private/contrib APIs
(tensorflow.contrib.layers, tensorflow.python.*); it will not run on TF 2.x.
'''

import numbers
import numpy as np
import tensorflow as tf
from tensorflow.contrib import layers
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.layers import utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops


# (1) scale inputs to zero mean and unit variance


# (2) use SELUs
def selu(x):
    """Scaled Exponential Linear Unit: scale * (x if x >= 0 else alpha*elu(x)).

    alpha/scale are the SELU fixed-point constants from Klambauer et al. 2017.
    """
    # NOTE(review): the scope is named 'elu' although this computes SELU;
    # renaming would change graph node names, so it is left as-is here.
    with ops.name_scope('elu') as scope:
        alpha = 1.6732632423543772848170429916717
        scale = 1.0507009873554804934193349852946
        return scale*tf.where(x>=0.0, x, alpha*tf.nn.elu(x))


# (3) initialize weights with stddev sqrt(1/n)
# e.g. use: initializer = layers.variance_scaling_initializer(factor=1.0, mode='FAN_IN')


# (4) use this dropout
def dropout_selu(x, rate, alpha= -1.7580993408473766, fixedPointMean=0.0, fixedPointVar=1.0,
                 noise_shape=None, seed=None, name=None, training=False):
    """Dropout to a value with rescaling.

    Instead of dropping to 0, dropped units are set to *alpha* (the SELU
    negative saturation value) and the result is affinely rescaled so the
    activations keep mean *fixedPointMean* and variance *fixedPointVar*.

    :param x: input tensor
    :param rate: fraction of units to drop (keep_prob = 1 - rate)
    :param training: when False the input is returned unchanged (identity)
    """

    def dropout_selu_impl(x, rate, alpha, noise_shape, seed, name):
        keep_prob = 1.0 - rate
        x = ops.convert_to_tensor(x, name="x")
        if isinstance(keep_prob, numbers.Real) and not 0 < keep_prob <= 1:
            raise ValueError("keep_prob must be a scalar tensor or a float in the "
                             "range (0, 1], got %g" % keep_prob)
        keep_prob = ops.convert_to_tensor(keep_prob, dtype=x.dtype, name="keep_prob")
        keep_prob.get_shape().assert_is_compatible_with(tensor_shape.scalar())

        alpha = ops.convert_to_tensor(alpha, dtype=x.dtype, name="alpha")
        # NOTE(review): this re-checks keep_prob's shape; presumably it was
        # meant to check alpha's shape — confirm against upstream.
        keep_prob.get_shape().assert_is_compatible_with(tensor_shape.scalar())

        # Nothing to drop when keep_prob is statically known to be 1.
        if tensor_util.constant_value(keep_prob) == 1:
            return x

        noise_shape = noise_shape if noise_shape is not None else array_ops.shape(x)
        # uniform in [keep_prob, 1 + keep_prob); floor() yields 1 with
        # probability keep_prob and 0 otherwise (the keep/drop mask).
        random_tensor = keep_prob
        random_tensor += random_ops.random_uniform(noise_shape, seed=seed, dtype=x.dtype)
        binary_tensor = math_ops.floor(random_tensor)
        # Kept units pass through; dropped units are set to alpha.
        ret = x * binary_tensor + alpha * (1-binary_tensor)

        # Affine correction (a, b) restoring the fixed-point mean/variance.
        a = math_ops.sqrt(fixedPointVar / (keep_prob *((1-keep_prob) * math_ops.pow(alpha-fixedPointMean,2) + fixedPointVar)))

        b = fixedPointMean - a * (keep_prob * fixedPointMean + (1 - keep_prob) * alpha)
        ret = a * ret + b
        ret.set_shape(x.get_shape())
        return ret

    with ops.name_scope(name, "dropout", [x]) as name:
        # Only apply dropout in training mode; identity otherwise.
        return utils.smart_cond(training,
                                lambda: dropout_selu_impl(x, rate, alpha, noise_shape, seed, name),
                                lambda: array_ops.identity(x))


def get_timestamp(fmt='%y%m%d_%H%M'):
    '''Returns a string that contains the current date and time.

    Suggested formats:
        short_format=%y%m%d_%H%M (default)
        long format=%Y%m%d_%H%M%S
    '''
    import datetime
    now = datetime.datetime.now()
    return datetime.datetime.strftime(now, fmt)


def generate_slices(n, slice_size, allow_smaller_final_batch=True):
    """Generates slices of given slice_size up to n.

    Yields slice objects covering [0, n) in consecutive chunks; the final,
    possibly shorter, chunk is only yielded when allow_smaller_final_batch.
    """
    start, end = 0, 0
    for pack_num in range(int(n / slice_size)):
        end = start + slice_size
        yield slice(start, end, None)
        start = end
    # last slice might not be a full batch
    if allow_smaller_final_batch:
        if end < n:
            yield slice(end, n, None)


def generate_minibatches(batch_size, ph_list, data_list, n_epochs=1,
                         allow_smaller_final_batch=False, shuffle=True,
                         feed_dict=None):
    """Yield feed_dicts mapping each placeholder in *ph_list* to successive
    mini-batches of the corresponding array in *data_list*.

    :param n_epochs: stop after this many passes; None means loop forever.

    NOTE(review): shuffling permutes the arrays in *data_list* in place, and
    the same feed_dict object is reused (mutated) between yields.
    """
    cnt_epochs = 0
    assert len(ph_list) == len(data_list), "Passed different number of data and placeholders"
    # NOTE(review): `>= 0` is vacuously true; `> 0` was presumably intended.
    assert len(data_list) >= 0, "Passed empty lists"

    n_samples = data_list[0].shape[0]
    n_items = len(data_list)

    while True:
        # Reshuffle all arrays with one shared permutation per epoch.
        if shuffle:
            idx = np.arange(n_samples)
            np.random.shuffle(idx)
            for i in range(n_items):
                data_list[i] = data_list[i][idx]

        if feed_dict is None:
            feed_dict = {}

        for s in generate_slices(n_samples, batch_size, allow_smaller_final_batch):
            for i in range(n_items):
                ph = ph_list[i]
                d = data_list[i][s]
                feed_dict[ph] = d
            yield feed_dict

        cnt_epochs += 1

        if n_epochs is not None and cnt_epochs >= n_epochs:
            break
gpl-3.0
-7,715,505,530,584,231,000
34.75
126
0.621941
false
3.42003
false
false
false
elin-moco/metrics
metrics/etl/management/commands/load.py
1
1229
from django.core.management.base import BaseCommand
from metrics.etl.tools import related_posts_extract, fx_extract, log_extract, pd_transform, main_extract, moztech_extract, mozblog_extract, newsletter_extract, moztech_load, browser_survey_extract
from metrics.settings import LOG_PATH


class Command(BaseCommand):
    """Run one named ETL cycle.

    Usage: ``manage.py load <target> [extra]`` where ``<target>`` selects the
    pipeline (``log``, ``fx``, ``main``, ``moztech``, ``mozblog``,
    ``newsletter``, ``related_posts`` or ``browser_survey``).  ``log`` requires
    the log file name as a second argument; ``related_posts`` optionally takes
    a blog name (defaults to refreshing both ``blog`` and ``tech``).
    """

    help = 'Perform ETL cycle'

    def handle(self, *args, **options):
        # Django always passes a tuple here, never None; the old
        # `if args is None` guard could not fire, so invoking the command
        # without arguments crashed with IndexError on args[0].
        if not args:
            return
        target = args[0]
        # The targets are mutually exclusive, so dispatch with elif.
        if target == 'log':
            # log extraction needs (base path, log file name)
            log_extract.main((LOG_PATH, args[1]))
        elif target == 'fx':
            fx_extract.main()
            pd_transform.main()
        elif target == 'main':
            main_extract.main()
        elif target == 'moztech':
            moztech_extract.main()
            moztech_load.main()
        elif target == 'mozblog':
            mozblog_extract.main()
        elif target == 'newsletter':
            newsletter_extract.main()
        elif target == 'related_posts':
            if len(args) > 1:
                related_posts_extract.main(args[1])
            else:
                # default: refresh related posts for both blogs
                related_posts_extract.main('blog')
                related_posts_extract.main('tech')
        elif target == 'browser_survey':
            browser_survey_extract.main()
bsd-3-clause
2,593,122,512,009,476,600
36.242424
196
0.567128
false
3.735562
false
false
false
briandorsey/partisci
clients/python/pypartisci.py
1
1390
# Partisci client library: report application versions to a Partisci server.
# NOTE(review): this module is Python 2 only (httplib, StandardError,
# print statements); porting to Python 3 would need http.client and
# bytes-encoding of the UDP payload.
import httplib
import json
import time
import random
import socket

__version__ = "1.0"


def serialize(app, ver, host, instance):
    """Serialize one version-update record as a JSON string."""
    update = dict(
        app=app,
        ver=ver,
        host=host,
        instance=instance)
    data = json.dumps(update)
    return data


def send_udp(server, port, app, ver, host="", instance=0):
    """Send a version update to *server:port* over UDP (fire-and-forget)."""
    if not host:
        # Best effort: default to the local hostname, ignoring lookup errors.
        try:
            host = socket.gethostname()
        except StandardError:
            pass
    data = serialize(app, ver, host, instance)
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.connect((server, port))
    s.send(data)
    s.close()
    return


def send_http(server, port, app, ver, host="", instance=0):
    """POST a version update to the server's HTTP API.

    Returns (HTTP status code, response body).
    """
    conn = httplib.HTTPConnection(server, port)
    body = serialize(app, ver, host, instance)
    conn.request("POST", "/api/v1/update/", body)
    response = conn.getresponse()
    data = response.read()
    conn.close()
    return response.status, data


if __name__ == '__main__':
    # Demo loop: send a random version/host/instance combination every 2 s.
    versions = ["1.0", "2.0", "3.0"]
    hosts = ["abc", "def", "ghi"]
    instances = [0, 1, 2, 3]
    while True:
        print "%-14s Sending update" % time.time()
        send_udp('localhost', 7777, 'Python Client demo',
                 random.choice(versions), random.choice(hosts),
                 random.choice(instances))
        time.sleep(2)
bsd-2-clause
4,227,259,765,084,404,000
24.272727
59
0.558993
false
3.706667
false
false
false
SFPD/rlreloaded
3rdparty/climin/cg.py
1
10960
# -*- coding: utf-8 -*-

"""Module containing functionality for conjugate gradients.

Conjugate gradients is motivated from a first order Taylor expansion of the
objective:

.. math::
   f(\\theta_t + \\alpha_t d_t) \\approx f(\\theta_t) + \\alpha_td_t^Tf'(\\theta_t).

To locally decrease the objective, it is optimal to set
:math:`d_t \propto -f'(\\theta_t)` and find :math:`\\alpha_t` with a line search
algorithm, which is known as steepest descent. Yet, a well known disadvantage
of this approach is that directions found at :math:`t` will often interfere
with directions found for :math:`t' < t`.

The solution to this problem is to chose :math:`d_t` in a way that it does not
interfere with previous updates. If the dimensions of our problem were
independent, we could just move along these dimensions. If they were
independent up to rotation, we would have to chose directions which are
orthogonal to each other. This is exactly the case when the Hessian of the
problem, :math:`A` is diagonal. If it is not diagonal, we have to move along
directions which are called *conjugate* to each other with respect to the
matrix :math:`A`.

The conjugate gradients algorithms provide methods to do so efficiently. The
linear conjugate gradients algorithm assumes that the objective is a quadratic
and can thus determine :math:`\\alpha` exactly. Nonlinear conjugate gradients
works on arbitrary functions (yet, the Taylor expansion assumption above has to
be reasonable). Since the Hessian :math:`A` is not constant in this case, the
previous directions (to which a new direction has to be conjugate) have to be
reset from time to time. Additionally, we need to perform a line search to
solve for :math:`\\alpha_t`.

NOTE(review): this module is Python 2 (print statement, iterator ``.next()``).
"""

import warnings

import scipy
import numpy as np
import scipy.linalg
import scipy.optimize

from base import Minimizer, is_nonzerofinite
from linesearch import WolfeLineSearch


class ConjugateGradient(Minimizer):
    """ConjugateGradient class.

    Minimize a quadratic objective of the form

    .. math::
       f(\\theta) = {1 \over 2} \\theta^TA\\theta + \\theta^Tb + c.

    The minimization will take place by moving along conjugate directions of
    steepest decrease in the error. This will take at most as many steps as
    the dimensionality of the problem.

    .. note::
       In most cases it is better to use ``scipy.optimize.solve``. Only use
       this function if you want to monitor intermediate quantities and are
       not entirely interested in optimization of a quadratic objective, but
       in a different error measure. E.g. as in Hessian free optimization.

    Attributes
    ----------

    wrt : array_like
        Parameters of the problem.

    H : array_like, 2 dimensional, square
        Curvature term of the quadratic, the Hessian.

    b : array_like
        Linear term of the quadratic.

    f_Hp : callable
        Function to calculcate the dot product of a Hessian with an
        arbitrary vector.

    min_grad : float, optional, default: 1e-14
        If all components of the gradient fall below this threshold, stop
        optimization.

    precond : array_like
        Matrix to precondition the problem. If a vector, is taken to
        represent a diagonal matrix.
    """

    def __init__(self, wrt, H=None, b=None, f_Hp=None, min_grad=1e-14,
                 precond=None):
        """Create a ConjugateGradient object.

        Parameters
        ----------

        wrt : array_like
            Parameters of the problem.

        H : array_like, 2 dimensional, square
            Curvature term of the quadratic, the Hessian.

        b : array_like
            Linear term of the quadratic.

        f_Hp : callable
            Function to calculcate the dot product of a Hessian with an
            arbitrary vector.

        min_grad : float, optional, default: 1e-14
            If all components of the gradient fall below this threshold,
            stop optimization.

        precond : array_like
            Matrix to precondition the problem. If a vector, is taken to
            represent a diagonal matrix.
        """
        super(ConjugateGradient, self).__init__(
            wrt, args=None)

        # Either use the provided Hessian-vector product, or build one from
        # the explicit Hessian H.
        self.f_Hp = f_Hp if f_Hp is not None else lambda p: np.dot(H, p)
        self.b = b
        self.min_grad = min_grad
        self.precond = precond

    def solve(self, r):
        """Apply the preconditioner to residual *r* (identity when unset)."""
        if self.precond is None:
            return r
        elif self.precond.ndim == 1:
            #if the preconditioning matrix is diagonal,
            #then it is supposedly given as a vector
            return r / self.precond
        else:
            return scipy.linalg.solve(self.precond, r)

    def __iter__(self):
        """Generator performing (preconditioned) linear CG iterations.

        Yields a dict of intermediate quantities after each step; mutates
        ``self.wrt`` in place.
        """
        grad = self.f_Hp(self.wrt) - self.b
        y = self.solve(grad)
        direction = -y

        # If the gradient is exactly zero, we stop. Otherwise, the
        # updates will lead to NaN errors because the direction will
        # be zero.
        if (grad == 0).all():
            warnings.warn('gradient is 0')
            return

        # Exact arithmetic converges in at most wrt.size steps.
        for i in range(self.wrt.size):
            Hp = self.f_Hp(direction)
            if not np.isfinite(Hp).all():
                print "hessian vector product is not finite. aborting cg"
                break
            ry = np.dot(grad, y)
            pHp = np.inner(direction, Hp)
            # Exact line search for a quadratic objective.
            step_length = ry / pHp
            self.wrt += step_length * direction

            # We do this every few iterations to compensate for possible
            # numerical errors due to additions.
            if i % 10 == 0:
                grad = self.f_Hp(self.wrt) - self.b
            else:
                grad += step_length * Hp
            y = self.solve(grad)
            beta = np.dot(grad, y) / ry
            direction = - y + beta * direction

            # If we don't bail out here, we will enter regions of numerical
            # instability.
            if (abs(grad) < self.min_grad).all():
                warnings.warn('gradient is below threshold')
                break

            yield {
                'ry': ry,
                'Hp': Hp,
                'pHp': pHp,
                'step_length': step_length,
                'n_iter': i,
            }


class NonlinearConjugateGradient(Minimizer):
    """
    NonlinearConjugateGradient optimizer.

    NCG minimizes functions by following directions which are conjugate to each
    other with respect to the Hessian. Since the curvature changes if the
    objective is nonquadratic, the Hessian will not be accurate and thus the
    conjugacy of successive search directions as well. Furthermore, the optimal
    step length cannot be found in closed form and has to be obtained with a
    line search.

    During optimization, we sometimes perform a restart. That means we give up
    on trying to find conjugate directions and use the gradient as a new search
    direction. This is done whenever two successive directions are far from
    orthogonal, an indicator that the quadratic assumption is either inaccurate
    or the Hessian has changed too much lately.

    Attributes
    ----------

    wrt : array_like
        Array of parameters of the problem.

    f : callable
        Objective function.

    fprime : callable
        First derivative of the objective function.

    min_grad : float
        If all components of the gradient fall below this value, stop
        minimization.

    line_search : LineSearch object.
        Line search object to perform line searches with.

    args : iterable
        Iterable of arguments passed on to the objective function and its
        derivatives.
    """

    def __init__(self, wrt, f, fprime, min_grad=1e-6, args=None):
        """Create a NonlinearConjugateGradient object.

        Parameters
        ----------

        wrt : array_like
            Array of parameters of the problem.

        f : callable
            Objective function.

        fprime : callable
            First derivative of the objective function.

        min_grad : float
            If all components of the gradient fall below this value, stop
            minimization.

        args : iterable, optional
            Iterable of arguments passed on to the objective function and its
            derivatives.
        """
        super(NonlinearConjugateGradient, self).__init__(wrt, args=args)
        self.f = f
        self.fprime = fprime
        # c2=0.2 tightens the curvature condition compared to the usual 0.9,
        # giving more accurate line searches for CG.
        self.line_search = WolfeLineSearch(wrt, self.f, self.fprime, c2=0.2)
        self.min_grad = min_grad

    def find_direction(self, grad_m1, grad, direction_m1):
        """Compute the next search direction from the previous gradient,
        the current gradient and the previous direction."""
        # Computation of beta as a compromise between Fletcher-Reeves
        # and Polak-Ribiere.
        grad_norm_m1 = np.dot(grad_m1, grad_m1)
        grad_diff = grad - grad_m1
        betaFR = np.dot(grad, grad) / grad_norm_m1
        betaPR = np.dot(grad, grad_diff) / grad_norm_m1
        # NOTE(review): betaHS (Hestenes-Stiefel) is computed but never used.
        betaHS = np.dot(grad, grad_diff) / np.dot(direction_m1, grad_diff)
        beta = max(-betaFR, min(betaPR, betaFR))

        # Restart if not a direction of sufficient descent, ie if two
        # consecutive gradients are far from orthogonal.
        if np.dot(grad, grad_m1) / grad_norm_m1 > 0.1:
            beta = 0

        direction = -grad + beta * direction_m1
        return direction, {}

    def __iter__(self):
        """Generator performing nonlinear CG iterations with Wolfe line
        searches; yields an info dict per step and mutates ``self.wrt``."""
        args, kwargs = self.args.next()
        grad = self.fprime(self.wrt, *args, **kwargs)
        grad_m1 = np.zeros(grad.shape)
        loss = self.f(self.wrt, *args, **kwargs)
        loss_m1 = 0

        for i, (next_args, next_kwargs) in enumerate(self.args):
            if i == 0:
                direction, info = -grad, {}
            else:
                direction, info = self.find_direction(grad_m1, grad, direction)

            if not is_nonzerofinite(direction):
                warnings.warn('gradient is either zero, nan or inf')
                break

            # Line search minimization.
            # Initial step from the previous decrease, capped at 1.
            initialization = 2 * (loss - loss_m1) / np.dot(grad, direction)
            initialization = min(1, initialization)
            step_length = self.line_search.search(
                direction, initialization, args, kwargs)
            self.wrt += step_length * direction

            # If we don't bail out here, we will enter regions of numerical
            # instability.
            if (abs(grad) < self.min_grad).all():
                warnings.warn('gradient is too small')
                break

            # Prepare everything for the next loop.
            args, kwargs = next_args, next_kwargs
            # Reuse gradient/loss already computed by the line search.
            grad_m1[:], grad[:] = grad, self.line_search.grad
            loss_m1, loss = loss, self.line_search.val

            info.update({
                'n_iter': i,
                'args': args,
                'kwargs': kwargs,
                'loss': loss,
                'gradient': grad,
                'gradient_m1': grad_m1,
                'step_length': step_length,
            })
            yield info
mit
1,880,162,668,656,026,400
33.25
84
0.610584
false
4.12185
false
false
false
gjcarneiro/pybindgen
pybindgen/typehandlers/codesink.py
1
4693
"""
Code sinks: objects that accept generated C/C++ source lines, apply the
current indentation to them, and forward the result to a file, an
in-memory buffer, or another sink.
"""
import sys

PY3 = (sys.version_info[0] >= 3)

if PY3:
    string_types = str,
else:
    string_types = basestring,

DEBUG = 0

if DEBUG:
    import traceback
    import sys


class CodeSink(object):
    """Abstract base class for code sinks"""

    def __init__(self):
        """Initialise the indentation state (current level plus the stack
        of levels saved by indent())."""
        self.indent_level = 0   # current indent level
        self.indent_stack = []  # previous indent levels
        if DEBUG:
            self._last_unindent_stack = None  # for debugging

    def _format_code(self, code):
        """Split *code* on newlines and prefix every line with the current
        indentation; returns the list of indented lines."""
        assert isinstance(code, string_types)
        prefix = ' ' * self.indent_level
        return [prefix + chunk for chunk in code.split('\n')]

    def writeln(self, line=''):
        """Write one or more lines of code"""
        raise NotImplementedError

    def indent(self, level=4):
        """Add *level* spaces of indentation to all lines written from now
        on, until the matching unindent() call."""
        self.indent_stack.append(self.indent_level)
        self.indent_level += level

    def unindent(self):
        """Restore the indentation level saved by the last indent() call."""
        if DEBUG:
            try:
                self.indent_level = self.indent_stack.pop()
            except IndexError:
                # Show where the previous (unbalanced) unindent happened.
                if self._last_unindent_stack is not None:
                    for line in traceback.format_list(self._last_unindent_stack):
                        sys.stderr.write(line)
                raise
            self._last_unindent_stack = traceback.extract_stack()
        else:
            self.indent_level = self.indent_stack.pop()


class FileCodeSink(CodeSink):
    """A code sink that writes to a file-like object"""

    def __init__(self, file_):
        """
        :param file_: a file like object
        """
        CodeSink.__init__(self)
        self.file = file_

    def __repr__(self):
        return "<pybindgen.typehandlers.codesink.FileCodeSink %r>" % (self.file.name,)

    def writeln(self, line=''):
        """Write one or more lines of code"""
        indented = self._format_code(line)
        self.file.write('\n'.join(indented))
        self.file.write('\n')

    def __lt__(self, other):
        # Order by underlying file name; comparisons against other types
        # are undefined (implicitly None).
        if isinstance(other, FileCodeSink):
            return self.file.name < other.file.name


class MemoryCodeSink(CodeSink):
    """A code sink that keeps the code in memory,
    and can later flush the code to another code sink"""

    def __init__(self):
        "Constructor"
        CodeSink.__init__(self)
        self.lines = []  # already-indented lines buffered so far

    def writeln(self, line=''):
        """Write one or more lines of code"""
        for indented_line in self._format_code(line):
            self.lines.append(indented_line)

    def flush_to(self, sink):
        """Flushes code to another code sink

        :param sink: another CodeSink instance
        """
        assert isinstance(sink, CodeSink)
        for buffered in self.lines:
            sink.writeln(buffered.rstrip())
        self.lines = []

    def flush(self):
        "Flushes the code and returns the formatted output as a return value string"
        rendered = []
        for buffered in self.lines:
            rendered.extend(self._format_code(buffered))
        self.lines = []
        return "\n".join(rendered) + '\n'


class NullCodeSink(CodeSink):
    """A code sink that discards all content.  Useful to 'test' if code
    generation would work without actually generating anything."""

    def __init__(self):
        "Constructor"
        CodeSink.__init__(self)

    def writeln(self, line=''):
        """Write one or more lines of code"""
        pass

    def flush_to(self, sink):
        """Flushes code to another code sink

        :param sink: another CodeSink instance
        """
        raise TypeError("Cannot flush a NullCodeSink; it has no content!")

    def flush(self):
        "Flushes the code and returns the formatted output as a return value string"
        raise TypeError("Cannot flush a NullCodeSink; it has no content!")
lgpl-2.1
-1,145,504,394,182,056,200
29.474026
86
0.568506
false
4.131162
false
false
false
maljac/odoo-addons
project_task_desc_html/__openerp__.py
1
1587
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar) # All Rights Reserved. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Project Task Description in HTML', 'version': '1.0', 'category': 'Projects & Services', 'sequence': 14, 'summary': '', 'description': """ Project Task Description in HTML ================================ Changes description type on tasks to html """, 'author': 'ADHOC SA', 'website': 'www.adhoc.com.ar', 'images': [ ], 'depends': [ 'project', ], 'data': [ ], 'demo': [ ], 'test': [ ], 'installable': True, 'auto_install': False, 'application': False, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
-3,380,056,052,953,167,000
31.387755
78
0.561437
false
4.122078
false
false
false
aswolf/xmeos
xmeos/models/compress.py
1
34752
# -*- coding: utf-8 -*- # from __future__ import absolute_import, print_function, division from future.utils import with_metaclass import numpy as np import scipy as sp from abc import ABCMeta, abstractmethod from scipy import integrate import scipy.interpolate as interpolate from . import core __all__ = ['CompressEos','CompressCalc'] #==================================================================== # Models #==================================================================== def set_calculator(eos_mod, kind, kind_opts, path_const, order=3): assert kind in kind_opts, ( kind + ' is not a valid thermal calculator. '+ 'You must select one of: ' + str(kind_opts)) if kind=='Vinet': calc = _Vinet(eos_mod, path_const=path_const, order=order) elif kind=='BirchMurn3': calc = _BirchMurn3(eos_mod, path_const=path_const, order=order) elif kind=='BirchMurn4': calc = _BirchMurn4(eos_mod, path_const=path_const, order=order) elif kind=='GenFiniteStrain': calc = _GenFiniteStrain(eos_mod, path_const=path_const, order=order) elif kind=='Tait': calc = _Tait(eos_mod, path_const=path_const, order=order) elif kind=='PolyRho': calc = _PolyRho(eos_mod, path_const=path_const, order=order) else: raise NotImplementedError(kind+' is not a valid '+\ 'CompressEos Calculator.') eos_mod._add_calculator(calc, calc_type='compress') pass #==================================================================== class CompressEos(with_metaclass(ABCMeta, core.Eos)): """ EOS model for reference compression path. 
Parameters ---------- Path can either be isothermal (T=const) or adiabatic (S=const) For this restricted path, thermodyn properties depend only on volume """ _path_opts = ['T','S','0K'] _kind_opts = ['Vinet','BirchMurn3','BirchMurn4','GenFiniteStrain', 'Tait','PolyRho'] def __init__(self, kind='Vinet', natom=1, molar_mass=100, path_const='T', order=3, model_state={}): self._pre_init(natom=natom, molar_mass=molar_mass) set_calculator(self, kind, self._kind_opts, path_const, order=order) self._set_ref_state() self._post_init(model_state=model_state) pass def __repr__(self): calc = self.calculators['compress'] return ("CompressEos(kind={kind}, natom={natom}, " "molar_mass={molar_mass}," "path_const={path_const}, order={order}, " "model_state={model_state}, " ")" .format(kind=repr(calc.name), natom=repr(self.natom), molar_mass=repr(self.molar_mass), path_const=repr(self.path_const), order=repr(calc.order), model_state=self.model_state ) ) def _set_ref_state(self): calc = self.calculators['compress'] path_const = calc.path_const energy_scale = calc.get_energy_scale() T0 = 300 # Add needed extra parameters (depending on path_const) if path_const=='T': param_ref_names = ['T0','F0'] param_ref_units = ['K','eV'] param_ref_defaults = [T0, 0.0] param_ref_scales = [T0, energy_scale] elif path_const=='S': param_ref_names = ['T0','E0'] param_ref_units = ['K','eV'] param_ref_defaults = [T0, 0.0] param_ref_scales = [T0, energy_scale] elif path_const=='0K': param_ref_names = [] param_ref_units = [] param_ref_defaults = [] param_ref_scales = [] pass else: raise NotImplementedError( 'path_const '+path_const+' is not valid for CompressEos.') self._path_const = path_const self._param_ref_names = param_ref_names self._param_ref_units = param_ref_units self._param_ref_defaults = param_ref_defaults self._param_ref_scales = param_ref_scales pass @property def path_opts(self): return self._path_opts @property def path_const(self): return self._path_const def press(self, V_a, 
apply_expand_adj=True): press_a = self.calculators['compress']._calc_press(V_a) return press_a def energy( self, V_a, apply_expand_adj=True ): energy0 = 0.0 try: energy0 = self.get_param_values(param_names=['F0']) except: pass try: energy0 = self.get_param_values(param_names=['E0']) except: pass energy_a = energy0 + self.calculators['compress']._calc_energy(V_a) # if self.expand_adj and apply_expand_adj: # ind_exp = self.get_ind_expand(V_a, eos_d) # if apply_expand_adj and (ind_exp.size>0): # energy_a[ind_exp] = self.expand_adj_mod._calc_energy( V_a[ind_exp], eos_d ) return energy_a def bulk_mod( self, V_a, apply_expand_adj=True ): try: bulk_mod_a = self.calculators['compress']._calc_bulk_mod(V_a) # if self.expand_adj and apply_expand_adj: # ind_exp = self.get_ind_expand(V_a) # if apply_expand_adj and (ind_exp.size>0): # bulk_mod_a[ind_exp] = # self.expand_adj_mod._calc_bulk_mod(V_a[ind_exp]) except: TOL=1e-4 P_lo_a = self.press(V_a*np.exp(-TOL/2)) P_hi_a = self.press(V_a*np.exp(+TOL/2)) bulk_mod_a = -(P_hi_a-P_lo_a)/TOL return bulk_mod_a def bulk_mod_deriv( self,V_a, apply_expand_adj=True ): bulk_mod_deriv_a = self.calculators['compress']._calc_bulk_mod_deriv(V_a) if self.expand_adj and apply_expand_adj: ind_exp = self.get_ind_expand(V_a) if apply_expand_adj and (ind_exp.size>0): bulk_mod_deriv_a[ind_exp] = self.expand_adj_mod_deriv._calc_bulk_mod_deriv(V_a[ind_exp]) return bulk_mod_deriv_a def energy_perturb( self, V_a, apply_expand_adj=True ): # Eval positive press values Eperturb_pos_a, scale_a, paramkey_a = self.calculators['compress']._calc_energy_perturb(V_a) if (self.expand_adj==False) or (apply_expand_adj==False): return Eperturb_pos_a, scale_a, paramkey_a else: Nparam_pos = Eperturb_pos_a.shape[0] scale_a, paramkey_a, ind_pos = \ self.get_param_scale(apply_expand_adj=True, output_ind=True) Eperturb_a = np.zeros((paramkey_a.size, V_a.size)) Eperturb_a[ind_pos,:] = Eperturb_pos_a # Overwrite negative pressure Expansion regions ind_exp = 
self.get_ind_expand(V_a) if ind_exp.size>0: Eperturb_adj_a = \ self.expand_adj_mod._calc_energy_perturb(V_a[ind_exp])[0] Eperturb_a[:,ind_exp] = Eperturb_adj_a return Eperturb_a, scale_a, paramkey_a # Standard methods must be overridden (as needed) by implimentation model def get_param_scale_sub(self): raise NotImplementedError("'get_param_scale_sub' function not implimented for this model") #################### # Required Methods # #################### #################### # Optional Methods # #################### def _calc_energy_perturb(self, V_a): """Returns Energy pertubation basis functions resulting from fractional changes to EOS params.""" fname = 'energy' scale_a, paramkey_a = self.get_param_scale( apply_expand_adj=self.expand_adj) Eperturb_a = [] for paramname in paramkey_a: iEperturb_a = self.param_deriv(fname, paramname, V_a) Eperturb_a.append(iEperturb_a) Eperturb_a = np.array(Eperturb_a) return Eperturb_a, scale_a, paramkey_a #==================================================================== #==================================================================== # Calculators #==================================================================== class CompressCalc(with_metaclass(ABCMeta, core.Calculator)): """ Abstract Equation of State class for a reference Compression Path Path can either be isothermal (T=const) or adiabatic (S=const) For this restricted path, thermodyn properties depend only on volume """ _path_opts = ['T','S','0K'] supress_energy = False supress_press = False def __init__( self, eos_mod, path_const='T', order=None, expand_adj_mod=None, expand_adj=None, supress_energy=False, supress_press=False ): assert path_const in self.path_opts, path_const + ' is not a valid ' + \ 'path const. 
You must select one of: ' + path_opts assert (np.isscalar(order))&(order>0)&(np.mod(order,1)==0), ( 'order must be a positive integer.') self._eos_mod = eos_mod self._init_params(order) self._required_calculators = None self._path_const = path_const self.supress_energy = supress_energy self.supress_press = supress_press # Use Expansion Adjustment for negative pressure region? if expand_adj is None: self.expand_adj = False else: self.expand_adj = expand_adj if expand_adj_mod is None: self.expand_adj = False self.expand_adj_mod = None else: self.expand_adj = True self.expand_adj_mod = expand_adj_mod pass @property def path_opts(self): return self._path_opts def get_ind_expand(self, V_a): V0 = core.get_params(['V0']) ind_exp = np.where( V_a > V0 )[0] return ind_exp @property def path_const(self): return self._path_const @property def order(self): return self._order # NEED to write infer volume function # Standard methods must be overridden (as needed) by implimentation model def get_energy_scale(self): V0, K0 = self.get_param_defaults(['V0','K0']) energy_scale = np.round(V0*K0/core.CONSTS['PV_ratio'],decimals=2) return energy_scale def get_param_scale_sub(self): raise NotImplementedError("'get_param_scale_sub' function not implimented for this model") #################### # Required Methods # #################### @abstractmethod def _init_params(self, order): """Initialize list of calculator parameter names.""" pass @abstractmethod def _calc_press(self, V_a): """Returns Press variation along compression curve.""" pass @abstractmethod def _calc_energy(self, V_a): """Returns Energy along compression curve.""" pass #################### # Optional Methods # #################### # EOS property functions def _calc_param_deriv(self, fname, paramname, V_a, dxfrac=1e-6): scale_a, paramkey_a = self.get_param_scale(apply_expand_adj=True ) scale = scale_a[paramkey_a==paramname][0] # print 'scale: ' + np.str(scale) #if (paramname is 'E0') and (fname is 'energy'): # return 
np.ones(V_a.shape) try: fun = getattr(self, fname) # Note that self is implicitly included val0_a = fun(V_a) except: assert False, 'That is not a valid function name ' + \ '(e.g. it should be press or energy)' try: param = core.get_params([paramname])[0] dparam = scale*dxfrac # print 'param: ' + np.str(param) # print 'dparam: ' + np.str(dparam) except: assert False, 'This is not a valid parameter name' # set param value in eos_d dict core.set_params([paramname,], [param+dparam,]) # Note that self is implicitly included dval_a = fun(V_a) - val0_a # reset param to original value core.set_params([paramname], [param]) deriv_a = dval_a/dxfrac return deriv_a def _calc_energy_perturb(self, V_a): """Returns Energy pertubation basis functions resulting from fractional changes to EOS params.""" fname = 'energy' scale_a, paramkey_a = self.get_param_scale( apply_expand_adj=self.expand_adj) Eperturb_a = [] for paramname in paramkey_a: iEperturb_a = self._calc_param_deriv(fname, paramname, V_a) Eperturb_a.append(iEperturb_a) Eperturb_a = np.array(Eperturb_a) return Eperturb_a, scale_a, paramkey_a def _calc_bulk_mod(self, V_a): """Returns Bulk Modulus variation along compression curve.""" raise NotImplementedError("'bulk_mod' function not implimented for this model") def _calc_bulk_mod_deriv(self, V_a): """Returns Bulk Modulus Deriv (K') variation along compression curve.""" raise NotImplementedError("'bulk_mod_deriv' function not implimented for this model") #==================================================================== #==================================================================== # Implementations #==================================================================== class _Vinet(CompressCalc): _name='Vinet' def _init_params(self, order): """Initialize list of calculator parameter names.""" order = 3 # ignore order input V0, K0, KP0 = 100, 150, 4 param_names = ['V0','K0','KP0'] param_units = ['ang^3','GPa','1'] param_defaults = [V0,K0,KP0] param_scales = 
[V0,K0,KP0] self._set_params(param_names, param_units, param_defaults, param_scales, order=order) pass def _calc_press(self, V_a): V0, K0, KP0 = self.eos_mod.get_param_values( param_names=['V0','K0','KP0']) eta = 3/2*(KP0-1) vratio_a = V_a/V0 x_a = vratio_a**(1/3) press_a = 3*K0*(1-x_a)*x_a**(-2)*np.exp(eta*(1-x_a)) return press_a def _calc_energy(self, V_a): V0, K0, KP0 = self.eos_mod.get_param_values( param_names=['V0','K0','KP0']) PV_ratio, = core.get_consts(['PV_ratio']) eta = 3/2*(KP0-1) vratio_a = V_a/V0 x_a = vratio_a**(1/3) energy_a = 9*K0*V0/PV_ratio/eta**2*\ (1 + (eta*(1-x_a)-1)*np.exp(eta*(1-x_a))) return energy_a # def get_param_scale_sub(self): # """Return scale values for each parameter""" # V0, K0, KP0 = core.get_params(['V0','K0','KP0']) # PV_ratio, = core.get_consts(['PV_ratio']) # paramkey_a = np.array(['V0','K0','KP0','E0']) # scale_a = np.array([V0,K0,KP0,K0*V0/PV_ratio]) # return scale_a, paramkey_a # def _calc_energy_perturb(self, V_a): # """Returns Energy pertubation basis functions resulting from fractional changes to EOS params.""" # V0, K0, KP0, E0 = core.get_params(['V0','K0','KP0','E0']) # PV_ratio, = core.get_consts(['PV_ratio']) # eta = 3/2*(KP0-1) # vratio_a = V_a/V0 # x = vratio_a**(1/3) # scale_a, paramkey_a = self.get_param_scale_sub() # # NOTE: CHECK UNITS (PV_RATIO) here # dEdp_a = 1/PV_ratio*np.vstack\ # ([-3*K0*(eta**2*x*(x-1) + 3*eta*(x-1) - 3*np.exp(eta*(x-1)) + 3)\ # *np.exp(-eta*(x-1))/eta**2, # -9*V0*(eta*(x-1) - np.exp(eta*(x-1)) + 1)*np.exp(-eta*(x-1))/eta**2, # 27*K0*V0*(2*eta*(x-1) + eta*(-x + (x-1)*(eta*(x-1) + 1) + 1) # -2*np.exp(eta*(x-1)) + 2)*np.exp(-eta*(x-1))/(2*eta**3), # PV_ratio*np.ones(V_a.shape)]) # Eperturb_a = np.expand_dims(scale_a,1)*dEdp_a # #Eperturb_a = np.expand_dims(scale_a)*dEdp_a # return Eperturb_a, scale_a, paramkey_a #==================================================================== class _BirchMurn3(CompressCalc): def _calc_press(self, V_a): V0, K0, KP0 = 
self.eos_mod.get_param_values(param_names=['V0','K0','KP0']) vratio_a = V_a/V0 press_a = 3/2*K0 * (vratio_a**(-7/3) - vratio_a**(-5/3)) * \ (1 + 3/4*(KP0-4)*(vratio_a**(-2/3)-1)) return press_a def _calc_energy(self, V_a): V0, K0, KP0 = self.eos_mod.get_param_values( param_names=['V0','K0','KP0']) PV_ratio = core.CONSTS['PV_ratio'] vratio_a = V_a/V0 fstrain_a = 1/2*(vratio_a**(-2/3) - 1) energy_a = 9/2*(V0*K0/PV_ratio)*\ ( KP0*fstrain_a**3 + fstrain_a**2*(1-4*fstrain_a) ) return energy_a def _init_params(self, order): """Initialize list of calculator parameter names.""" order = 3 # ignore order input V0, K0, KP0 = 100, 150, 4 param_names = ['V0','K0','KP0'] param_units = ['ang^3','GPa','1'] param_defaults = [V0,K0,KP0] param_scales = [V0,K0,KP0] self._set_params(param_names, param_units, param_defaults, param_scales, order=order) pass #==================================================================== class _BirchMurn4(CompressCalc): # def get_param_scale_sub(self): # """Return scale values for each parameter""" # V0, K0, KP0, KP20 = core.get_params(['V0','K0','KP0','KP20']) # PV_ratio = core.CONSTS['PV_ratio'] # paramkey_a = np.array(['V0','K0','KP0','KP20','E0']) # scale_a = np.array([V0,K0,KP0,KP0/K0,K0*V0/PV_ratio]) # return scale_a, paramkey_a def _calc_strain_energy_coeffs(self, nexp, K0, KP0, KP20): a1 = 3/2*(KP0-nexp-2) a2 = 3/2*(K0*KP20 + KP0*(KP0-2*nexp-3)+3+4*nexp+11/9*nexp**2) return a1,a2 def _calc_press(self, V_a): V0, K0, KP0, KP20 = self.eos_mod.get_param_values( param_names=['V0','K0','KP0','KP20']) nexp = +2 vratio_a = V_a/V0 fstrain_a = 1/nexp*(vratio_a**(-nexp/3) - 1) a1,a2 = self._calc_strain_energy_coeffs(nexp,K0,KP0,KP20) press_a = 3*K0*(1+a1*fstrain_a + a2*fstrain_a**2)*\ fstrain_a*(nexp*fstrain_a+1)**((nexp+3)/nexp) return press_a def _calc_energy(self, V_a): V0, K0, KP0, KP20 = self.eos_mod.get_param_values( param_names=['V0','K0','KP0','KP20']) nexp = +2 PV_ratio = core.CONSTS['PV_ratio'] vratio_a = V_a/V0 fstrain_a = 
1/nexp*(vratio_a**(-nexp/3) - 1) a1,a2 = self._calc_strain_energy_coeffs(nexp,K0,KP0,KP20) energy_a = 9*(V0*K0/PV_ratio)*\ ( 1/2*fstrain_a**2 + a1/3*fstrain_a**3 + a2/4*fstrain_a**4) return energy_a def _init_params(self, order): """Initialize list of calculator parameter names.""" order = 4 # ignore order input V0, K0, KP0 = 100, 150, 4 KP20 = -KP0/K0 KP20_scale = np.abs(KP20) param_names = ['V0','K0','KP0','KP20'] param_units = ['ang^3','GPa','1','GPa^-1'] param_defaults = [V0,K0,KP0,KP20] param_scales = [V0,K0,KP0,KP20_scale] self._set_params(param_names, param_units, param_defaults, param_scales, order=order) pass #==================================================================== class _GenFiniteStrain(CompressCalc): """ Generalized Finite Strain EOS from Jeanloz1989b Note: nexp=2 yields Birch Murnaghan (eulerian strain) EOS nexp=-2 yields lagragian strain EOS """ def _calc_strain_energy_coeffs(self, nexp, K0, KP0, KP20=None, KP30=None): a1 = 3/2*(KP0-nexp-2) if KP20 is None: return a1 else: a2 = 3/2*(K0*KP20 + KP0*(KP0-2*nexp-3)+3+4*nexp+11/9*nexp**2) if KP30 is None: return a1,a2 else: a3 = 1/8*(9*K0**2*KP30 + 6*(6*KP0-5*nexp-6)*K0*KP20 +((3*KP0-5*nexp-6)**2 +10*nexp**2 + 30*nexp + 18)*KP0 -(50/3*nexp**3 + 70*nexp**2 + 90*nexp + 36)) return a1,a2,a3 def _calc_press(self, V_a): V0, K0, KP0, KP20, nexp = self.eos_mod.get_param_values( param_names=['V0','K0','KP0','KP20','nexp']) vratio_a = V_a/V0 fstrain_a = 1/nexp*(vratio_a**(-nexp/3) - 1) a1,a2 = self._calc_strain_energy_coeffs(nexp,K0,KP0,KP20=KP20) press_a = 3*K0*(1+a1*fstrain_a + a2*fstrain_a**2)*\ fstrain_a*(nexp*fstrain_a+1)**((nexp+3)/nexp) return press_a def _calc_energy(self, V_a): V0, K0, KP0, KP20, nexp = self.eos_mod.get_param_values( param_names=['V0','K0','KP0','KP20','nexp']) PV_ratio = core.CONSTS['PV_ratio'] vratio_a = V_a/V0 fstrain_a = 1/nexp*(vratio_a**(-nexp/3) - 1) a1,a2 = self._calc_strain_energy_coeffs(nexp,K0,KP0,KP20=KP20) energy_a = 9*(V0*K0/PV_ratio)*\ ( 1/2*fstrain_a**2 + 
a1/3*fstrain_a**3 + a2/4*fstrain_a**4) return energy_a def _init_params(self, order): """Initialize list of calculator parameter names.""" order = 4 #ignore input order V0, K0, KP0, nexp = 100, 150, 4, 2 nexp_scale = 1 KP20 = -KP0/K0 KP20_scale = np.abs(KP20) param_names = ['V0','K0','KP0','KP20','nexp'] param_units = ['ang^3','GPa','1','GPa^-1','1'] param_defaults = [V0,K0,KP0,KP20,nexp] param_scales = [V0,K0,KP0,KP20_scale,nexp_scale] self._set_params(param_names, param_units, param_defaults, param_scales, order=order) pass #==================================================================== class _Tait(CompressCalc): # def __init__( self, setlogPmin=False, # path_const='T', level_const=300, expand_adj_mod=None, # expand_adj=None, supress_energy=False, supress_press=False ): # super(Tait, self).__init__( expand_adj=None ) # self.setlogPmin = setlogPmin # pass # def __init__( self, setlogPmin=False, expand_adj=False ): # self.setlogPmin = setlogPmin # self.expand_adj = expand_adj # pass def _get_eos_params(self): V0, K0, KP0 = self.eos_mod.get_param_values( param_names=['V0','K0','KP0']) if self.setlogPmin: logPmin, = self.eos_mod.get_param_values( param_names=['logPmin']) Pmin = np.exp(logPmin) # assert Pmin>0, 'Pmin must be positive.' 
KP20 = (KP0+1)*(KP0/K0 - 1/Pmin) else: KP20, = self.eos_mod.get_param_values( param_names=['KP20']) return V0,K0,KP0,KP20 # def get_param_scale_sub(self): # """Return scale values for each parameter""" # V0, K0, KP0, KP20 = self.eos_mod.get_param_values( # param_names=['V0','K0','KP0','KP20']) # PV_ratio = core.CONSTS['PV_ratio'] # if self.setlogPmin: # # [V0,K0,KP0,E0] # paramkey_a = np.array(['V0','K0','KP0','E0']) # scale_a = np.array([V0,K0,KP0,K0*V0/PV_ratio]) # else: # # [V0,K0,KP0,KP20,E0] # paramkey_a = np.array(['V0','K0','KP0','KP20','E0']) # scale_a = np.array([V0,K0,KP0,KP0/K0,K0*V0/PV_ratio]) # return scale_a, paramkey_a def _eos_to_abc_params(self, K0, KP0, KP20): a = (KP0 + 1)/(K0*KP20 + KP0 + 1) b = -KP20/(KP0+1) + KP0/K0 c = (K0*KP20 + KP0 + 1)/(-K0*KP20 + KP0**2 + KP0) return a,b,c def _calc_press(self, V_a): V0, K0, KP0, KP20 = self.eos_mod.get_param_values( param_names=['V0','K0','KP0','KP20']) a,b,c = self._eos_to_abc_params(K0,KP0,KP20) vratio_a = V_a/V0 press_a = 1/b*(((vratio_a + a - 1)/a)**(-1/c) - 1) # from IPython import embed; import pdb; embed(); pdb.set_trace() return press_a def _calc_energy(self, V_a): V0, K0, KP0, KP20 = self.eos_mod.get_param_values( param_names=['V0','K0','KP0','KP20']) a,b,c = self._eos_to_abc_params(K0,KP0,KP20) PV_ratio = core.CONSTS['PV_ratio'] vratio_a = V_a/V0 press_a = self._calc_press(V_a) eta_a = b*press_a + 1 eta_pow_a = eta_a**(-c) # NOTE: Need to simplify energy expression here energy_a = (V0/b)/PV_ratio*(a*c/(c-1)-1)\ - (V0/b)/PV_ratio*( a*c/(c-1)*eta_a*eta_pow_a - a*eta_pow_a + a - 1) return energy_a # def _calc_energy_perturb_deprecate(self, V_a): # """ # Returns Energy pertubation basis functions resulting from # fractional changes to EOS params. 
# """ # V0, K0, KP0, KP20 = self._get_eos_params() # E0, = core.get_params(['E0']) # a,b,c = self._eos_to_abc_params(K0,KP0,KP20) # PV_ratio = core.CONSTS['PV_ratio'] # vratio_a = V_a/V0 # press_a = self._calc_press(V_a) # eta_a = b*press_a + 1 # eta_pow_a = eta_a**(-c) # scale_a, paramkey_a = self.get_param_scale_sub() # # [V0,K0,KP0,KP20,E0] # dEdp_a = np.ones((4, V_a.size)) # # dEdp_a[0,:] = 1/(PV_ratio*b*(c-1))*eta_a*(-a*eta_pow_a -1 + (1-a)*(a+c)) # dEdp_a[0,:] = 1/(PV_ratio*b*(c-1))*eta_a*(-a*eta_pow_a +a -1 -a*c+c) \ # + 1/(PV_ratio*b)*(a*c/(c-1)-1) # dEdp_a[-1,:] = 1 # # from IPython import embed; embed(); import ipdb; ipdb.set_trace() # # 1x3 # dEdabc_a = np.vstack\ # ([V0*eta_a/(a*b*(c-1))*(-a*eta_pow_a + a*(1-c))+c*V0/(b*(c-1)), # V0/(b**2*(c-1))*((-a*eta_pow_a+a-1)*(c-1) + c*a*eta_a*eta_pow_a) \ # - V0/b**2*(a*c/(c-1) - 1), # -a*V0/(b*(c-1)**2)*eta_a*eta_pow_a*(-c+(c-1)*(1-np.log(eta_a)))\ # +a*V0/(b*(c-1))*(1-c/(c-1))]) # # 3x3 # abc_jac = np.array([[-KP20*(KP0+1)/(K0*KP20+KP0+1)**2, # K0*KP20/(K0*KP20+KP0+1)**2, # -K0*(KP0+1)/(K0*KP20+KP0+1)**2], # [-KP0/K0**2, KP20/(KP0+1)**2 + 1/K0, -1/(KP0+1)], # [KP20*(KP0**2+2*KP0+1)/(-K0*KP20+KP0**2+KP0)**2, # (-K0*KP20+KP0**2+KP0-(2*KP0+1)*(K0*KP20+KP0+1))/\ # (-K0*KP20+KP0**2+KP0)**2, # K0*(KP0**2+2*KP0+1)/(-K0*KP20+KP0**2+KP0)**2]]) # dEdp_a[1:4,:] = 1/PV_ratio*np.dot(abc_jac.T,dEdabc_a) # print(dEdp_a.shape) # if self.setlogPmin: # # [V0,K0,KP0,E0] # print(dEdp_a.shape) # dEdp_a = dEdp_a[[0,1,2,4],:] # Eperturb_a = np.expand_dims(scale_a,1)*dEdp_a # #Eperturb_a = np.expand_dims(scale_a)*dEdp_a # return Eperturb_a, scale_a, paramkey_a def _init_params(self, order): """Initialize list of calculator parameter names.""" order = 4 # ignore input order V0, K0, KP0 = 100, 150, 4 KP20 = -KP0/K0 KP20_scale = np.abs(KP20) param_names = ['V0','K0','KP0','KP20'] param_units = ['ang^3','GPa','1','GPa^-1'] param_defaults = [V0,K0,KP0,KP20] param_scales = [V0,K0,KP0,KP20_scale] self._set_params(param_names, param_units, 
param_defaults, param_scales, order=order) pass #==================================================================== class _PolyRho(CompressCalc): """ Needed for Spera 2011 """ # def __init__(self, eos_mod, path_const='T', order=5, mass=100 ): # def _get_coef_array(self): # basename = 'Pcoef' # param_names = core.make_array_param_defaults(basename, self.order) # param_values = np.array(self.eos_mod.get_param_values( # param_names=param_names)) # coef_index = core.get_array_param_index(param_names) # order = np.max(coef_index) # param_full = np.zeros(order) # param_full[coef_index] = param_values def _vol_to_rho(self, V): rho = (self.eos_mod.molar_mass/V)*(core.CONSTS['ang3percc']/core.CONSTS['Nmol']) return rho def _rho_to_vol(self, rho): V = (self.eos_mod.molar_mass/rho)*(core.CONSTS['ang3percc']/core.CONSTS['Nmol']) return V def _get_poly_coef(self): param_names = self.eos_mod.get_array_param_names('Pcoef') param_values = self.eos_mod.get_param_values(param_names=param_names) V0, = self.eos_mod.get_param_values(param_names=['V0']) rho0 = self._vol_to_rho(V0) coef_index = core.get_array_param_index(param_names) order = np.max(coef_index)+1 param_full = np.zeros(order) param_full[coef_index] = param_values coef_a = np.flipud(param_full) return coef_a, rho0 def _get_unshifted_poly_coef(self): coef_a, rho0 = self._get_poly_coef() order = coef_a.size Pcoef_a = coef_a*rho0**np.flipud(np.arange(order)) core.simplify_poly(coef_a) def _calc_press(self, V_a): V_a = core.fill_array(V_a) coef_a, rho0 = self._get_poly_coef() rho_a = self._vol_to_rho(V_a) order = coef_a.size Pcoef_a = coef_a*rho0**np.flipud(np.arange(order)) x = rho_a/rho0 press_a = np.polyval(Pcoef_a, x-1) return press_a def _calc_energy(self, V_a): V_a = core.fill_array(V_a) PV_ratio = core.CONSTS['PV_ratio'] coef_a, rho0 = self._get_poly_coef() rho_a = self._vol_to_rho(V_a) order = coef_a.size Pcoef_a = coef_a*rho0**np.flipud(np.arange(order)) x = rho_a/rho0 press_a = np.polyval(Pcoef_a, x-1) 
core.simplify_poly(Pcoef_a) V0, = self.eos_mod.get_param_values(param_names=['V0']) coef_a, rho0 = self._get_poly_coef() coef_rev_a = np.flipud(coef_a) order = coef_a.size coef_exp_a = np.flipud(np.arange(0,order)) energy_a = np.zeros(V_a.shape) energy_a += coef_rev_a[0]*(V_a-V0)*PV_ratio energy_a += coef_rev_a[1]*np.log(V_a/V0)*PV_ratio for deg in range(2,order): energy_a += coef_rev_a[deg]*() return energy_a def get_energy_scale(self): V0, dPdrho = self.get_param_defaults(['V0','_Pcoef_1']) rho0 = self._vol_to_rho(V0) K0 = rho0*dPdrho energy_scale = np.round(V0*K0/core.CONSTS['PV_ratio'],decimals=2) return energy_scale def _init_params(self, order): """Initialize list of calculator parameter names.""" rho0 = 2.58516 coef_basename = 'Pcoef' param_names = core.make_array_param_names(coef_basename, order, skipzero=True) param_values_sio2 = [8.78411, 12.08481, -5.5986, 4.92863, -0.90499] if order>6: param_defaults = [0 for ind in range(1,order)] param_defaults[0:5] = param_values_sio2 else: param_defaults = param_values_sio2[0:order-1] param_scales = [1 for ind in range(1,order)] param_units = core.make_array_param_units(param_names, base_unit='GPa', deriv_unit='(g/cc)') V0 = self._rho_to_vol(rho0) param_names.append('V0') param_scales.append(V0) param_units.append('ang^3') param_defaults.append(V0) self._set_params(param_names, param_units, param_defaults, param_scales, order=order) pass #==================================================================== class _PolyV(CompressCalc): _name='PolyV' def _init_params(self, order): """Initialize list of calculator parameter names.""" order = 3 # ignore order input V0, K0, KP0 = 100, 150, 4 param_names = ['V0','K0','KP0'] param_units = ['ang^3','GPa','1'] param_defaults = [V0,K0,KP0] param_scales = [V0,K0,KP0] self._set_params(param_names, param_units, param_defaults, param_scales, order=order) pass def _calc_press(self, V_a): V0, K0, KP0 = self.eos_mod.get_param_values( param_names=['V0','K0','KP0']) eta = 3/2*(KP0-1) 
vratio_a = V_a/V0 x_a = vratio_a**(1/3) press_a = 3*K0*(1-x_a)*x_a**(-2)*np.exp(eta*(1-x_a)) return press_a def _calc_energy(self, V_a): V0, K0, KP0 = self.eos_mod.get_param_values( param_names=['V0','K0','KP0']) PV_ratio, = core.get_consts(['PV_ratio']) eta = 3/2*(KP0-1) vratio_a = V_a/V0 x_a = vratio_a**(1/3) energy_a = 9*K0*V0/PV_ratio/eta**2*\ (1 + (eta*(1-x_a)-1)*np.exp(eta*(1-x_a))) return energy_a # def get_param_scale_sub(self): # """Return scale values for each parameter""" # V0, K0, KP0 = core.get_params(['V0','K0','KP0']) # PV_ratio, = core.get_consts(['PV_ratio']) # paramkey_a = np.array(['V0','K0','KP0','E0']) # scale_a = np.array([V0,K0,KP0,K0*V0/PV_ratio]) # return scale_a, paramkey_a # def _calc_energy_perturb(self, V_a): # """Returns Energy pertubation basis functions resulting from fractional changes to EOS params.""" # V0, K0, KP0, E0 = core.get_params(['V0','K0','KP0','E0']) # PV_ratio, = core.get_consts(['PV_ratio']) # eta = 3/2*(KP0-1) # vratio_a = V_a/V0 # x = vratio_a**(1/3) # scale_a, paramkey_a = self.get_param_scale_sub() # # NOTE: CHECK UNITS (PV_RATIO) here # dEdp_a = 1/PV_ratio*np.vstack\ # ([-3*K0*(eta**2*x*(x-1) + 3*eta*(x-1) - 3*np.exp(eta*(x-1)) + 3)\ # *np.exp(-eta*(x-1))/eta**2, # -9*V0*(eta*(x-1) - np.exp(eta*(x-1)) + 1)*np.exp(-eta*(x-1))/eta**2, # 27*K0*V0*(2*eta*(x-1) + eta*(-x + (x-1)*(eta*(x-1) + 1) + 1) # -2*np.exp(eta*(x-1)) + 2)*np.exp(-eta*(x-1))/(2*eta**3), # PV_ratio*np.ones(V_a.shape)]) # Eperturb_a = np.expand_dims(scale_a,1)*dEdp_a # #Eperturb_a = np.expand_dims(scale_a)*dEdp_a # return Eperturb_a, scale_a, paramkey_a #====================================================================
mit
6,094,169,553,511,042,000
32.871345
107
0.512978
false
2.98198
false
false
false
pointhi/searx
searx/plugins/tracker_url_remover.py
1
1397
''' searx is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. searx is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with searx. If not, see < http://www.gnu.org/licenses/ >. (C) 2015 by Adam Tauber, <asciimoo@gmail.com> ''' from flask_babel import gettext import re from urlparse import urlunparse regexes = {re.compile(r'utm_[^&]+&?'), re.compile(r'(wkey|wemail)[^&]+&?'), re.compile(r'&$')} name = gettext('Tracker URL remover') description = gettext('Remove trackers arguments from the returned URL') default_on = True def on_result(request, ctx): query = ctx['result']['parsed_url'].query if query == "": return True for reg in regexes: query = reg.sub('', query) if query != ctx['result']['parsed_url'].query: ctx['result']['parsed_url'] = ctx['result']['parsed_url']._replace(query=query) ctx['result']['url'] = urlunparse(ctx['result']['parsed_url']) return True
agpl-3.0
-4,441,933,707,822,547,500
30.75
87
0.688618
false
3.775676
false
false
false
teagles/teagles-pc
24/24.py
1
3371
#!/usr/bin/env python # http://butter:fly@www.pythonchallenge.com/pc/hex/ambiguity.html import os import sys import Image import requests import collections import ImageDraw IMAGE_URL = 'http://butter:fly@www.pythonchallenge.com/pc/hex/maze.png' FNAME = '24/maze.png' Point = collections.namedtuple('Point', ['x', 'y']) class MazeTurtle: DIRECTIONS = {'north': lambda p: Point(p.x, p.y - 1), 'east': lambda p: Point(p.x + 1, p.y), 'south': lambda p: Point(p.x, p.y + 1), 'west': lambda p: Point(p.x - 1, p.y)} def __init__(self, img, start_point, goal_point, wall_colour): self.img = img self.pix = img.load() self.point = start_point self.goal_point = goal_point self.wall_colour = wall_colour self.visited = set() self.path = [] self.branches = [] self.dead_ends = [] self.im_num = 0 def valid_point(self, p): return (p.x >= 0 and p.y >= 0 and p.x < self.img.size[0] and p.y < self.img.size[1]) def scout(self): possibilities = [] for fp in MazeTurtle.DIRECTIONS.values(): pp = fp(self.point) if self.valid_point(pp): if self.pix[pp] != self.wall_colour: if pp not in self.visited: possibilities.append(pp) return possibilities def victory(self): return (self.goal_point.x == self.point.x and self.goal_point.y == self.point.y) def find_path(self): while (not self.victory()): self.path.append(self.point) self.visited.add(self.point) possibilities = self.scout() if len(possibilities) > 0: if len(possibilities) > 1: self.branches.append((len(self.path), possibilities)) self.point = possibilities[0] else: #print self.path #print self.branches self.dead_ends.append(self.path[self.branches[-1][0]:]) del self.path[self.branches[-1][0]:] del self.branches[-1][1][0] #self.show_path() #raw_input('Continue:') self.point = self.branches[-1][1][0] if len(self.branches[-1][1]) is 1: del self.branches[-1] self.path.append(self.point) return self.path def show_path(self): temp = self.img.copy() draw = ImageDraw.Draw(temp) for dp in self.path: draw.point(dp, fill='green') for de in 
self.dead_ends[:-1]: for dp in de: #draw.point(dp, fill='blue') pass for dp in self.dead_ends[-1]: #draw.point(dp, fill='purple') pass temp.save('24/img%d.png' % self.im_num, quality=100) self.im_num = self.im_num + 1 # 24/24.py def main(args=None): if args is None: args = sys.argv[1:] if not os.path.isfile(FNAME): with open(FNAME, 'wb') as f: f.write(requests.get(IMAGE_URL).content) img = Image.open(FNAME) turtle = MazeTurtle(img, Point(639, 0), Point(1, 640), 25) print turtle.find_path() turtle.show_path() if __name__ == '__main__': main()
mit
9,179,901,707,781,394,000
30.801887
73
0.5221
false
3.468107
false
false
false
MJB47/Jokusoramame
joku/cogs/mod.py
1
13299
""" Non-generic moderation cog. """ import asyncio import collections import random import discord from discord.ext import commands from joku.cogs._common import Cog from joku.core import checks from joku.core.bot import Context from joku.core.checks import mod_command, bot_has_permissions from joku.core.utils import get_role class Moderation(Cog): """ Non-generic moderation cog. """ async def on_member_remove(self, member: discord.Member): # Rolestate await self.bot.database.save_rolestate(member) async def on_member_join(self, member: discord.Member): # Rolestate setting = await self.bot.database.get_setting(member.guild, "rolestate", {}) if setting.get("status") == 1: rolestate = await self.bot.database.get_rolestate_for_member(member) if rolestate is None: return roles = [discord.utils.get(member.guild.roles, id=r_id) for r_id in rolestate.roles] roles = [r for r in roles if r is not None] await member.edit(roles=roles) if rolestate.nick: await member.edit(nick=rolestate.nick) async def on_message(self, message: discord.Message): # Anti mention spam # Use a set here as we only want to ban unique mentions mentions = len(set(message.mentions) & set(message.role_mentions)) if mentions < 3: # dont bother loading from DB return if message.guild is None: return if message.author.top_role >= message.guild.me.top_role or message.author == message.guild.owner: # can't ban anyway return c = await self.bot.database.get_setting(message.guild, "mention_spam", { "enabled": False, "threshold": 5 }) if c["enabled"] is True: if mentions == c["threshold"]: guild = message.guild # type: discord.Guild await guild.ban(message.author) await message.channel.send("Member **{}** was automatically banned for going over the mention spam " "limit.".format(message.author)) # anti mention spam @commands.group(pass_context=True, invoke_without_command=True) @checks.has_permissions(ban_members=True) @mod_command() async def antimention(self, ctx: Context, *, status: str = None): """ Toggles 
the antimention status in this server. """ previous = await ctx.bot.database.get_setting(ctx.guild, "mention_spam", { "enabled": False, "threshold": 5 }) if status is None or status not in ["on", "off"]: current_status = previous.get("enabled", False) if current_status: await ctx.send("Anti-mention spam is currently **on**.") else: await ctx.send("Anti-mention spam is currently **off**.") return if status == "on": await ctx.bot.database.set_setting(ctx.guild, "mention_spam", **{ "enabled": True, "threshold": previous["threshold"] }) await ctx.send(":heavy_check_mark: Enabled anti-mention spam.") elif status == "off": await ctx.bot.database.set_setting(ctx.guild, "mention_spam", **{ "enabled": False, "threshold": previous["threshold"] }) await ctx.send(":heavy_check_mark: Disabled anti-mention spam.") @antimention.command() @checks.has_permissions(ban_members=True) @mod_command() async def threshold(self, ctx: Context, threshold: int): """ Changes the threshold for anti-mention spam to ban at. """ if threshold < 3: await ctx.send(":x: Cannot set a threshold lower than 3.") return previous = await ctx.bot.database.get_setting(ctx.guild, "mention_spam", { "enabled": False, "threshold": 5 }) await ctx.bot.database.set_setting(ctx.guild, "mention_spam", enabled=previous["enabled"], threshold=threshold) await ctx.send(":heavy_check_mark: Set anti-mention spam threshold to {}.".format(threshold)) @commands.command(pass_context=True) @checks.has_permissions(ban_members=True) @bot_has_permissions(ban_members=True) @mod_command() async def xban(self, ctx: Context, user_id: int): """ Cross-bans a user. 
""" if user_id in [m.id for m in ctx.message.guild.members]: await ctx.channel.send(":x: This command is used for banning members not in the server.") return try: user = await ctx.bot.get_user_info(user_id) await ctx.bot.http.ban(user_id, ctx.message.guild.id, 0) except discord.Forbidden: await ctx.channel.send(":x: 403 FORBIDDEN") except discord.NotFound: await ctx.channel.send(":x: User not found.") else: await ctx.channel.send(":heavy_check_mark: Banned user {}.".format(user.name)) @commands.group(pass_context=True, invoke_without_command=True) @checks.has_permissions(manage_guild=True, manage_roles=True) @mod_command() async def rolestate(self, ctx: Context, *, status: str = None): """ Manages rolestate. This will automatically save roles for users who have left the server. """ if status is None: # Check the status. setting = await ctx.bot.database.get_setting(ctx.message.guild, "rolestate", {}) if setting.get("status") == 1: await ctx.channel.send("Rolestate is currently **on.**") else: await ctx.channel.send("Rolestate is currently **off.**") else: if status.lower() == "on": await ctx.bot.database.set_setting(ctx.message.guild, "rolestate", status=1) await ctx.channel.send(":heavy_check_mark: Turned Rolestate on.") return elif status.lower() == "off": await ctx.bot.database.set_setting(ctx.message.guild, "rolestate", status=0) await ctx.channel.send(":heavy_check_mark: Turned Rolestate off.") return else: await ctx.channel.send(":x: No.") @rolestate.command() @checks.has_permissions(manage_guild=True, manage_roles=True) @mod_command() async def view(self, ctx: Context, *, user_id: int = None): """ Views the current rolestate of a member. 
""" if user_id is None: user_id = ctx.author.id rolestate = await self.bot.database.get_rolestate_for_id(ctx.guild.id, user_id) user = await ctx.bot.get_user_info(user_id) # type: discord.User em = discord.Embed(title="Rolestate viewer") if rolestate is None: em.description = "**No rolestate found for this user here.**" em.colour = discord.Colour.red() else: em.description = "This shows the most recent rolestate for a user ID. This is **not accurate** if they " \ "haven't left before, or are still in the guild." em.add_field(name="Username", value=user.name) em.add_field(name="Nick", value=rolestate.nick, inline=False) roles = ", ".join([get_role(ctx.guild, r_id).mention for r_id in rolestate.roles if r_id != ctx.guild.id]) em.add_field(name="Roles", value=roles, inline=False) em.colour = discord.Colour.light_grey() em.set_thumbnail(url=user.avatar_url) em.set_footer(text="Rolestate for guild {}".format(ctx.guild.name)) await ctx.send(embed=em) @commands.command(pass_context=True) @commands.cooldown(rate=1, per=5 * 60, type=commands.BucketType.guild) @checks.has_permissions(kick_members=True) @mod_command() async def islandbot(self, ctx: Context): """ Who will be voted off of the island? """ message = ctx.message # type: discord.Message channel = message.channel # Begin the raffle! timeout = random.randrange(30, 60) await ctx.channel.send(":warning: :warning: :warning: Raffle ends in **{}** seconds!".format(timeout)) # messages to collect messages = [] async def _inner(): # inner closure point - this is killed by asyncio.wait() while True: next_message = await ctx.bot.wait_for("message", check=lambda m: m.channel == channel) if next_message.author == message.guild.me: continue # Do some checks on the user to make sure we can kick them. 
if next_message.author.guild_permissions.administrator: continue if next_message.author.top_role >= message.guild.me.top_role: continue messages.append(next_message) try: # wait for the waiter, but discard it when we're done await asyncio.wait_for(_inner(), timeout=timeout) except asyncio.TimeoutError: pass # gather all the users in the messages authors = list({m.author for m in messages}) # Choose some random people from the authors. chosen = [] for x in range(0, min(len(authors), 5)): r = random.choice(authors) chosen.append(r) authors.remove(r) if not chosen: await ctx.channel.send(":x: Nobody entered the raffle") return fmt = ":island: These people are up for vote:\n\n{}\n\nMention to vote.".format( "\n".join(m.mention for m in chosen) ) await ctx.channel.send(fmt) votes = [] voted = [] async def _inner2(): while True: next_message = await ctx.bot.wait_for("message", check=lambda m: m.channel == channel) # Ignore bots. if next_message.author.bot: continue # No double voting. if next_message.author in voted: continue # They didn't mention anyone. if not next_message.mentions: continue # Check the first mention. m = next_message.mentions[0] # You can't vote for somebody not in the raffle! if m not in chosen: continue if m == next_message.author: await ctx.send("I am not a tool for assisted suicide") continue # Add them to the votes, and add the author to the voted count. votes.append(m) voted.append(next_message.author) try: # wait for the waiter, but discard it when we're done await asyncio.wait_for(_inner2(), timeout=timeout) except asyncio.TimeoutError: pass # Count the votes. 
counted = collections.Counter(votes) try: winner = counted.most_common()[0] except IndexError: await ctx.channel.send(":bomb: Nobody voted") return await ctx.channel.send(":medal: The winner is {}, with `{}` votes!".format(winner[0].mention, winner[1])) try: await winner[0].send("You have been voted off the island.") except discord.HTTPException: pass try: await ctx.guild.kick(winner[0]) except discord.HTTPException: await ctx.send(channel, "The island is rigged") @commands.command(pass_context=True) @checks.has_permissions(manage_nicknames=True) @mod_command() async def massnick(self, ctx: Context, prefix: str = "", suffix: str = ""): """ Mass-nicknames an entire server. """ coros = [] for member in ctx.message.guild.members: coros.append(member.edit(nick=prefix + member.name + suffix)) fut = asyncio.gather(*coros, return_exceptions=True, loop=ctx.bot.loop) async with ctx.channel.typing(): await fut count = sum(1 for i in fut.result() if not isinstance(i, Exception)) forbidden = sum(1 for i in fut.result() if isinstance(i, discord.Forbidden)) httperror = sum(1 for i in fut.result() if isinstance(i, discord.HTTPException)) - forbidden failed = ctx.message.guild.member_count - count await ctx.channel.send( ":heavy_check_mark: Updated `{}` nicknames - failed to change `{}` nicknames. " "(`{}` forbidden, `{}` too long/other)".format(count, failed, forbidden, httperror) ) setup = Moderation.setup
mit
-5,663,790,421,347,314,000
36.78125
119
0.560117
false
4.119888
false
false
false
Godley/Music-Library
implementation/primaries/GUI/PlaylistDialog.py
1
2929
from PyQt4 import QtCore, QtGui, uic import os from implementation.primaries.GUI.helpers import get_base_dir from implementation.primaries.GUI import themedWindow class PlaylistDialog(QtGui.QDialog, themedWindow.ThemedWindow): def __init__(self, app, theme, themes, design_folder): QtGui.QDialog.__init__(self) themedWindow.ThemedWindow.__init__(self, theme, themes) self.qApp = app self.theme = theme self.design_folder = design_folder def load(self): path_to_file = os.path.join(self.design_folder, "NewPlaylist.ui") uic.loadUi(path_to_file, self) self.autoCompleteFrame.hide() self.buttonBox.accepted.connect(self.newPlaylistOk) self.autoCompleteBox.itemDoubleClicked.connect(self.itemClicked) self.piecesLineEdit.editingFinished.connect(self.onInactiveSearchBar) self.deleteItem.clicked.connect(self.removeItem) self.piecesLineEdit.textChanged.connect(self.updateOptions) self.piecesLineEdit.editingFinished.connect(self.onInactiveSearchBar) self.applyTheme() def removeItem(self): listItems = self.listWidget.selectedItems() if not listItems: return for item in listItems: self.listWidget.takeItem(self.listWidget.row(item)) self.listWidget.show() def newPlaylistOk(self): data = {"name": self.playlistNameLineEdit.text(), "pieces": []} for i in range(self.listWidget.count()): item = self.listWidget.item(i) fname = item.data(2) data["pieces"].append(fname) self.qApp.addPlaylist(data) def updateOptions(self): text = self.piecesLineEdit.text() results = self.qApp.queryNotThreaded(text) self.autoCompleteBox.clear() for key in results: item = QtGui.QTreeWidgetItem(key) item.setData(0, 0, key) self.autoCompleteBox.addTopLevelItem(item) for file in results[key]: fitem = QtGui.QTreeWidgetItem(file[0]) fitem.setData(0, 0, file[1]) item.addChild(fitem) if len(results) == 0: pass else: pass self.autoCompleteBox.show() self.autoCompleteFrame.show() def onInactiveSearchBar(self): if self.piecesLineEdit.text() == "" or self.piecesLineEdit.text( ) == " " or 
self.autoCompleteBox.topLevelItemCount() == 0 or self.focusWidget() != self.autoCompleteBox: self.autoCompleteBox.clear() self.autoCompleteFrame.hide() self.autoCompleteBox.hide() else: self.updateOptions() def itemClicked(self, current_item): fname = current_item.data(0, 0) item = QtGui.QListWidgetItem(fname) self.listWidget.addItem(item) self.listWidget.show() self.autoCompleteFrame.hide()
gpl-2.0
-3,475,940,596,530,879,000
36.075949
112
0.637078
false
3.915775
false
false
false
bks/veusz
veusz/dialogs/export.py
1
18397
# Copyright (C) 2014 Jeremy S. Sanders # Email: Jeremy Sanders <jeremy@jeremysanders.net> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. ############################################################################## from __future__ import division, print_function import os import os.path from .. import qtall as qt4 from .. import setting from .. import utils from .. 
import document from ..compat import citems, cstrerror, cstr, cgetcwd from .veuszdialog import VeuszDialog def _(text, disambiguation=None, context='ExportDialog'): """Translate text.""" return qt4.QCoreApplication.translate(context, text, disambiguation) # formats which can have multiple pages multipageformats = set(('ps', 'pdf')) bitmapformats = set(('png', 'bmp', 'jpg', 'tiff', 'xpm')) # map formats to names of radio buttons formatradio = ( ('pdf', 'radioFormatPDF'), ('eps', 'radioFormatEPS'), ('ps', 'radioFormatPS' ), ('svg', 'radioFormatSVG'), ('emf', 'radioFormatEMF'), ('png', 'radioFormatPNG'), ('bmp', 'radioFormatBMP'), ('jpg', 'radioFormatJPG'), ('tiff', 'radioFormatTIFF'), ('xpm', 'radioFormatXPM'), ) class ExportDialog(VeuszDialog): """Export dialog.""" def __init__(self, mainwindow, doc, docfilename): """Setup dialog.""" VeuszDialog.__init__(self, mainwindow, 'export.ui') self.document = doc doc.signalModified.connect(self.updatePagePages) self.updatePagePages() # change 'Save' button to 'Export' self.buttonBox.button(qt4.QDialogButtonBox.Save).setText(_('Export')) # these are mappings between filetypes and radio buttons self.fmtradios = dict([(f, getattr(self, r)) for f, r in formatradio]) self.radiofmts = dict([(getattr(self, r), f) for f, r in formatradio]) # get allowed types (some formats are disabled if no helper) docfmts = set() for types, descr in document.Export.getFormats(): docfmts.update(types) # disable type if not allowed for fmt, radio in citems(self.fmtradios): if fmt not in docfmts: radio.setEnabled(False) # connect format radio buttons def fmtclicked(f): return lambda: self.formatClicked(f) for r, f in citems(self.radiofmts): r.clicked.connect(fmtclicked(f)) # connect page radio buttons self.radioPageSingle.clicked.connect(lambda: self.pageClicked('single')) self.radioPageAll.clicked.connect(lambda: self.pageClicked('all')) self.radioPagePages.clicked.connect(lambda: self.pageClicked('pages')) # other controls 
self.checkMultiPage.clicked.connect(self.updateSingleMulti) self.buttonBrowse.clicked.connect(self.browseClicked) setdb = setting.settingdb eloc = setdb['dirname_export_location'] # where to export file if eloc == 'doc': self.dirname = os.path.dirname(os.path.abspath(docfilename)) elif eloc == 'cwd': self.dirname = cgetcwd() else: # 'prev' self.dirname = setdb.get('dirname_export', qt4.QDir.homePath()) # set default filename ext = setdb.get('export_format', 'pdf') if not docfilename: docfilename = 'export' self.docname = os.path.splitext(os.path.basename(docfilename))[0] self.formatselected = ext self.pageselected = setdb.get('export_page', 'single') self.checkMultiPage.setChecked(setdb.get('export_multipage', True)) self.updateSingleMulti() self.checkOverwrite.setChecked(setdb.get('export_overwrite', False)) self.exportSVGTextAsText.setChecked(setdb['export_SVG_text_as_text']) self.exportAntialias.setChecked(setdb['export_antialias']) self.exportQuality.setValue(setdb['export_quality']) # validate and set DPIs dpis = ('72', '75', '90', '96', '100', '150', '200', '300') for cntrl in self.exportDPI, self.exportDPISVG, self.exportDPIPDF: cntrl.addItems(dpis) cntrl.setValidator(qt4.QIntValidator(10, 10000, self)) self.exportDPI.setEditText(str(setdb['export_DPI'])) self.exportDPISVG.setEditText(str(setdb['export_DPI_SVG'])) self.exportDPIPDF.setEditText(str(setdb['export_DPI_PDF'])) # button to change bitmap background self.exportBackgroundButton.clicked.connect( self.slotExportBackgroundClicked) self.updateExportBackground(setdb['export_background']) # set correct format self.fmtradios[ext].click() # regexp for comma separated ranges valre = qt4.QRegExp( r'^[0-9]+(\s*-\s*[0-9]+)?(\s*,\s*[0-9]+(\s*-\s*[0-9]+)?)*$') valid = qt4.QRegExpValidator(valre, self) self.editPagePages.setValidator(valid) # set page mode { 'range': self.radioPageSingle, # compatibility 'single': self.radioPageSingle, 'all': self.radioPageAll, 'pages': self.radioPagePages, 
}[self.pageselected].click() # label showing success/failure self.labelStatus.clear() # fix height as widgets are hidden width = self.size().width() self.adjustSize() self.resize(width, self.size().height()) def formatClicked(self, fmt): """If the format is changed.""" setting.settingdb['export_format'] = fmt self.formatselected = fmt self.checkMultiPage.setEnabled(fmt in multipageformats) for c in (self.exportAntialias, self.exportDPI, self.labelDPI, self.exportBackgroundButton, self.labelBackgroundButton): c.setVisible(fmt in bitmapformats) for c in (self.exportDPIPDF, self.labelDPIPDF, self.exportColor, self.labelColor): c.setVisible(fmt in ('pdf', 'ps', 'eps')) for c in (self.exportQuality, self.labelQuality): c.setVisible(fmt == 'jpg') for c in (self.exportSVGTextAsText, self.labelSVGTextAsText, self.exportDPISVG, self.labelDPISVG): c.setVisible(fmt == 'svg') self.updateSingleMulti() filename = os.path.splitext(self.editFileName.text())[0] + '.' + fmt self.editFileName.setText(filename) def pageClicked(self, page): """If page type is set.""" setting.settingdb['export_page'] = page self.pageselected = page self.updateSingleMulti() self.editPagePages.setEnabled(page=='pages') def browseClicked(self): """Browse for file.""" setdb = setting.settingdb # File types we can export to in the form ([extensions], Name) fd = qt4.QFileDialog(self, _('Export page')) filename = self.editFileName.text() dirname = os.path.dirname(self.editFileName.text()) fd.setDirectory(dirname if dirname else self.dirname) fd.setFileMode(qt4.QFileDialog.AnyFile) fd.setAcceptMode(qt4.QFileDialog.AcceptSave) fd.setOptions(qt4.QFileDialog.DontConfirmOverwrite) # Create a mapping between a format string and extensions filtertoext = {} # convert extensions to filter exttofilter = {} filters = [] # a list of extensions which are allowed validextns = [] formats = document.Export.getFormats() for extns, name in formats: extensions = " ".join(["*." 
+ item for item in extns]) # join eveything together to make a filter string filterstr = '%s (%s)' % (name, extensions) filtertoext[filterstr] = extns for e in extns: exttofilter[e] = filterstr filters.append(filterstr) validextns += extns fd.setNameFilters(filters) fd.selectNameFilter(exttofilter[setdb['export_format']]) filename = self.editFileName.text() dirname = os.path.dirname(os.path.abspath(filename)) if os.path.isdir(dirname): fd.selectFile(filename) if fd.exec_() == qt4.QDialog.Accepted: # convert filter to extension filterused = str(fd.selectedNameFilter()) chosenext = filtertoext[filterused][0] filename = fd.selectedFiles()[0] fileext = os.path.splitext(filename)[1][1:] if fileext not in validextns or fileext != chosenext: filename += "." + chosenext self.editFileName.setText(filename) self.fmtradios[chosenext].click() def isMultiFile(self): """Is output going to be multiple pages?""" multifile = self.pageselected != 'single' if (self.formatselected in multipageformats and self.checkMultiPage.isChecked()): multifile = False return multifile def updateSingleMulti(self, _oldmulti=[None]): """Change filename according to selected single or multi button.""" setting.settingdb['export_multipage'] = self.checkMultiPage.isChecked() multifile = self.isMultiFile() if multifile: templ = setting.settingdb['export_template_multi'] else: templ = setting.settingdb['export_template_single'] newfilename = os.path.join( self.dirname, templ.replace('%DOCNAME%', self.docname) + '.' + self.formatselected) # only change if multi format status has changed or is # uninitialised if multifile is not getattr(self, '_oldsinglemulti', None): self.editFileName.setText(newfilename) self._oldsinglemulti = multifile def updatePagePages(self): """Update widgets allowing user to set ranges of pages.""" npages = self.document.getNumberPages() if npages == 0: return text = '%i-%i' % (1, npages) self.editPagePages.setText(text) @qt4.pyqtSlot() def clearLabel(self): """Clear label. 
Defined as a slot to work around PyQt C++ object deleted bug. """ self.labelStatus.clear() def showMessage(self, text): """Show a message in a label, clearing after a time.""" self.labelStatus.setText(text) qt4.QTimer.singleShot(3000, self.clearLabel) def updateExportBackground(self, colorname): """Update color on export background.""" pixmap = qt4.QPixmap(16, 16) col = self.document.evaluate.colors.get(colorname) pixmap.fill(col) # update button (storing color in button itself - what fun!) self.exportBackgroundButton.setIcon(qt4.QIcon(pixmap)) self.exportBackgroundButton.iconcolor = colorname def slotExportBackgroundClicked(self): """Button clicked to change background.""" qcolor = self.document.evaluate.colors.get( self.exportBackgroundButton.iconcolor) color = qt4.QColorDialog.getColor( qcolor, self, "Choose color", qt4.QColorDialog.ShowAlphaChannel ) if color.isValid(): self.updateExportBackground(utils.extendedColorFromQColor(color)) def getPagePages(self): """Get list of entered pages.""" txt = self.editPagePages.text() parts = txt.split(',') pages = [] for p in parts: p = p.replace(' ', '') try: if p.find('-')>=0: rng = p.split('-') pages += list(range(int(rng[0])-1, int(rng[1]))) else: pages.append(int(p)-1) except ValueError: # convertsion error raise RuntimeError(_('Error: invalid list of pages')) # check in range for pg in pages: if pg<0 or pg>=self.document.getNumberPages(): raise RuntimeError(_('Error: pages out of range')) return pages def accept(self): """Do the export""" if self.document.getNumberPages() == 0: self.showMessage(_('Error: no pages in document')) return filename = self.editFileName.text() if (self.isMultiFile() and '%PAGENAME%' not in filename and '%PAGE%' not in filename and '%PAGE00%' not in filename and '%PAGE000%' not in filename): self.showMessage( _('Error: page name or number must be in filename')) return if self.pageselected == 'single': pages = [self.mainwindow.plot.getPageNumber()] elif self.pageselected == 'all': pages = 
list(range(self.document.getNumberPages())) elif self.pageselected == 'pages': try: pages = self.getPagePages() except RuntimeError as e: self.showMessage(str(e)) return setdb = setting.settingdb # update settings from controls setdb['export_overwrite'] = self.checkOverwrite.isChecked() setdb['export_antialias'] = self.exportAntialias.isChecked() setdb['export_quality'] = self.exportQuality.value() setdb['export_color'] = self.exportColor.currentIndex() == 0 setdb['export_background'] = self.exportBackgroundButton.iconcolor setdb['export_SVG_text_as_text'] = self.exportSVGTextAsText.isChecked() # update dpi if possible # FIXME: requires some sort of visual notification of validator for cntrl, setn in ( (self.exportDPI, 'export_DPI'), (self.exportDPIPDF, 'export_DPI_PDF'), (self.exportDPISVG, 'export_DPI_SVG')): try: text = cntrl.currentText() valid = cntrl.validator().validate(text, 0)[0] if valid == qt4.QValidator.Acceptable: setdb[setn] = int(text) except ValueError: pass export = document.Export( self.document, '', # filename [0], # page numbers bitmapdpi=setdb['export_DPI'], pdfdpi=setdb['export_DPI_PDF'], antialias=setdb['export_antialias'], color=setdb['export_color'], quality=setdb['export_quality'], backcolor=setdb['export_background'], svgtextastext=setdb['export_SVG_text_as_text'], svgdpi=setdb['export_DPI_SVG'], ) def _overwriteQuestion(filename): """Ask user whether file can be overwritten.""" retn = qt4.QMessageBox.question( self, _("Overwrite file?"), _("The file %s already exists") % os.path.basename(filename), qt4.QMessageBox.Save | qt4.QMessageBox.Cancel, qt4.QMessageBox.Cancel) return retn == qt4.QMessageBox.Save # count exported pages (in list so can be modified in function) pagecount = [0] def _checkAndExport(): """Check whether file exists and export if ok.""" if os.path.exists(export.filename): if not setdb['export_overwrite']: if not _overwriteQuestion(export.filename): return # show busy cursor 
qt4.QApplication.setOverrideCursor(qt4.QCursor(qt4.Qt.WaitCursor)) # delete file if already exists try: os.unlink(export.filename) except EnvironmentError: pass try: # actually do the export export.export() pagecount[0] += len(export.pagenumbers) except (RuntimeError, EnvironmentError) as e: # errors from the export if isinstance(e, EnvironmentError): msg = cstrerror(e) else: msg = cstr(e) qt4.QApplication.restoreOverrideCursor() qt4.QMessageBox.critical( self, _("Error - Veusz"), _("Error exporting to file '%s'\n\n%s") % (export.filename, msg)) else: qt4.QApplication.restoreOverrideCursor() if self.isMultiFile() or len(pages)==1: # write pages to multiple files for page in pages: pagename = self.document.getPage(page).name export.pagenumbers = [page] pg = page+1 fname = filename.replace('%PAGE%', str(pg)) fname = fname.replace('%PAGE00%', '%02i' % pg) fname = fname.replace('%PAGE000%', '%03i' % pg) fname = fname.replace('%PAGENAME%', pagename) export.filename = fname _checkAndExport() else: # write page/pages to single file fname = filename.replace('%PAGE%', _('none')) fname = fname.replace('%PAGE00%', _('none')) fname = fname.replace('%PAGE000%', _('none')) fname = fname.replace('%PAGENAME%', _('none')) export.pagenumbers = pages export.filename = fname _checkAndExport() dirname = os.path.dirname(filename) if dirname: setting.settingdb['dirname_export'] = dirname # format feedback ext = os.path.splitext(export.filename)[1] if ext: utils.feedback.exportcts[ext] += 1 if pagecount[0] > 0: self.showMessage(_('Exported %i page(s)') % pagecount[0])
gpl-2.0
8,928,892,009,857,449,000
36.931959
81
0.585802
false
4.144402
false
false
false
hakii27/PythonVersionMaster
Results/OneDimDot/MCTDHF/w=05/DensityPlot.py
1
1339
infile = open("DensityCC_w=05_L=10.txt",'r') infile2 = open("DensityFCI_w=05_N=2_L=6_t=10.txt",'r') infile3 = open("DensityCCSD_w=05_N=2_L=6_t=10.txt",'r') densityCC_HF = [] densityFCI = [] densityCC2 = [] infile.next() infile.next() infile2.next() infile2.next() infile3.next() infile3.next() for line in infile: tmp = line.split(",") tmp2 = tmp[0].split("(") d = float(tmp2[1]) densityCC_HF.append(d) for line in infile2: tmp = line.split(",") tmp2 = tmp[0].split("(") d = float(tmp2[1]) print d densityFCI.append(d) for line in infile3: tmp = line.split(",") tmp2 = tmp[0].split("(") d = float(tmp2[1]) print d densityCC2.append(d) from numpy import * Lx = 10 densityCC_HF = array(densityCC_HF) densityFCI = array(densityFCI) densityCC2 = array(densityCC2) #densityCC_HF = array(densityCC_HF) x = linspace(-Lx,Lx,len(densityFCI)) dx = x[1]-x[0] print sum(densityFCI) print sum(densityCC_HF) print sum(densityCC2) import matplotlib.pyplot as plt plt.figure(1) plt.title("Onebody Density for w=0.5 FCI vs. CCSD") plt.plot(x,densityCC2/dx,'-ob',x,densityFCI/dx,'-r') #,t_vec,EsinPert,'-r') plt.legend(["CCSD","FCI"]) plt.xlabel("x",fontsize=16) plt.ylabel("$p(x,x)$",fontsize=16) plt.figure(2) plt.title("Difference") plt.semilogy(x,abs(densityCC2-densityFCI)/dx,'o') plt.show()
lgpl-3.0
7,361,011,529,086,168,000
20.612903
75
0.655713
false
2.300687
false
false
false
Azure/azure-sdk-for-python
sdk/servicebus/azure-servicebus/tests/servicebus_preparer.py
1
25718
import functools import hashlib import os import time from collections import namedtuple from azure.mgmt.servicebus import ServiceBusManagementClient from azure.mgmt.servicebus.models import SBQueue, SBSubscription, AccessRights from azure_devtools.scenario_tests.exceptions import AzureTestError from devtools_testutils import ( ResourceGroupPreparer, AzureMgmtPreparer, FakeResource, get_region_override ) from devtools_testutils.resource_testcase import RESOURCE_GROUP_PARAM SERVICEBUS_DEFAULT_AUTH_RULE_NAME = 'RootManageSharedAccessKey' SERVICEBUS_NAMESPACE_PARAM = 'servicebus_namespace' SERVICEBUS_TOPIC_PARAM = 'servicebus_topic' SERVICEBUS_SUBSCRIPTION_PARAM = 'servicebus_subscription' SERVICEBUS_QUEUE_PARAM = 'servicebus_queue' SERVICEBUS_AUTHORIZATION_RULE_PARAM = 'servicebus_authorization_rule' SERVICEBUS_QUEUE_AUTHORIZATION_RULE_PARAM = 'servicebus_queue_authorization_rule' # Service Bus Namespace Preparer and its shorthand decorator class ServiceBusNamespacePreparer(AzureMgmtPreparer): def __init__(self, name_prefix='', use_cache=False, sku='Standard', location=get_region_override('westus'), parameter_name=SERVICEBUS_NAMESPACE_PARAM, resource_group_parameter_name=RESOURCE_GROUP_PARAM, disable_recording=True, playback_fake_resource=None, client_kwargs=None, random_name_enabled=True): super(ServiceBusNamespacePreparer, self).__init__(name_prefix, 24, random_name_enabled=random_name_enabled, disable_recording=disable_recording, playback_fake_resource=playback_fake_resource, client_kwargs=client_kwargs) self.location = location self.sku = sku self.resource_group_parameter_name = resource_group_parameter_name self.parameter_name = parameter_name self.connection_string = '' if random_name_enabled: self.resource_moniker = self.name_prefix + "sbname" self.set_cache(use_cache, sku, location) def create_resource(self, name, **kwargs): if self.is_live: self.client = self.create_mgmt_client(ServiceBusManagementClient) group = self._get_resource_group(**kwargs) retries = 
4 for i in range(retries): try: namespace_async_operation = self.client.namespaces.create_or_update( group.name, name, { 'sku': {'name': self.sku}, 'location': self.location, } ) self.resource = namespace_async_operation.result() break except Exception as ex: error = "The requested resource {} does not exist".format(group.name) not_found_error = "Operation returned an invalid status code 'Not Found'" if (error not in str(ex) and not_found_error not in str(ex)) or i == retries - 1: raise time.sleep(3) key = self.client.namespaces.list_keys(group.name, name, SERVICEBUS_DEFAULT_AUTH_RULE_NAME) self.connection_string = key.primary_connection_string self.key_name = key.key_name self.primary_key = key.primary_key self.test_class_instance.scrubber.register_name_pair( name, self.resource_moniker ) else: self.resource = FakeResource(name=name, id=name) self.connection_string = 'Endpoint=sb://{}.servicebus.windows.net/;SharedAccessKeyName=test;SharedAccessKey=THISISATESTKEYXXXXXXXXXXXXXXXXXXXXXXXXXXXX='.format(name) self.key_name = SERVICEBUS_DEFAULT_AUTH_RULE_NAME self.primary_key = 'ZmFrZV9hY29jdW50X2tleQ==' return { self.parameter_name: self.resource, '{}_connection_string'.format(self.parameter_name): self.connection_string, '{}_key_name'.format(self.parameter_name): self.key_name, '{}_primary_key'.format(self.parameter_name): self.primary_key, } def remove_resource(self, name, **kwargs): if self.is_live: group = self._get_resource_group(**kwargs) self.client.namespaces.delete(group.name, name, polling=False) def _get_resource_group(self, **kwargs): try: return kwargs.get(self.resource_group_parameter_name) except KeyError: template = 'To create a service bus a resource group is required. Please add ' \ 'decorator @{} in front of this service bus preparer.' raise AzureTestError(template.format(ResourceGroupPreparer.__name__)) # Shared base class for service bus sub-resources that require a namespace and RG to exist. 
class _ServiceBusChildResourcePreparer(AzureMgmtPreparer): def __init__(self, name_prefix='', resource_group_parameter_name=RESOURCE_GROUP_PARAM, servicebus_namespace_parameter_name=SERVICEBUS_NAMESPACE_PARAM, disable_recording=True, playback_fake_resource=None, client_kwargs=None, random_name_enabled=True): super(_ServiceBusChildResourcePreparer, self).__init__(name_prefix, 24, random_name_enabled=random_name_enabled, disable_recording=disable_recording, playback_fake_resource=playback_fake_resource, client_kwargs=client_kwargs) self.resource_group_parameter_name = resource_group_parameter_name self.servicebus_namespace_parameter_name = servicebus_namespace_parameter_name def _get_resource_group(self, **kwargs): try: return kwargs.get(self.resource_group_parameter_name) except KeyError: template = 'To create this service bus child resource service bus a resource group is required. Please add ' \ 'decorator @{} in front of this service bus preparer.' raise AzureTestError(template.format(ResourceGroupPreparer.__name__)) def _get_namespace(self, **kwargs): try: return kwargs.get(self.servicebus_namespace_parameter_name) except KeyError: template = 'To create this service bus child resource a service bus namespace is required. Please add ' \ 'decorator @{} in front of this service bus preparer.' 
raise AzureTestError(template.format(ServiceBusNamespacePreparer.__name__)) class ServiceBusTopicPreparer(_ServiceBusChildResourcePreparer): def __init__(self, name_prefix='', use_cache=False, parameter_name=SERVICEBUS_TOPIC_PARAM, resource_group_parameter_name=RESOURCE_GROUP_PARAM, servicebus_namespace_parameter_name=SERVICEBUS_NAMESPACE_PARAM, disable_recording=True, playback_fake_resource=None, client_kwargs=None, random_name_enabled=True): super(ServiceBusTopicPreparer, self).__init__(name_prefix, random_name_enabled=random_name_enabled, resource_group_parameter_name=resource_group_parameter_name, servicebus_namespace_parameter_name=servicebus_namespace_parameter_name, disable_recording=disable_recording, playback_fake_resource=playback_fake_resource, client_kwargs=client_kwargs) self.parameter_name = parameter_name if random_name_enabled: self.resource_moniker = self.name_prefix + "sbtopic" self.set_cache(use_cache) def create_resource(self, name, **kwargs): if self.is_live: self.client = self.create_mgmt_client(ServiceBusManagementClient) group = self._get_resource_group(**kwargs) namespace = self._get_namespace(**kwargs) retries = 4 for i in range(retries): try: self.resource = self.client.topics.create_or_update( group.name, namespace.name, name, {} ) break except Exception as ex: error = "The requested resource {} does not exist".format(namespace) not_found_error = "Operation returned an invalid status code 'Not Found'" if (error not in str(ex) and not_found_error not in str(ex)) or i == retries - 1: raise time.sleep(3) self.test_class_instance.scrubber.register_name_pair( name, self.resource_moniker ) else: self.resource = FakeResource(name=name, id=name) return { self.parameter_name: self.resource, } def remove_resource(self, name, **kwargs): if self.is_live: group = self._get_resource_group(**kwargs) namespace = self._get_namespace(**kwargs) self.client.topics.delete(group.name, namespace.name, name, polling=False) class 
ServiceBusSubscriptionPreparer(_ServiceBusChildResourcePreparer): def __init__(self, name_prefix='', use_cache=False, parameter_name=SERVICEBUS_SUBSCRIPTION_PARAM, resource_group_parameter_name=RESOURCE_GROUP_PARAM, servicebus_namespace_parameter_name=SERVICEBUS_NAMESPACE_PARAM, servicebus_topic_parameter_name=SERVICEBUS_TOPIC_PARAM, requires_session=False, disable_recording=True, playback_fake_resource=None, client_kwargs=None, random_name_enabled=True): super(ServiceBusSubscriptionPreparer, self).__init__(name_prefix, random_name_enabled=random_name_enabled, resource_group_parameter_name=resource_group_parameter_name, servicebus_namespace_parameter_name=servicebus_namespace_parameter_name, disable_recording=disable_recording, playback_fake_resource=playback_fake_resource, client_kwargs=client_kwargs) self.servicebus_topic_parameter_name = servicebus_topic_parameter_name self.parameter_name = parameter_name if random_name_enabled: self.resource_moniker = self.name_prefix + "sbsub" self.set_cache(use_cache, requires_session) self.requires_session=requires_session if random_name_enabled: self.resource_moniker = self.name_prefix + "sbqueue" def create_resource(self, name, **kwargs): if self.is_live: self.client = self.create_mgmt_client(ServiceBusManagementClient) group = self._get_resource_group(**kwargs) namespace = self._get_namespace(**kwargs) topic = self._get_topic(**kwargs) retries = 4 for i in range(retries): try: self.resource = self.client.subscriptions.create_or_update( group.name, namespace.name, topic.name, name, SBSubscription( requires_session=self.requires_session ) ) break except Exception as ex: error = "The requested resource {} does not exist".format(namespace) not_found_error = "Operation returned an invalid status code 'Not Found'" if (error not in str(ex) and not_found_error not in str(ex)) or i == retries - 1: raise time.sleep(3) self.test_class_instance.scrubber.register_name_pair( name, self.resource_moniker ) else: self.resource = 
FakeResource(name=name, id=name) return { self.parameter_name: self.resource, } def remove_resource(self, name, **kwargs): if self.is_live: group = self._get_resource_group(**kwargs) namespace = self._get_namespace(**kwargs) topic = self._get_topic(**kwargs) self.client.subscriptions.delete(group.name, namespace.name, topic.name, name, polling=False) def _get_topic(self, **kwargs): try: return kwargs.get(self.servicebus_topic_parameter_name) except KeyError: template = 'To create this service bus subscription a service bus topic is required. Please add ' \ 'decorator @{} in front of this service bus preparer.' raise AzureTestError(template.format(ServiceBusTopicPreparer.__name__)) class ServiceBusQueuePreparer(_ServiceBusChildResourcePreparer): def __init__(self, name_prefix='', use_cache=False, requires_duplicate_detection=False, dead_lettering_on_message_expiration=False, requires_session=False, lock_duration='PT30S', parameter_name=SERVICEBUS_QUEUE_PARAM, resource_group_parameter_name=RESOURCE_GROUP_PARAM, servicebus_namespace_parameter_name=SERVICEBUS_NAMESPACE_PARAM, disable_recording=True, playback_fake_resource=None, client_kwargs=None, random_name_enabled=True): super(ServiceBusQueuePreparer, self).__init__(name_prefix, random_name_enabled=random_name_enabled, resource_group_parameter_name=resource_group_parameter_name, servicebus_namespace_parameter_name=servicebus_namespace_parameter_name, disable_recording=disable_recording, playback_fake_resource=playback_fake_resource, client_kwargs=client_kwargs) self.parameter_name = parameter_name self.set_cache(use_cache, requires_duplicate_detection, dead_lettering_on_message_expiration, requires_session, lock_duration) # Queue parameters self.requires_duplicate_detection=requires_duplicate_detection self.dead_lettering_on_message_expiration=dead_lettering_on_message_expiration self.requires_session=requires_session self.lock_duration=lock_duration if random_name_enabled: self.resource_moniker = self.name_prefix + 
"sbqueue" def create_resource(self, name, **kwargs): if self.is_live: self.client = self.create_mgmt_client(ServiceBusManagementClient) group = self._get_resource_group(**kwargs) namespace = self._get_namespace(**kwargs) retries = 4 for i in range(retries): try: self.resource = self.client.queues.create_or_update( group.name, namespace.name, name, SBQueue( lock_duration=self.lock_duration, requires_duplicate_detection = self.requires_duplicate_detection, dead_lettering_on_message_expiration = self.dead_lettering_on_message_expiration, requires_session = self.requires_session) ) break except Exception as ex: error = "The requested resource {} does not exist".format(namespace) not_found_error = "Operation returned an invalid status code 'Not Found'" if (error not in str(ex) and not_found_error not in str(ex)) or i == retries - 1: raise time.sleep(3) self.test_class_instance.scrubber.register_name_pair( name, self.resource_moniker ) else: self.resource = FakeResource(name=name, id=name) return { self.parameter_name: self.resource, } def remove_resource(self, name, **kwargs): if self.is_live: group = self._get_resource_group(**kwargs) namespace = self._get_namespace(**kwargs) self.client.queues.delete(group.name, namespace.name, name, polling=False) class ServiceBusNamespaceAuthorizationRulePreparer(_ServiceBusChildResourcePreparer): def __init__(self, name_prefix='', use_cache=False, access_rights=[AccessRights.manage, AccessRights.send, AccessRights.listen], parameter_name=SERVICEBUS_AUTHORIZATION_RULE_PARAM, resource_group_parameter_name=RESOURCE_GROUP_PARAM, servicebus_namespace_parameter_name=SERVICEBUS_NAMESPACE_PARAM, disable_recording=True, playback_fake_resource=None, client_kwargs=None, random_name_enabled=True): super(ServiceBusNamespaceAuthorizationRulePreparer, self).__init__(name_prefix, random_name_enabled=random_name_enabled, resource_group_parameter_name=resource_group_parameter_name, 
servicebus_namespace_parameter_name=servicebus_namespace_parameter_name, disable_recording=disable_recording, playback_fake_resource=playback_fake_resource, client_kwargs=client_kwargs) self.parameter_name = parameter_name self.access_rights = access_rights if random_name_enabled: self.resource_moniker = self.name_prefix + "sbnameauth" self.set_cache(use_cache, access_rights) def create_resource(self, name, **kwargs): if self.is_live: self.client = self.create_mgmt_client(ServiceBusManagementClient) group = self._get_resource_group(**kwargs) namespace = self._get_namespace(**kwargs) retries = 4 for i in range(retries): try: self.resource = self.client.namespaces.create_or_update_authorization_rule( group.name, namespace.name, name, self.access_rights ) break except Exception as ex: error = "The requested resource {} does not exist".format(namespace) not_found_error = "Operation returned an invalid status code 'Not Found'" if (error not in str(ex) and not_found_error not in str(ex)) or i == retries - 1: raise time.sleep(3) key = self.client.namespaces.list_keys(group.name, namespace.name, name) connection_string = key.primary_connection_string self.test_class_instance.scrubber.register_name_pair( name, self.resource_moniker ) else: self.resource = FakeResource(name=name, id=name) connection_string = 'https://microsoft.com' return { self.parameter_name: self.resource, '{}_connection_string'.format(self.parameter_name): connection_string, } def remove_resource(self, name, **kwargs): if self.is_live: group = self._get_resource_group(**kwargs) namespace = self._get_namespace(**kwargs) self.client.namespaces.delete_authorization_rule(group.name, namespace.name, name, polling=False) class ServiceBusQueueAuthorizationRulePreparer(_ServiceBusChildResourcePreparer): def __init__(self, name_prefix='', use_cache=False, access_rights=[AccessRights.manage, AccessRights.send, AccessRights.listen], parameter_name=SERVICEBUS_QUEUE_AUTHORIZATION_RULE_PARAM, 
resource_group_parameter_name=RESOURCE_GROUP_PARAM, servicebus_namespace_parameter_name=SERVICEBUS_NAMESPACE_PARAM, servicebus_queue_parameter_name=SERVICEBUS_QUEUE_PARAM, disable_recording=True, playback_fake_resource=None, client_kwargs=None, random_name_enabled=True): super(ServiceBusQueueAuthorizationRulePreparer, self).__init__(name_prefix, random_name_enabled=random_name_enabled, resource_group_parameter_name=resource_group_parameter_name, servicebus_namespace_parameter_name=servicebus_namespace_parameter_name, disable_recording=disable_recording, playback_fake_resource=playback_fake_resource, client_kwargs=client_kwargs) self.parameter_name = parameter_name self.access_rights = access_rights self.servicebus_queue_parameter_name = servicebus_queue_parameter_name if random_name_enabled: self.resource_moniker = self.name_prefix + "sbqueueauth" self.set_cache(use_cache, access_rights) def create_resource(self, name, **kwargs): if self.is_live: self.client = self.create_mgmt_client(ServiceBusManagementClient) group = self._get_resource_group(**kwargs) namespace = self._get_namespace(**kwargs) queue = self._get_queue(**kwargs) retries = 4 for i in range(retries): try: self.resource = self.client.queues.create_or_update_authorization_rule( group.name, namespace.name, queue.name, name, self.access_rights ) break except Exception as ex: error = "The requested resource {} does not exist".format(namespace) not_found_error = "Operation returned an invalid status code 'Not Found'" if (error not in str(ex) and not_found_error not in str(ex)) or i == retries - 1: raise time.sleep(3) key = self.client.queues.list_keys(group.name, namespace.name, queue.name, name) connection_string = key.primary_connection_string self.test_class_instance.scrubber.register_name_pair( name, self.resource_moniker ) else: self.resource = FakeResource(name=name, id=name) connection_string = 'https://microsoft.com' return { self.parameter_name: self.resource, 
'{}_connection_string'.format(self.parameter_name): connection_string, } def remove_resource(self, name, **kwargs): if self.is_live: group = self._get_resource_group(**kwargs) namespace = self._get_namespace(**kwargs) queue = self._get_queue(**kwargs) self.client.queues.delete_authorization_rule(group.name, namespace.name, queue.name, name, polling=False) def _get_queue(self, **kwargs): try: return kwargs.get(self.servicebus_queue_parameter_name) except KeyError: template = 'To create this service bus queue authorization rule a service bus queue is required. Please add ' \ 'decorator @{} in front of this service bus preparer.' raise AzureTestError(template.format(ServiceBusQueuePreparer.__name__)) CachedServiceBusNamespacePreparer = functools.partial(ServiceBusNamespacePreparer, use_cache=True) CachedServiceBusQueuePreparer = functools.partial(ServiceBusQueuePreparer, use_cache=True) CachedServiceBusTopicPreparer = functools.partial(ServiceBusTopicPreparer, use_cache=True) CachedServiceBusSubscriptionPreparer = functools.partial(ServiceBusSubscriptionPreparer, use_cache=True)
mit
2,745,185,431,363,098,600
49.826087
177
0.562291
false
4.771429
true
false
false
vprusso/npa_nonlocal
npa_nonlocal/util.py
1
2230
# -*- coding: utf-8 -*- ''' #------------------------------------------------------------------------------ # Name: util.py # Purpose: Various utility functions for npa_nonlocal. # # Author: Vincent Russo (vrusso@cs.uwaterloo.ca) # # Created: 1/13/2015 # Copyright: (c) Vincent Russo 2015 # Licence: GNU #------------------------------------------------------------------------------ ''' import os import shelve import itertools def check_equal(iterator): '''Checks if elements in an iterable object are all equal to each other.''' return len(set(iterator)) <= 1 def chunks(l, n): '''Splits a list (l) into (n) separate chunks.''' if n < 1: n = 1 return [l[i:i + n] for i in range(0, len(l), n)] def clear(): '''Clears the shell of the spyder application. Use either clear() or cls() ''' os.system('cls') return None def clear_all(): '''Clears all the variables from the workspace of the spyder application''' cls() gl = globals().copy() for var in gl: if var[0] == '_': continue if 'func' in str(globals()[var]): continue if 'module' in str(globals()[var]): continue del globals()[var] def generate_bit_strings(n, basis): '''Generates all bit strings of length n.''' return ["".join(seq) for seq in itertools.product(basis, repeat=n)] def list_2_str(_list): '''Converts a list of objects into a concatenation of strings.''' return ' '.join(map(str, _list)) def load_workspace(): ''' Loads the variables in Python workspaces (similar to MATLAB)''' my_shelf = shelve.open(filename) for key in my_shelf: globals()[key]=my_shelf[key] my_shelf.close() def save_workspace(): ''' Saves the variables in Python workspace (similar to MATLAB)''' filename='shelve.out' my_shelf = shelve.open(filename,'n') # 'n' for new for key in dir(): try: my_shelf[key] = globals()[key] except TypeError: # # __builtins__, my_shelf, and imported modules can not be shelved. # print('ERROR shelving: {0}'.format(key)) my_shelf.close()
gpl-2.0
-4,372,464,292,768,497,000
25.547619
79
0.541256
false
3.722871
false
false
false
Strassengezwitscher/Strassengezwitscher
crowdgezwitscher/events/views.py
1
1531
from django.contrib.auth.mixins import PermissionRequiredMixin from django.views.generic.list import ListView from django.views.generic.detail import DetailView from django.views.generic.edit import DeleteView from django.urls import reverse_lazy from extra_views import CreateWithInlinesView, UpdateWithInlinesView from events.models import Event from events.forms import EventForm, AttachmentFormSet class EventListView(PermissionRequiredMixin, ListView): permission_required = 'events.view_event' model = Event template_name = 'events/list.html' context_object_name = 'events' ordering = '-date' class EventDetail(PermissionRequiredMixin, DetailView): permission_required = 'events.view_event' model = Event template_name = 'events/detail.html' context_object_name = 'event' class EventCreate(PermissionRequiredMixin, CreateWithInlinesView): permission_required = 'events.add_event' model = Event inlines = [AttachmentFormSet] template_name = 'events/form.html' form_class = EventForm class EventUpdate(PermissionRequiredMixin, UpdateWithInlinesView): permission_required = 'events.change_event' model = Event inlines = [AttachmentFormSet] template_name = 'events/form.html' form_class = EventForm class EventDelete(PermissionRequiredMixin, DeleteView): permission_required = 'events.delete_event' model = Event template_name = 'events/delete.html' success_url = reverse_lazy('events:list') context_object_name = 'event'
mit
-7,796,877,386,715,528,000
30.895833
68
0.758981
false
3.997389
false
false
false
hryamzik/ansible
lib/ansible/module_utils/network/cnos/cnos_devicerules.py
1
91032
# This code is part of Ansible, but is an independent component. # This particular file snippet, and this file snippet only, is BSD licensed. # Modules you write using this snippet, which is embedded dynamically by # Ansible still belong to the author of the module, and may assign their # own license to the complete work. # # Copyright (C) 2017 Lenovo, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# # Contains device rule and methods # Lenovo Networking def getRuleString(deviceType, variableId): retVal = variableId + ":" if(deviceType == 'g8272_cnos'): if variableId in g8272_cnos: retVal = retVal + g8272_cnos[variableId] else: retVal = "The variable " + variableId + " is not supported" elif(deviceType == 'g8296_cnos'): if variableId in g8296_cnos: retVal = retVal + g8296_cnos[variableId] else: retVal = "The variable " + variableId + " is not supported" elif(deviceType == 'g8332_cnos'): if variableId in g8332_cnos: retVal = retVal + g8332_cnos[variableId] else: retVal = "The variable " + variableId + " is not supported" elif(deviceType == 'NE1072T'): if variableId in NE1072T: retVal = retVal + NE1072T[variableId] else: retVal = "The variable " + variableId + " is not supported" elif(deviceType == 'NE1032'): if variableId in NE1032: retVal = retVal + NE1032[variableId] else: retVal = "The variable " + variableId + " is not supported" elif(deviceType == 'NE1032T'): if variableId in NE1032T: retVal = retVal + NE1032T[variableId] else: retVal = "The variable " + variableId + " is not supported" elif(deviceType == 'NE10032'): if variableId in NE10032: retVal = retVal + NE10032[variableId] else: retVal = "The variable " + variableId + " is not supported" elif(deviceType == 'NE2572'): if variableId in NE2572: retVal = retVal + NE2572[variableId] else: retVal = "The variable " + variableId + " is not supported" else: if variableId in default_cnos: retVal = retVal + default_cnos[variableId] else: retVal = "The variable " + variableId + " is not supported" return retVal # EOM default_cnos = { 'vlan_id': 'INTEGER_VALUE:1-3999', 'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999', 'vlan_name': 'TEXT:', 'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6', 'vlan_state': 'TEXT_OPTIONS:active,suspend', 'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25', 'vlan_querier': 'IPV4Address:', 'vlan_querier_timeout': 'INTEGER_VALUE:1-65535', 'vlan_query_interval': 'INTEGER_VALUE:1-18000', 
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25', 'vlan_report_suppression': 'INTEGER_VALUE:1-25', 'vlan_robustness_variable': 'INTEGER_VALUE:1-7', 'vlan_startup_query_count': 'INTEGER_VALUE:1-10', 'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_snooping_version': 'INTEGER_VALUE:2-3', 'vlan_access_map_name': 'TEXT: ', 'vlan_ethernet_interface': 'TEXT:', 'vlan_portagg_number': 'INTEGER_VALUE:1-4096', 'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect', 'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only', 'vlan_filter_name': 'TEXT:', 'vlag_auto_recovery': 'INTEGER_VALUE:240-3600', 'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict', 'vlag_instance': 'INTEGER_VALUE:1-64', 'vlag_port_aggregation': 'INTEGER_VALUE:1-4096', 'vlag_priority': 'INTEGER_VALUE:0-65535', 'vlag_startup_delay': 'INTEGER_VALUE:0-3600', 'vlag_tier_id': 'INTEGER_VALUE:1-512', 'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\ keepalive-interval,peer-ip,retry-interval', 'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24', 'vlag_keepalive_interval': 'INTEGER_VALUE:2-300', 'vlag_retry_interval': 'INTEGER_VALUE:1-300', 'vlag_peerip': 'IPV4Address:', 'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management', 'bgp_as_number': 'NO_VALIDATION:1-4294967295', 'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_bgp_local_count': 'INTEGER_VALUE:2-64', 'cluster_id_as_ip': 'IPV4Address:', 'cluster_id_as_number': 'NO_VALIDATION:1-4294967295', 'confederation_identifier': 'INTEGER_VALUE:1-65535', 'condeferation_peers_as': 'INTEGER_VALUE:1-65535', 'stalepath_delay_value': 'INTEGER_VALUE:1-3600', 'maxas_limit_as': 'INTEGER_VALUE:1-2000', 'neighbor_ipaddress': 'IPV4Address:', 'neighbor_as': 'NO_VALIDATION:1-4294967295', 'router_id': 'IPV4Address:', 'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600', 'bgp_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_aggregate_prefix': 'IPV4AddressWithMask:', 'addrfamily_routemap_name': 'TEXT:', 'reachability_half_life': 'INTEGER_VALUE:1-45', 
'start_reuse_route_value': 'INTEGER_VALUE:1-20000', 'start_suppress_route_value': 'INTEGER_VALUE:1-20000', 'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255', 'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45', 'distance_external_AS': 'INTEGER_VALUE:1-255', 'distance_internal_AS': 'INTEGER_VALUE:1-255', 'distance_local_routes': 'INTEGER_VALUE:1-255', 'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp', 'maxpath_numbers': 'INTEGER_VALUE:2-32', 'network_ip_prefix_with_mask': 'IPV4AddressWithMask:', 'network_ip_prefix_value': 'IPV4Address:', 'network_ip_prefix_mask': 'IPV4Address:', 'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295', 'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295', 'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\ static', 'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10', 'bgp_neighbor_af_filtername': 'TEXT:', 'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870', 'bgp_neighbor_af_prefixname': 'TEXT:', 'bgp_neighbor_af_routemap': 'TEXT:', 'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535', 'bgp_neighbor_description': 'TEXT:', 'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255', 'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295', 'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96', 'bgp_neighbor_password': 'TEXT:', 'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254', 'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\ vlan', 'bgp_neighbor_update_ethernet': 'TEXT:', 'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7', 'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094', 'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535', 'ethernet_interface_value': 'INTEGER_VALUE:1-32', 'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32', 'ethernet_interface_string': 'TEXT:', 'loopback_interface_value': 'INTEGER_VALUE:0-7', 'mgmt_interface_value': 
'INTEGER_VALUE:0-0', 'vlan_interface_value': 'INTEGER_VALUE:1-4094', 'portchannel_interface_value': 'INTEGER_VALUE:1-4096', 'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096', 'portchannel_interface_string': 'TEXT:', 'aggregation_group_no': 'INTEGER_VALUE:1-4096', 'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive', 'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\ ipv6,neighbor', 'bfd_interval': 'INTEGER_VALUE:50-999', 'bfd_minrx': 'INTEGER_VALUE:50-999', 'bfd_ multiplier': 'INTEGER_VALUE:3-50', 'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,\ interval', 'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\ meticulous-keyed-md5,meticulous-keyed-sha1,simple', 'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id', 'bfd_key_chain': 'TEXT:', 'bfd_key_id': 'INTEGER_VALUE:0-255', 'bfd_key_name': 'TEXT:', 'bfd_neighbor_ip': 'TEXT:', 'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\ non-persistent', 'bfd_access_vlan': 'INTEGER_VALUE:1-3999', 'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,\ trunk', 'trunk_options': 'TEXT_OPTIONS:allowed,native', 'trunk_vlanid': 'INTEGER_VALUE:1-3999', 'portCh_description': 'TEXT:', 'duplex_option': 'TEXT_OPTIONS:auto,full,half', 'flowcontrol_options': 'TEXT_OPTIONS:receive,send', 'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\ arp,dhcp,ospf,port,port-unreachable,redirects,router,\ unreachables', 'accessgroup_name': 'TEXT:', 'portchannel_ipv4': 'IPV4Address:', 'portchannel_ipv4_mask': 'TEXT:', 'arp_ipaddress': 'IPV4Address:', 'arp_macaddress': 'TEXT:', 'arp_timeout_value': 'INTEGER_VALUE:60-28800', 'relay_ipaddress': 'IPV4Address:', 'ip_ospf_options': 'TEXT_OPTIONS:authentication,\ authentication-key,bfd,cost,database-filter,dead-interval,\ hello-interval,message-digest-key,mtu,mtu-ignore,network,\ passive-interface,priority,retransmit-interval,shutdown,\ transmit-delay', 'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295', 'ospf_id_ipaddres_value': 'IPV4Address:', 
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\ timeout', 'port_priority': 'INTEGER_VALUE:1-65535', 'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\ trap-notification', 'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\ mac-phy-status,management-address,max-frame-size,\ port-description,port-protocol-vlan,port-vlan,power-mdi,\ protocol-identity,system-capabilities,system-description,\ system-name,vid-management,vlan-name', 'load_interval_delay': 'INTEGER_VALUE:30-300', 'load_interval_counter': 'INTEGER_VALUE:1-3', 'mac_accessgroup_name': 'TEXT:', 'mac_address': 'TEXT:', 'microburst_threshold': 'NO_VALIDATION:1-4294967295', 'mtu_value': 'INTEGER_VALUE:64-9216', 'service_instance': 'NO_VALIDATION:1-4294967295', 'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,\ input,output,type', 'service_policy_name': 'TEXT:', 'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\ cost,disable,enable,guard,link-type,mst,port,port-priority,\ vlan', 'spanning_tree_cost': 'NO_VALIDATION:1-200000000', 'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999', 'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\ 192,224', 'portchannel_ipv6_neighbor_mac': 'TEXT:', 'portchannel_ipv6_neighbor_address': 'IPV6Address:', 'portchannel_ipv6_linklocal': 'IPV6Address:', 'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094', 'portchannel_ipv6_dhcp_ethernet': 'TEXT:', 'portchannel_ipv6_dhcp': 'IPV6Address:', 'portchannel_ipv6_address': 'IPV6Address:', 'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\ link-local,nd,neighbor', 'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto', 'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\ unicast', 'stormcontrol_level': 'FLOAT:', 'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\ egress-only', 'vrrp_id': 'INTEGER_VALUE:1-255', } NE2572 = { 'vlan_id': 'INTEGER_VALUE:1-3999', 'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999', 'vlan_name': 'TEXT:', 'vlan_flood': 
'TEXT_OPTIONS:ipv4,ipv6', 'vlan_state': 'TEXT_OPTIONS:active,suspend', 'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25', 'vlan_querier': 'IPV4Address:', 'vlan_querier_timeout': 'INTEGER_VALUE:1-65535', 'vlan_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_query_max_response_time': 'INTEGER_VALUE:1-25', 'vlan_report_suppression': 'INTEGER_VALUE:1-25', 'vlan_robustness_variable': 'INTEGER_VALUE:1-7', 'vlan_startup_query_count': 'INTEGER_VALUE:1-10', 'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_snooping_version': 'INTEGER_VALUE:2-3', 'vlan_access_map_name': 'TEXT: ', 'vlan_ethernet_interface': 'TEXT:', 'vlan_portagg_number': 'INTEGER_VALUE:1-4096', 'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect', 'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only', 'vlan_filter_name': 'TEXT:', 'vlag_auto_recovery': 'INTEGER_VALUE:240-3600', 'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict', 'vlag_instance': 'INTEGER_VALUE:1-64', 'vlag_port_aggregation': 'INTEGER_VALUE:1-4096', 'vlag_priority': 'INTEGER_VALUE:0-65535', 'vlag_startup_delay': 'INTEGER_VALUE:0-3600', 'vlag_tier_id': 'INTEGER_VALUE:1-512', 'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\ keepalive-interval,peer-ip,retry-interval', 'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24', 'vlag_keepalive_interval': 'INTEGER_VALUE:2-300', 'vlag_retry_interval': 'INTEGER_VALUE:1-300', 'vlag_peerip': 'IPV4Address:', 'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management', 'bgp_as_number': 'NO_VALIDATION:1-4294967295', 'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_bgp_local_count': 'INTEGER_VALUE:2-64', 'cluster_id_as_ip': 'IPV4Address:', 'cluster_id_as_number': 'NO_VALIDATION:1-4294967295', 'confederation_identifier': 'INTEGER_VALUE:1-65535', 'condeferation_peers_as': 'INTEGER_VALUE:1-65535', 'stalepath_delay_value': 'INTEGER_VALUE:1-3600', 'maxas_limit_as': 'INTEGER_VALUE:1-2000', 'neighbor_ipaddress': 'IPV4Address:', 'neighbor_as': 'NO_VALIDATION:1-4294967295', 'router_id': 
'IPV4Address:', 'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600', 'bgp_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_aggregate_prefix': 'IPV4AddressWithMask:', 'addrfamily_routemap_name': 'TEXT:', 'reachability_half_life': 'INTEGER_VALUE:1-45', 'start_reuse_route_value': 'INTEGER_VALUE:1-20000', 'start_suppress_route_value': 'INTEGER_VALUE:1-20000', 'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255', 'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45', 'distance_external_AS': 'INTEGER_VALUE:1-255', 'distance_internal_AS': 'INTEGER_VALUE:1-255', 'distance_local_routes': 'INTEGER_VALUE:1-255', 'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp', 'maxpath_numbers': 'INTEGER_VALUE:2-32', 'network_ip_prefix_with_mask': 'IPV4AddressWithMask:', 'network_ip_prefix_value': 'IPV4Address:', 'network_ip_prefix_mask': 'IPV4Address:', 'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295', 'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295', 'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\ static', 'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10', 'bgp_neighbor_af_filtername': 'TEXT:', 'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870', 'bgp_neighbor_af_prefixname': 'TEXT:', 'bgp_neighbor_af_routemap': 'TEXT:', 'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535', 'bgp_neighbor_description': 'TEXT:', 'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255', 'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295', 'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96', 'bgp_neighbor_password': 'TEXT:', 'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254', 'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\ vlan', 'bgp_neighbor_update_ethernet': 'TEXT:', 'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7', 'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094', 'bgp_neighbor_weight': 
'INTEGER_VALUE:0-65535', 'ethernet_interface_value': 'INTEGER_VALUE:1-54', 'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-54', 'ethernet_interface_string': 'TEXT:', 'loopback_interface_value': 'INTEGER_VALUE:0-7', 'mgmt_interface_value': 'INTEGER_VALUE:0-0', 'vlan_interface_value': 'INTEGER_VALUE:1-4094', 'portchannel_interface_value': 'INTEGER_VALUE:1-4096', 'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096', 'portchannel_interface_string': 'TEXT:', 'aggregation_group_no': 'INTEGER_VALUE:1-4096', 'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive', 'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\ ipv6,neighbor', 'bfd_interval': 'INTEGER_VALUE:50-999', 'bfd_minrx': 'INTEGER_VALUE:50-999', 'bfd_ multiplier': 'INTEGER_VALUE:3-50', 'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval', 'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\ meticulous-keyed-md5,meticulous-keyed-sha1,simple', 'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id', 'bfd_key_chain': 'TEXT:', 'bfd_key_id': 'INTEGER_VALUE:0-255', 'bfd_key_name': 'TEXT:', 'bfd_neighbor_ip': 'TEXT:', 'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\ non-persistent', 'bfd_access_vlan': 'INTEGER_VALUE:1-3999', 'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk', 'trunk_options': 'TEXT_OPTIONS:allowed,native', 'trunk_vlanid': 'INTEGER_VALUE:1-3999', 'portCh_description': 'TEXT:', 'duplex_option': 'TEXT_OPTIONS:auto,full,half', 'flowcontrol_options': 'TEXT_OPTIONS:receive,send', 'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\ arp,dhcp,ospf,port,port-unreachable,redirects,router,\ unreachables', 'accessgroup_name': 'TEXT:', 'portchannel_ipv4': 'IPV4Address:', 'portchannel_ipv4_mask': 'TEXT:', 'arp_ipaddress': 'IPV4Address:', 'arp_macaddress': 'TEXT:', 'arp_timeout_value': 'INTEGER_VALUE:60-28800', 'relay_ipaddress': 'IPV4Address:', 'ip_ospf_options': 'TEXT_OPTIONS:authentication,\ 
authentication-key,bfd,cost,database-filter,dead-interval,\ hello-interval,message-digest-key,mtu,mtu-ignore,network,\ passive-interface,priority,retransmit-interval,shutdown,\ transmit-delay', 'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295', 'ospf_id_ipaddres_value': 'IPV4Address:', 'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\ timeout', 'port_priority': 'INTEGER_VALUE:1-65535', 'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\ trap-notification', 'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\ mac-phy-status,management-address,max-frame-size,\ port-description,port-protocol-vlan,port-vlan,power-mdi,\ protocol-identity,system-capabilities,system-description,\ system-name,vid-management,vlan-name', 'load_interval_delay': 'INTEGER_VALUE:30-300', 'load_interval_counter': 'INTEGER_VALUE:1-3', 'mac_accessgroup_name': 'TEXT:', 'mac_address': 'TEXT:', 'microburst_threshold': 'NO_VALIDATION:1-4294967295', 'mtu_value': 'INTEGER_VALUE:64-9216', 'service_instance': 'NO_VALIDATION:1-4294967295', 'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\ output,type', 'service_policy_name': 'TEXT:', 'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\ cost,disable,enable,guard,link-type,mst,port,port-priority,vlan', 'spanning_tree_cost': 'NO_VALIDATION:1-200000000', 'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999', 'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\ 192,224', 'portchannel_ipv6_neighbor_mac': 'TEXT:', 'portchannel_ipv6_neighbor_address': 'IPV6Address:', 'portchannel_ipv6_linklocal': 'IPV6Address:', 'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094', 'portchannel_ipv6_dhcp_ethernet': 'TEXT:', 'portchannel_ipv6_dhcp': 'IPV6Address:', 'portchannel_ipv6_address': 'IPV6Address:', 'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\ link-local,nd,neighbor', 'interface_speed': 'TEXT_OPTIONS:10000,100000,25000,40000,50000,auto', 'stormcontrol_options': 
# --- CLI parameter validation rule tables for Lenovo CNOS switch models ---
# Each table maps a module parameter name to a "VALIDATOR:constraints" spec
# string, e.g. 'INTEGER_VALUE:1-25' (integer in [1, 25]),
# 'INTEGER_VALUE_RANGE:1-3999' (range of integers), 'TEXT_OPTIONS:a,b,c'
# (one of the listed words), 'IPV4Address:' / 'IPV6Address:' /
# 'IPV4AddressWithMask:' (address formats), 'TEXT:' (free text),
# 'NO_VALIDATION:...' (accepted without checking), 'FLOAT:',
# 'MATCH_TEXT_OR_EMPTY:...'.  Presumably consumed by a validation routine
# elsewhere in this package that dispatches on the prefix before ':' --
# TODO confirm against the caller.
# NOTE(review): the key 'bfd_ multiplier' contains an embedded space, and
# 'condeferation_peers_as' / 'nexthop_crtitical_delay' /
# 'nexthop_noncrtitical_delay' look like typos; they are preserved verbatim
# here because lookup code elsewhere presumably uses these exact key
# strings -- confirm before renaming.
# NOTE(review): backslash-space sequences inside value strings (e.g.
# 'TEXT_OPTIONS:keepalive-attempts,\ keepalive-interval,...') look like
# line-continuation artifacts from a reformatting of the original source --
# verify the intended option spellings against upstream before relying on
# exact string matches.
# (The first line below closes a model table whose opening is above this
# region; NE1032T is declared mid-line.)
'TEXT_OPTIONS:broadcast,multicast,\ unicast', 'stormcontrol_level': 'FLOAT:', 'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\ egress-only', 'vrrp_id': 'INTEGER_VALUE:1-255', } NE1032T = { 'vlan_id': 'INTEGER_VALUE:1-3999', 'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999', 'vlan_name': 'TEXT:', 'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6', 'vlan_state': 'TEXT_OPTIONS:active,suspend', 'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25', 'vlan_querier': 'IPV4Address:', 'vlan_querier_timeout': 'INTEGER_VALUE:1-65535', 'vlan_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_query_max_response_time': 'INTEGER_VALUE:1-25', 'vlan_report_suppression': 'INTEGER_VALUE:1-25', 'vlan_robustness_variable': 'INTEGER_VALUE:1-7', 'vlan_startup_query_count': 'INTEGER_VALUE:1-10', 'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_snooping_version': 'INTEGER_VALUE:2-3', 'vlan_access_map_name': 'TEXT: ', 'vlan_ethernet_interface': 'TEXT:', 'vlan_portagg_number': 'INTEGER_VALUE:1-4096', 'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect', 'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only', 'vlan_filter_name': 'TEXT:', 'vlag_auto_recovery': 'INTEGER_VALUE:240-3600', 'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict', 'vlag_instance': 'INTEGER_VALUE:1-64', 'vlag_port_aggregation': 'INTEGER_VALUE:1-4096', 'vlag_priority': 'INTEGER_VALUE:0-65535', 'vlag_startup_delay': 'INTEGER_VALUE:0-3600', 'vlag_tier_id': 'INTEGER_VALUE:1-512', 'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\ keepalive-interval,peer-ip,retry-interval', 'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24', 'vlag_keepalive_interval': 'INTEGER_VALUE:2-300', 'vlag_retry_interval': 'INTEGER_VALUE:1-300', 'vlag_peerip': 'IPV4Address:', 'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management', 'bgp_as_number': 'NO_VALIDATION:1-4294967295', 'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_bgp_local_count': 'INTEGER_VALUE:2-64', 'cluster_id_as_ip': 'IPV4Address:', 'cluster_id_as_number': 
'NO_VALIDATION:1-4294967295', 'confederation_identifier': 'INTEGER_VALUE:1-65535', 'condeferation_peers_as': 'INTEGER_VALUE:1-65535', 'stalepath_delay_value': 'INTEGER_VALUE:1-3600', 'maxas_limit_as': 'INTEGER_VALUE:1-2000', 'neighbor_ipaddress': 'IPV4Address:', 'neighbor_as': 'NO_VALIDATION:1-4294967295', 'router_id': 'IPV4Address:', 'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600', 'bgp_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_aggregate_prefix': 'IPV4AddressWithMask:', 'addrfamily_routemap_name': 'TEXT:', 'reachability_half_life': 'INTEGER_VALUE:1-45', 'start_reuse_route_value': 'INTEGER_VALUE:1-20000', 'start_suppress_route_value': 'INTEGER_VALUE:1-20000', 'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255', 'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45', 'distance_external_AS': 'INTEGER_VALUE:1-255', 'distance_internal_AS': 'INTEGER_VALUE:1-255', 'distance_local_routes': 'INTEGER_VALUE:1-255', 'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp', 'maxpath_numbers': 'INTEGER_VALUE:2-32', 'network_ip_prefix_with_mask': 'IPV4AddressWithMask:', 'network_ip_prefix_value': 'IPV4Address:', 'network_ip_prefix_mask': 'IPV4Address:', 'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295', 'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295', 'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\ static', 'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10', 'bgp_neighbor_af_filtername': 'TEXT:', 'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870', 'bgp_neighbor_af_prefixname': 'TEXT:', 'bgp_neighbor_af_routemap': 'TEXT:', 'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535', 'bgp_neighbor_description': 'TEXT:', 'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255', 'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295', 'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96', 'bgp_neighbor_password': 'TEXT:', 'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_timers_holdtime': 
'INTEGER_VALUE:0-3600', 'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254', 'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\ vlan', 'bgp_neighbor_update_ethernet': 'TEXT:', 'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7', 'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094', 'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535', 'ethernet_interface_value': 'INTEGER_VALUE:1-32', 'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32', 'ethernet_interface_string': 'TEXT:', 'loopback_interface_value': 'INTEGER_VALUE:0-7', 'mgmt_interface_value': 'INTEGER_VALUE:0-0', 'vlan_interface_value': 'INTEGER_VALUE:1-4094', 'portchannel_interface_value': 'INTEGER_VALUE:1-4096', 'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096', 'portchannel_interface_string': 'TEXT:', 'aggregation_group_no': 'INTEGER_VALUE:1-4096', 'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive', 'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\ ipv6,neighbor', 'bfd_interval': 'INTEGER_VALUE:50-999', 'bfd_minrx': 'INTEGER_VALUE:50-999', 'bfd_ multiplier': 'INTEGER_VALUE:3-50', 'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval', 'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\ meticulous-keyed-md5,meticulous-keyed-sha1,simple', 'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id', 'bfd_key_chain': 'TEXT:', 'bfd_key_id': 'INTEGER_VALUE:0-255', 'bfd_key_name': 'TEXT:', 'bfd_neighbor_ip': 'TEXT:', 'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\ non-persistent', 'bfd_access_vlan': 'INTEGER_VALUE:1-3999', 'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk', 'trunk_options': 'TEXT_OPTIONS:allowed,native', 'trunk_vlanid': 'INTEGER_VALUE:1-3999', 'portCh_description': 'TEXT:', 'duplex_option': 'TEXT_OPTIONS:auto,full,half', 'flowcontrol_options': 'TEXT_OPTIONS:receive,send', 'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\ arp,dhcp,ospf,port,port-unreachable,redirects,router,\ unreachables', 'accessgroup_name': 'TEXT:', 
'portchannel_ipv4': 'IPV4Address:', 'portchannel_ipv4_mask': 'TEXT:', 'arp_ipaddress': 'IPV4Address:', 'arp_macaddress': 'TEXT:', 'arp_timeout_value': 'INTEGER_VALUE:60-28800', 'relay_ipaddress': 'IPV4Address:', 'ip_ospf_options': 'TEXT_OPTIONS:authentication,\ authentication-key,bfd,cost,database-filter,dead-interval,\ hello-interval,message-digest-key,mtu,mtu-ignore,network,\ passive-interface,priority,retransmit-interval,shutdown,\ transmit-delay', 'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295', 'ospf_id_ipaddres_value': 'IPV4Address:', 'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\ timeout', 'port_priority': 'INTEGER_VALUE:1-65535', 'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\ trap-notification', 'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\ mac-phy-status,management-address,max-frame-size,\ port-description,port-protocol-vlan,port-vlan,power-mdi,\ protocol-identity,system-capabilities,system-description,\ system-name,vid-management,vlan-name', 'load_interval_delay': 'INTEGER_VALUE:30-300', 'load_interval_counter': 'INTEGER_VALUE:1-3', 'mac_accessgroup_name': 'TEXT:', 'mac_address': 'TEXT:', 'microburst_threshold': 'NO_VALIDATION:1-4294967295', 'mtu_value': 'INTEGER_VALUE:64-9216', 'service_instance': 'NO_VALIDATION:1-4294967295', 'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\ output,type', 'service_policy_name': 'TEXT:', 'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\ cost,disable,enable,guard,link-type,mst,port,port-priority,vlan', 'spanning_tree_cost': 'NO_VALIDATION:1-200000000', 'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999', 'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\ 192,224', 'portchannel_ipv6_neighbor_mac': 'TEXT:', 'portchannel_ipv6_neighbor_address': 'IPV6Address:', 'portchannel_ipv6_linklocal': 'IPV6Address:', 'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094', 'portchannel_ipv6_dhcp_ethernet': 'TEXT:', 'portchannel_ipv6_dhcp': 
# NE1032 model table begins mid-line below (after NE1032T closes).
'IPV6Address:', 'portchannel_ipv6_address': 'IPV6Address:', 'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\ link-local,nd,neighbor', 'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto', 'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\ unicast', 'stormcontrol_level': 'FLOAT:', 'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\ egress-only', 'vrrp_id': 'INTEGER_VALUE:1-255', } NE1032 = { 'vlan_id': 'INTEGER_VALUE:1-3999', 'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999', 'vlan_name': 'TEXT:', 'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6', 'vlan_state': 'TEXT_OPTIONS:active,suspend', 'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25', 'vlan_querier': 'IPV4Address:', 'vlan_querier_timeout': 'INTEGER_VALUE:1-65535', 'vlan_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_query_max_response_time': 'INTEGER_VALUE:1-25', 'vlan_report_suppression': 'INTEGER_VALUE:1-25', 'vlan_robustness_variable': 'INTEGER_VALUE:1-7', 'vlan_startup_query_count': 'INTEGER_VALUE:1-10', 'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_snooping_version': 'INTEGER_VALUE:2-3', 'vlan_access_map_name': 'TEXT: ', 'vlan_ethernet_interface': 'TEXT:', 'vlan_portagg_number': 'INTEGER_VALUE:1-4096', 'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect', 'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only', 'vlan_filter_name': 'TEXT:', 'vlag_auto_recovery': 'INTEGER_VALUE:240-3600', 'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict', 'vlag_instance': 'INTEGER_VALUE:1-64', 'vlag_port_aggregation': 'INTEGER_VALUE:1-4096', 'vlag_priority': 'INTEGER_VALUE:0-65535', 'vlag_startup_delay': 'INTEGER_VALUE:0-3600', 'vlag_tier_id': 'INTEGER_VALUE:1-512', 'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\ keepalive-interval,peer-ip,retry-interval', 'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24', 'vlag_keepalive_interval': 'INTEGER_VALUE:2-300', 'vlag_retry_interval': 'INTEGER_VALUE:1-300', 'vlag_peerip': 'IPV4Address:', 'vlag_peerip_vrf': 
'TEXT_OPTIONS:default,management', 'bgp_as_number': 'NO_VALIDATION:1-4294967295', 'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_bgp_local_count': 'INTEGER_VALUE:2-64', 'cluster_id_as_ip': 'IPV4Address:', 'cluster_id_as_number': 'NO_VALIDATION:1-4294967295', 'confederation_identifier': 'INTEGER_VALUE:1-65535', 'condeferation_peers_as': 'INTEGER_VALUE:1-65535', 'stalepath_delay_value': 'INTEGER_VALUE:1-3600', 'maxas_limit_as': 'INTEGER_VALUE:1-2000', 'neighbor_ipaddress': 'IPV4Address:', 'neighbor_as': 'NO_VALIDATION:1-4294967295', 'router_id': 'IPV4Address:', 'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600', 'bgp_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_aggregate_prefix': 'IPV4AddressWithMask:', 'addrfamily_routemap_name': 'TEXT:', 'reachability_half_life': 'INTEGER_VALUE:1-45', 'start_reuse_route_value': 'INTEGER_VALUE:1-20000', 'start_suppress_route_value': 'INTEGER_VALUE:1-20000', 'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255', 'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45', 'distance_external_AS': 'INTEGER_VALUE:1-255', 'distance_internal_AS': 'INTEGER_VALUE:1-255', 'distance_local_routes': 'INTEGER_VALUE:1-255', 'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp', 'maxpath_numbers': 'INTEGER_VALUE:2-32', 'network_ip_prefix_with_mask': 'IPV4AddressWithMask:', 'network_ip_prefix_value': 'IPV4Address:', 'network_ip_prefix_mask': 'IPV4Address:', 'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295', 'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295', 'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\ static', 'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10', 'bgp_neighbor_af_filtername': 'TEXT:', 'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870', 'bgp_neighbor_af_prefixname': 'TEXT:', 'bgp_neighbor_af_routemap': 'TEXT:', 'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535', 'bgp_neighbor_description': 'TEXT:', 'bgp_neighbor_maxhopcount': 
'INTEGER_VALUE:1-255', 'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295', 'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96', 'bgp_neighbor_password': 'TEXT:', 'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254', 'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\ vlan', 'bgp_neighbor_update_ethernet': 'TEXT:', 'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7', 'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094', 'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535', 'ethernet_interface_value': 'INTEGER_VALUE:1-32', 'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32', 'ethernet_interface_string': 'TEXT:', 'loopback_interface_value': 'INTEGER_VALUE:0-7', 'mgmt_interface_value': 'INTEGER_VALUE:0-0', 'vlan_interface_value': 'INTEGER_VALUE:1-4094', 'portchannel_interface_value': 'INTEGER_VALUE:1-4096', 'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096', 'portchannel_interface_string': 'TEXT:', 'aggregation_group_no': 'INTEGER_VALUE:1-4096', 'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive', 'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\ ipv6,neighbor', 'bfd_interval': 'INTEGER_VALUE:50-999', 'bfd_minrx': 'INTEGER_VALUE:50-999', 'bfd_ multiplier': 'INTEGER_VALUE:3-50', 'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval', 'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\ meticulous-keyed-md5,meticulous-keyed-sha1,simple', 'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id', 'bfd_key_chain': 'TEXT:', 'bfd_key_id': 'INTEGER_VALUE:0-255', 'bfd_key_name': 'TEXT:', 'bfd_neighbor_ip': 'TEXT:', 'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\ non-persistent', 'bfd_access_vlan': 'INTEGER_VALUE:1-3999', 'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk', 'trunk_options': 'TEXT_OPTIONS:allowed,native', 'trunk_vlanid': 'INTEGER_VALUE:1-3999', 'portCh_description': 'TEXT:', 'duplex_option': 
'TEXT_OPTIONS:auto,full,half', 'flowcontrol_options': 'TEXT_OPTIONS:receive,send', 'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\ arp,dhcp,ospf,port,port-unreachable,redirects,router,\ unreachables', 'accessgroup_name': 'TEXT:', 'portchannel_ipv4': 'IPV4Address:', 'portchannel_ipv4_mask': 'TEXT:', 'arp_ipaddress': 'IPV4Address:', 'arp_macaddress': 'TEXT:', 'arp_timeout_value': 'INTEGER_VALUE:60-28800', 'relay_ipaddress': 'IPV4Address:', 'ip_ospf_options': 'TEXT_OPTIONS:authentication,\ authentication-key,bfd,cost,database-filter,dead-interval,\ hello-interval,message-digest-key,mtu,mtu-ignore,network,\ passive-interface,priority,retransmit-interval,shutdown,\ transmit-delay', 'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295', 'ospf_id_ipaddres_value': 'IPV4Address:', 'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\ timeout', 'port_priority': 'INTEGER_VALUE:1-65535', 'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\ trap-notification', 'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\ mac-phy-status,management-address,max-frame-size,\ port-description,port-protocol-vlan,port-vlan,power-mdi,\ protocol-identity,system-capabilities,system-description,\ system-name,vid-management,vlan-name', 'load_interval_delay': 'INTEGER_VALUE:30-300', 'load_interval_counter': 'INTEGER_VALUE:1-3', 'mac_accessgroup_name': 'TEXT:', 'mac_address': 'TEXT:', 'microburst_threshold': 'NO_VALIDATION:1-4294967295', 'mtu_value': 'INTEGER_VALUE:64-9216', 'service_instance': 'NO_VALIDATION:1-4294967295', 'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\ output,type', 'service_policy_name': 'TEXT:', 'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\ cost,disable,enable,guard,link-type,mst,port,port-priority,vlan', 'spanning_tree_cost': 'NO_VALIDATION:1-200000000', 'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999', 'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\ 192,224', 
# NE1072T model table begins mid-line below (after NE1032 closes); the
# model-specific differences are in ethernet_interface_value/_range
# (1-54 here vs 1-32 on NE1032/NE1032T) and interface_speed options.
'portchannel_ipv6_neighbor_mac': 'TEXT:', 'portchannel_ipv6_neighbor_address': 'IPV6Address:', 'portchannel_ipv6_linklocal': 'IPV6Address:', 'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094', 'portchannel_ipv6_dhcp_ethernet': 'TEXT:', 'portchannel_ipv6_dhcp': 'IPV6Address:', 'portchannel_ipv6_address': 'IPV6Address:', 'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\ link-local,nd,neighbor', 'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto', 'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\ unicast', 'stormcontrol_level': 'FLOAT:', 'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\ egress-only', 'vrrp_id': 'INTEGER_VALUE:1-255', } NE1072T = { 'vlan_id': 'INTEGER_VALUE:1-3999', 'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999', 'vlan_name': 'TEXT:', 'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6', 'vlan_state': 'TEXT_OPTIONS:active,suspend', 'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25', 'vlan_querier': 'IPV4Address:', 'vlan_querier_timeout': 'INTEGER_VALUE:1-65535', 'vlan_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_query_max_response_time': 'INTEGER_VALUE:1-25', 'vlan_report_suppression': 'INTEGER_VALUE:1-25', 'vlan_robustness_variable': 'INTEGER_VALUE:1-7', 'vlan_startup_query_count': 'INTEGER_VALUE:1-10', 'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_snooping_version': 'INTEGER_VALUE:2-3', 'vlan_access_map_name': 'TEXT: ', 'vlan_ethernet_interface': 'TEXT:', 'vlan_portagg_number': 'INTEGER_VALUE:1-4096', 'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect', 'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only', 'vlan_filter_name': 'TEXT:', 'vlag_auto_recovery': 'INTEGER_VALUE:240-3600', 'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict', 'vlag_instance': 'INTEGER_VALUE:1-64', 'vlag_port_aggregation': 'INTEGER_VALUE:1-4096', 'vlag_priority': 'INTEGER_VALUE:0-65535', 'vlag_startup_delay': 'INTEGER_VALUE:0-3600', 'vlag_tier_id': 'INTEGER_VALUE:1-512', 'vlag_hlthchk_options': 
'TEXT_OPTIONS:keepalive-attempts,\ keepalive-interval,peer-ip,retry-interval', 'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24', 'vlag_keepalive_interval': 'INTEGER_VALUE:2-300', 'vlag_retry_interval': 'INTEGER_VALUE:1-300', 'vlag_peerip': 'IPV4Address:', 'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management', 'bgp_as_number': 'NO_VALIDATION:1-4294967295', 'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_bgp_local_count': 'INTEGER_VALUE:2-64', 'cluster_id_as_ip': 'IPV4Address:', 'cluster_id_as_number': 'NO_VALIDATION:1-4294967295', 'confederation_identifier': 'INTEGER_VALUE:1-65535', 'condeferation_peers_as': 'INTEGER_VALUE:1-65535', 'stalepath_delay_value': 'INTEGER_VALUE:1-3600', 'maxas_limit_as': 'INTEGER_VALUE:1-2000', 'neighbor_ipaddress': 'IPV4Address:', 'neighbor_as': 'NO_VALIDATION:1-4294967295', 'router_id': 'IPV4Address:', 'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600', 'bgp_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_aggregate_prefix': 'IPV4AddressWithMask:', 'addrfamily_routemap_name': 'TEXT:', 'reachability_half_life': 'INTEGER_VALUE:1-45', 'start_reuse_route_value': 'INTEGER_VALUE:1-20000', 'start_suppress_route_value': 'INTEGER_VALUE:1-20000', 'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255', 'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45', 'distance_external_AS': 'INTEGER_VALUE:1-255', 'distance_internal_AS': 'INTEGER_VALUE:1-255', 'distance_local_routes': 'INTEGER_VALUE:1-255', 'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp', 'maxpath_numbers': 'INTEGER_VALUE:2-32', 'network_ip_prefix_with_mask': 'IPV4AddressWithMask:', 'network_ip_prefix_value': 'IPV4Address:', 'network_ip_prefix_mask': 'IPV4Address:', 'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295', 'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295', 'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\ static', 'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10', 'bgp_neighbor_af_filtername': 'TEXT:', 'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870', 
'bgp_neighbor_af_prefixname': 'TEXT:', 'bgp_neighbor_af_routemap': 'TEXT:', 'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535', 'bgp_neighbor_description': 'TEXT:', 'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255', 'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295', 'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96', 'bgp_neighbor_password': 'TEXT:', 'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254', 'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\ vlan', 'bgp_neighbor_update_ethernet': 'TEXT:', 'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7', 'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094', 'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535', 'ethernet_interface_value': 'INTEGER_VALUE:1-54', 'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-54', 'ethernet_interface_string': 'TEXT:', 'loopback_interface_value': 'INTEGER_VALUE:0-7', 'mgmt_interface_value': 'INTEGER_VALUE:0-0', 'vlan_interface_value': 'INTEGER_VALUE:1-4094', 'portchannel_interface_value': 'INTEGER_VALUE:1-4096', 'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096', 'portchannel_interface_string': 'TEXT:', 'aggregation_group_no': 'INTEGER_VALUE:1-4096', 'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive', 'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\ ipv6,neighbor', 'bfd_interval': 'INTEGER_VALUE:50-999', 'bfd_minrx': 'INTEGER_VALUE:50-999', 'bfd_ multiplier': 'INTEGER_VALUE:3-50', 'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval', 'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\ meticulous-keyed-md5,meticulous-keyed-sha1,simple', 'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id', 'bfd_key_chain': 'TEXT:', 'bfd_key_id': 'INTEGER_VALUE:0-255', 'bfd_key_name': 'TEXT:', 'bfd_neighbor_ip': 'TEXT:', 'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\ 
non-persistent', 'bfd_access_vlan': 'INTEGER_VALUE:1-3999', 'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk', 'trunk_options': 'TEXT_OPTIONS:allowed,native', 'trunk_vlanid': 'INTEGER_VALUE:1-3999', 'portCh_description': 'TEXT:', 'duplex_option': 'TEXT_OPTIONS:auto,full,half', 'flowcontrol_options': 'TEXT_OPTIONS:receive,send', 'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\ arp,dhcp,ospf,port,port-unreachable,redirects,router,\ unreachables', 'accessgroup_name': 'TEXT:', 'portchannel_ipv4': 'IPV4Address:', 'portchannel_ipv4_mask': 'TEXT:', 'arp_ipaddress': 'IPV4Address:', 'arp_macaddress': 'TEXT:', 'arp_timeout_value': 'INTEGER_VALUE:60-28800', 'relay_ipaddress': 'IPV4Address:', 'ip_ospf_options': 'TEXT_OPTIONS:authentication,\ authentication-key,bfd,cost,database-filter,dead-interval,\ hello-interval,message-digest-key,mtu,mtu-ignore,network,\ passive-interface,priority,retransmit-interval,shutdown,\ transmit-delay', 'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295', 'ospf_id_ipaddres_value': 'IPV4Address:', 'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\ timeout', 'port_priority': 'INTEGER_VALUE:1-65535', 'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\ trap-notification', 'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\ mac-phy-status,management-address,max-frame-size,\ port-description,port-protocol-vlan,port-vlan,power-mdi,\ protocol-identity,system-capabilities,system-description,\ system-name,vid-management,vlan-name', 'load_interval_delay': 'INTEGER_VALUE:30-300', 'load_interval_counter': 'INTEGER_VALUE:1-3', 'mac_accessgroup_name': 'TEXT:', 'mac_address': 'TEXT:', 'microburst_threshold': 'NO_VALIDATION:1-4294967295', 'mtu_value': 'INTEGER_VALUE:64-9216', 'service_instance': 'NO_VALIDATION:1-4294967295', 'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\ output,type', 'service_policy_name': 'TEXT:', 'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\ 
# NE1072T closes mid-line below; the NE10032 model table that opens after
# it continues past this region.
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan', 'spanning_tree_cost': 'NO_VALIDATION:1-200000000', 'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999', 'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\ 192,224', 'portchannel_ipv6_neighbor_mac': 'TEXT:', 'portchannel_ipv6_neighbor_address': 'IPV6Address:', 'portchannel_ipv6_linklocal': 'IPV6Address:', 'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094', 'portchannel_ipv6_dhcp_ethernet': 'TEXT:', 'portchannel_ipv6_dhcp': 'IPV6Address:', 'portchannel_ipv6_address': 'IPV6Address:', 'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\ link-local,nd,neighbor', 'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto', 'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\ unicast', 'stormcontrol_level': 'FLOAT:', 'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\ egress-only', 'vrrp_id': 'INTEGER_VALUE:1-255', } NE10032 = { 'vlan_id': 'INTEGER_VALUE:1-3999', 'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999', 'vlan_name': 'TEXT:', 'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6', 'vlan_state': 'TEXT_OPTIONS:active,suspend', 'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25', 'vlan_querier': 'IPV4Address:', 'vlan_querier_timeout': 'INTEGER_VALUE:1-65535', 'vlan_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_query_max_response_time': 'INTEGER_VALUE:1-25', 'vlan_report_suppression': 'INTEGER_VALUE:1-25', 'vlan_robustness_variable': 'INTEGER_VALUE:1-7', 'vlan_startup_query_count': 'INTEGER_VALUE:1-10', 'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_snooping_version': 'INTEGER_VALUE:2-3', 'vlan_access_map_name': 'TEXT: ', 'vlan_ethernet_interface': 'TEXT:', 'vlan_portagg_number': 'INTEGER_VALUE:1-4096', 'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect', 'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only', 'vlan_filter_name': 'TEXT:', 'vlag_auto_recovery': 'INTEGER_VALUE:240-3600', 'vlag_config_consistency': 
'TEXT_OPTIONS:disable,strict', 'vlag_instance': 'INTEGER_VALUE:1-64', 'vlag_port_aggregation': 'INTEGER_VALUE:1-4096', 'vlag_priority': 'INTEGER_VALUE:0-65535', 'vlag_startup_delay': 'INTEGER_VALUE:0-3600', 'vlag_tier_id': 'INTEGER_VALUE:1-512', 'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\ keepalive-interval,peer-ip,retry-interval', 'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24', 'vlag_keepalive_interval': 'INTEGER_VALUE:2-300', 'vlag_retry_interval': 'INTEGER_VALUE:1-300', 'vlag_peerip': 'IPV4Address:', 'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management', 'bgp_as_number': 'NO_VALIDATION:1-4294967295', 'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_bgp_local_count': 'INTEGER_VALUE:2-64', 'cluster_id_as_ip': 'IPV4Address:', 'cluster_id_as_number': 'NO_VALIDATION:1-4294967295', 'confederation_identifier': 'INTEGER_VALUE:1-65535', 'condeferation_peers_as': 'INTEGER_VALUE:1-65535', 'stalepath_delay_value': 'INTEGER_VALUE:1-3600', 'maxas_limit_as': 'INTEGER_VALUE:1-2000', 'neighbor_ipaddress': 'IPV4Address:', 'neighbor_as': 'NO_VALIDATION:1-4294967295', 'router_id': 'IPV4Address:', 'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600', 'bgp_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_aggregate_prefix': 'IPV4AddressWithMask:', 'addrfamily_routemap_name': 'TEXT:', 'reachability_half_life': 'INTEGER_VALUE:1-45', 'start_reuse_route_value': 'INTEGER_VALUE:1-20000', 'start_suppress_route_value': 'INTEGER_VALUE:1-20000', 'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255', 'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45', 'distance_external_AS': 'INTEGER_VALUE:1-255', 'distance_internal_AS': 'INTEGER_VALUE:1-255', 'distance_local_routes': 'INTEGER_VALUE:1-255', 'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp', 'maxpath_numbers': 'INTEGER_VALUE:2-32', 'network_ip_prefix_with_mask': 'IPV4AddressWithMask:', 'network_ip_prefix_value': 'IPV4Address:', 'network_ip_prefix_mask': 'IPV4Address:', 'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295', 
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295', 'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\ static', 'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10', 'bgp_neighbor_af_filtername': 'TEXT:', 'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870', 'bgp_neighbor_af_prefixname': 'TEXT:', 'bgp_neighbor_af_routemap': 'TEXT:', 'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535', 'bgp_neighbor_description': 'TEXT:', 'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255', 'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295', 'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96', 'bgp_neighbor_password': 'TEXT:', 'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254', 'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\ vlan', 'bgp_neighbor_update_ethernet': 'TEXT:', 'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7', 'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094', 'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535', 'ethernet_interface_value': 'INTEGER_VALUE:1-32', 'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32', 'ethernet_interface_string': 'TEXT:', 'loopback_interface_value': 'INTEGER_VALUE:0-7', 'mgmt_interface_value': 'INTEGER_VALUE:0-0', 'vlan_interface_value': 'INTEGER_VALUE:1-4094', 'portchannel_interface_value': 'INTEGER_VALUE:1-4096', 'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096', 'portchannel_interface_string': 'TEXT:', 'aggregation_group_no': 'INTEGER_VALUE:1-4096', 'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive', 'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\ ipv6,neighbor', 'bfd_interval': 'INTEGER_VALUE:50-999', 'bfd_minrx': 'INTEGER_VALUE:50-999', 'bfd_ multiplier': 'INTEGER_VALUE:3-50', 'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval', 'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\ 
meticulous-keyed-md5,meticulous-keyed-sha1,simple', 'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id', 'bfd_key_chain': 'TEXT:', 'bfd_key_id': 'INTEGER_VALUE:0-255', 'bfd_key_name': 'TEXT:', 'bfd_neighbor_ip': 'TEXT:', 'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\ non-persistent', 'bfd_access_vlan': 'INTEGER_VALUE:1-3999', 'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk', 'trunk_options': 'TEXT_OPTIONS:allowed,native', 'trunk_vlanid': 'INTEGER_VALUE:1-3999', 'portCh_description': 'TEXT:', 'duplex_option': 'TEXT_OPTIONS:auto,full,half', 'flowcontrol_options': 'TEXT_OPTIONS:receive,send', 'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\ arp,dhcp,ospf,port,port-unreachable,redirects,router,\ unreachables', 'accessgroup_name': 'TEXT:', 'portchannel_ipv4': 'IPV4Address:', 'portchannel_ipv4_mask': 'TEXT:', 'arp_ipaddress': 'IPV4Address:', 'arp_macaddress': 'TEXT:', 'arp_timeout_value': 'INTEGER_VALUE:60-28800', 'relay_ipaddress': 'IPV4Address:', 'ip_ospf_options': 'TEXT_OPTIONS:authentication,\ authentication-key,bfd,cost,database-filter,dead-interval,\ hello-interval,message-digest-key,mtu,mtu-ignore,network,\ passive-interface,priority,retransmit-interval,shutdown,\ transmit-delay', 'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295', 'ospf_id_ipaddres_value': 'IPV4Address:', 'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\ timeout', 'port_priority': 'INTEGER_VALUE:1-65535', 'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\ trap-notification', 'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\ mac-phy-status,management-address,max-frame-size,\ port-description,port-protocol-vlan,port-vlan,power-mdi,\ protocol-identity,system-capabilities,system-description,\ system-name,vid-management,vlan-name', 'load_interval_delay': 'INTEGER_VALUE:30-300', 'load_interval_counter': 'INTEGER_VALUE:1-3', 'mac_accessgroup_name': 'TEXT:', 'mac_address': 'TEXT:', 'microburst_threshold': 'NO_VALIDATION:1-4294967295', 
'mtu_value': 'INTEGER_VALUE:64-9216', 'service_instance': 'NO_VALIDATION:1-4294967295', 'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\ output,type', 'service_policy_name': 'TEXT:', 'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\ cost,disable,enable,guard,link-type,mst,port,port-priority,vlan', 'spanning_tree_cost': 'NO_VALIDATION:1-200000000', 'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999', 'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\ 192,224', 'portchannel_ipv6_neighbor_mac': 'TEXT:', 'portchannel_ipv6_neighbor_address': 'IPV6Address:', 'portchannel_ipv6_linklocal': 'IPV6Address:', 'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094', 'portchannel_ipv6_dhcp_ethernet': 'TEXT:', 'portchannel_ipv6_dhcp': 'IPV6Address:', 'portchannel_ipv6_address': 'IPV6Address:', 'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\ link-local,nd,neighbor', 'interface_speed': 'TEXT_OPTIONS:10000,100000,25000,40000,50000,auto', 'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\ unicast', 'stormcontrol_level': 'FLOAT:', 'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\ egress-only', 'vrrp_id': 'INTEGER_VALUE:1-255', } g8272_cnos = {'vlan_id': 'INTEGER_VALUE:1-3999', 'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999', 'vlan_name': 'TEXT:', 'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6', 'vlan_state': 'TEXT_OPTIONS:active,suspend', 'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25', 'vlan_querier': 'IPV4Address:', 'vlan_querier_timeout': 'INTEGER_VALUE:1-65535', 'vlan_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_query_max_response_time': 'INTEGER_VALUE:1-25', 'vlan_report_suppression': 'INTEGER_VALUE:1-25', 'vlan_robustness_variable': 'INTEGER_VALUE:1-7', 'vlan_startup_query_count': 'INTEGER_VALUE:1-10', 'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_snooping_version': 'INTEGER_VALUE:2-3', 'vlan_access_map_name': 'TEXT: ', 'vlan_ethernet_interface': 'TEXT:', 'vlan_portagg_number': 
'INTEGER_VALUE:1-4096', 'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect', 'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only', 'vlan_filter_name': 'TEXT:', 'vlag_auto_recovery': 'INTEGER_VALUE:240-3600', 'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict', 'vlag_instance': 'INTEGER_VALUE:1-64', 'vlag_port_aggregation': 'INTEGER_VALUE:1-4096', 'vlag_priority': 'INTEGER_VALUE:0-65535', 'vlag_startup_delay': 'INTEGER_VALUE:0-3600', 'vlag_tier_id': 'INTEGER_VALUE:1-512', 'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\ keepalive-interval,peer-ip,retry-interval', 'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24', 'vlag_keepalive_interval': 'INTEGER_VALUE:2-300', 'vlag_retry_interval': 'INTEGER_VALUE:1-300', 'vlag_peerip': 'IPV4Address:', 'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management', 'bgp_as_number': 'NO_VALIDATION:1-4294967295', 'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_bgp_local_count': 'INTEGER_VALUE:2-64', 'cluster_id_as_ip': 'IPV4Address:', 'cluster_id_as_number': 'NO_VALIDATION:1-4294967295', 'confederation_identifier': 'INTEGER_VALUE:1-65535', 'condeferation_peers_as': 'INTEGER_VALUE:1-65535', 'stalepath_delay_value': 'INTEGER_VALUE:1-3600', 'maxas_limit_as': 'INTEGER_VALUE:1-2000', 'neighbor_ipaddress': 'IPV4Address:', 'neighbor_as': 'NO_VALIDATION:1-4294967295', 'router_id': 'IPV4Address:', 'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600', 'bgp_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_aggregate_prefix': 'IPV4AddressWithMask:', 'addrfamily_routemap_name': 'TEXT:', 'reachability_half_life': 'INTEGER_VALUE:1-45', 'start_reuse_route_value': 'INTEGER_VALUE:1-20000', 'start_suppress_route_value': 'INTEGER_VALUE:1-20000', 'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255', 'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45', 'distance_external_AS': 'INTEGER_VALUE:1-255', 'distance_internal_AS': 'INTEGER_VALUE:1-255', 'distance_local_routes': 'INTEGER_VALUE:1-255', 'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp', 
'maxpath_numbers': 'INTEGER_VALUE:2-32', 'network_ip_prefix_with_mask': 'IPV4AddressWithMask:', 'network_ip_prefix_value': 'IPV4Address:', 'network_ip_prefix_mask': 'IPV4Address:', 'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295', 'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295', 'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\ static', 'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10', 'bgp_neighbor_af_filtername': 'TEXT:', 'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870', 'bgp_neighbor_af_prefixname': 'TEXT:', 'bgp_neighbor_af_routemap': 'TEXT:', 'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535', 'bgp_neighbor_description': 'TEXT:', 'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255', 'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295', 'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96', 'bgp_neighbor_password': 'TEXT:', 'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254', 'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\ vlan', 'bgp_neighbor_update_ethernet': 'TEXT:', 'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7', 'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094', 'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535', 'ethernet_interface_value': 'INTEGER_VALUE:1-54', 'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-54', 'ethernet_interface_string': 'TEXT:', 'loopback_interface_value': 'INTEGER_VALUE:0-7', 'mgmt_interface_value': 'INTEGER_VALUE:0-0', 'vlan_interface_value': 'INTEGER_VALUE:1-4094', 'portchannel_interface_value': 'INTEGER_VALUE:1-4096', 'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096', 'portchannel_interface_string': 'TEXT:', 'aggregation_group_no': 'INTEGER_VALUE:1-4096', 'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive', 'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\ ipv6,neighbor', 
'bfd_interval': 'INTEGER_VALUE:50-999', 'bfd_minrx': 'INTEGER_VALUE:50-999', 'bfd_ multiplier': 'INTEGER_VALUE:3-50', 'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval', 'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\ meticulous-keyed-md5,meticulous-keyed-sha1,simple', 'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id', 'bfd_key_chain': 'TEXT:', 'bfd_key_id': 'INTEGER_VALUE:0-255', 'bfd_key_name': 'TEXT:', 'bfd_neighbor_ip': 'TEXT:', 'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\ non-persistent', 'bfd_access_vlan': 'INTEGER_VALUE:1-3999', 'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk', 'trunk_options': 'TEXT_OPTIONS:allowed,native', 'trunk_vlanid': 'INTEGER_VALUE:1-3999', 'portCh_description': 'TEXT:', 'duplex_option': 'TEXT_OPTIONS:auto,full,half', 'flowcontrol_options': 'TEXT_OPTIONS:receive,send', 'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\ arp,dhcp,ospf,port,port-unreachable,redirects,router,\ unreachables', 'accessgroup_name': 'TEXT:', 'portchannel_ipv4': 'IPV4Address:', 'portchannel_ipv4_mask': 'TEXT:', 'arp_ipaddress': 'IPV4Address:', 'arp_macaddress': 'TEXT:', 'arp_timeout_value': 'INTEGER_VALUE:60-28800', 'relay_ipaddress': 'IPV4Address:', 'ip_ospf_options': 'TEXT_OPTIONS:authentication,\ authentication-key,bfd,cost,database-filter,dead-interval,\ hello-interval,message-digest-key,mtu,mtu-ignore,network,\ passive-interface,priority,retransmit-interval,shutdown,\ transmit-delay', 'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295', 'ospf_id_ipaddres_value': 'IPV4Address:', 'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\ timeout', 'port_priority': 'INTEGER_VALUE:1-65535', 'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\ trap-notification', 'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\ mac-phy-status,management-address,max-frame-size,\ port-description,port-protocol-vlan,port-vlan,power-mdi,\ protocol-identity,system-capabilities,system-description,\ 
system-name,vid-management,vlan-name', 'load_interval_delay': 'INTEGER_VALUE:30-300', 'load_interval_counter': 'INTEGER_VALUE:1-3', 'mac_accessgroup_name': 'TEXT:', 'mac_address': 'TEXT:', 'microburst_threshold': 'NO_VALIDATION:1-4294967295', 'mtu_value': 'INTEGER_VALUE:64-9216', 'service_instance': 'NO_VALIDATION:1-4294967295', 'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\ output,type', 'service_policy_name': 'TEXT:', 'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\ cost,disable,enable,guard,link-type,mst,port,port-priority,vlan', 'spanning_tree_cost': 'NO_VALIDATION:1-200000000', 'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999', 'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\ 192,224', 'portchannel_ipv6_neighbor_mac': 'TEXT:', 'portchannel_ipv6_neighbor_address': 'IPV6Address:', 'portchannel_ipv6_linklocal': 'IPV6Address:', 'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094', 'portchannel_ipv6_dhcp_ethernet': 'TEXT:', 'portchannel_ipv6_dhcp': 'IPV6Address:', 'portchannel_ipv6_address': 'IPV6Address:', 'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\ link-local,nd,neighbor', 'interface_speed': 'TEXT_OPTIONS:1000,10000,40000', 'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\ unicast', 'stormcontrol_level': 'FLOAT:', 'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\ egress-only', 'vrrp_id': 'INTEGER_VALUE:1-255', } g8296_cnos = {'vlan_id': 'INTEGER_VALUE:1-3999', 'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999', 'vlan_name': 'TEXT:', 'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6', 'vlan_state': 'TEXT_OPTIONS:active,suspend', 'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25', 'vlan_querier': 'IPV4Address:', 'vlan_querier_timeout': 'INTEGER_VALUE:1-65535', 'vlan_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_query_max_response_time': 'INTEGER_VALUE:1-25', 'vlan_report_suppression': 'INTEGER_VALUE:1-25', 'vlan_robustness_variable': 'INTEGER_VALUE:1-7', 'vlan_startup_query_count': 
'INTEGER_VALUE:1-10', 'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_snooping_version': 'INTEGER_VALUE:2-3', 'vlan_access_map_name': 'TEXT: ', 'vlan_ethernet_interface': 'TEXT:', 'vlan_portagg_number': 'INTEGER_VALUE:1-4096', 'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect', 'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only', 'vlan_filter_name': 'TEXT:', 'vlag_auto_recovery': 'INTEGER_VALUE:240-3600', 'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict', 'vlag_instance': 'INTEGER_VALUE:1-128', 'vlag_port_aggregation': 'INTEGER_VALUE:1-4096', 'vlag_priority': 'INTEGER_VALUE:0-65535', 'vlag_startup_delay': 'INTEGER_VALUE:0-3600', 'vlag_tier_id': 'INTEGER_VALUE:1-512', 'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\ keepalive-interval,peer-ip,retry-interval', 'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24', 'vlag_keepalive_interval': 'INTEGER_VALUE:2-300', 'vlag_retry_interval': 'INTEGER_VALUE:1-300', 'vlag_peerip': 'IPV4Address:', 'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management', 'bgp_as_number': 'NO_VALIDATION:1-4294967295', 'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_bgp_local_count': 'INTEGER_VALUE:2-64', 'cluster_id_as_ip': 'IPV4Address:', 'cluster_id_as_number': 'NO_VALIDATION:1-4294967295', 'confederation_identifier': 'INTEGER_VALUE:1-65535', 'condeferation_peers_as': 'INTEGER_VALUE:1-65535', 'stalepath_delay_value': 'INTEGER_VALUE:1-3600', 'maxas_limit_as': 'INTEGER_VALUE:1-2000', 'neighbor_ipaddress': 'IPV4Address:', 'neighbor_as': 'NO_VALIDATION:1-4294967295', 'router_id': 'IPV4Address:', 'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600', 'bgp_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_aggregate_prefix': 'IPV4AddressWithMask:', 'addrfamily_routemap_name': 'TEXT:', 'reachability_half_life': 'INTEGER_VALUE:1-45', 'start_reuse_route_value': 'INTEGER_VALUE:1-20000', 'start_suppress_route_value': 'INTEGER_VALUE:1-20000', 'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255', 
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45', 'distance_external_AS': 'INTEGER_VALUE:1-255', 'distance_internal_AS': 'INTEGER_VALUE:1-255', 'distance_local_routes': 'INTEGER_VALUE:1-255', 'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp', 'maxpath_numbers': 'INTEGER_VALUE:2-32', 'network_ip_prefix_with_mask': 'IPV4AddressWithMask:', 'network_ip_prefix_value': 'IPV4Address:', 'network_ip_prefix_mask': 'IPV4Address:', 'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295', 'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295', 'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\ static', 'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10', 'bgp_neighbor_af_filtername': 'TEXT:', 'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870', 'bgp_neighbor_af_prefixname': 'TEXT:', 'bgp_neighbor_af_routemap': 'TEXT:', 'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535', 'bgp_neighbor_description': 'TEXT:', 'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255', 'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295', 'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96', 'bgp_neighbor_password': 'TEXT:', 'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254', 'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\ vlan', 'bgp_neighbor_update_ethernet': 'TEXT:', 'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7', 'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094', 'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535', 'ethernet_interface_value': 'INTEGER_VALUE:1-96', 'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-96', 'ethernet_interface_string': 'TEXT:', 'loopback_interface_value': 'INTEGER_VALUE:0-7', 'mgmt_interface_value': 'INTEGER_VALUE:0-0', 'vlan_interface_value': 'INTEGER_VALUE:1-4094', 'portchannel_interface_value': 'INTEGER_VALUE:1-4096', 'portchannel_interface_range': 
'INTEGER_VALUE_RANGE:1-4096', 'portchannel_interface_string': 'TEXT:', 'aggregation_group_no': 'INTEGER_VALUE:1-4096', 'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive', 'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\ ipv6,neighbor', 'bfd_interval': 'INTEGER_VALUE:50-999', 'bfd_minrx': 'INTEGER_VALUE:50-999', 'bfd_ multiplier': 'INTEGER_VALUE:3-50', 'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval', 'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\ meticulous-keyed-md5,meticulous-keyed-sha1,simple', 'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id', 'bfd_key_chain': 'TEXT:', 'bfd_key_id': 'INTEGER_VALUE:0-255', 'bfd_key_name': 'TEXT:', 'bfd_neighbor_ip': 'TEXT:', 'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\ non-persistent', 'bfd_access_vlan': 'INTEGER_VALUE:1-3999', 'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk', 'trunk_options': 'TEXT_OPTIONS:allowed,native', 'trunk_vlanid': 'INTEGER_VALUE:1-3999', 'portCh_description': 'TEXT:', 'duplex_option': 'TEXT_OPTIONS:auto,full,half', 'flowcontrol_options': 'TEXT_OPTIONS:receive,send', 'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\ arp,dhcp,ospf,port,port-unreachable,redirects,router,\ unreachables', 'accessgroup_name': 'TEXT:', 'portchannel_ipv4': 'IPV4Address:', 'portchannel_ipv4_mask': 'TEXT:', 'arp_ipaddress': 'IPV4Address:', 'arp_macaddress': 'TEXT:', 'arp_timeout_value': 'INTEGER_VALUE:60-28800', 'relay_ipaddress': 'IPV4Address:', 'ip_ospf_options': 'TEXT_OPTIONS:authentication,\ authentication-key,bfd,cost,database-filter,dead-interval,\ hello-interval,message-digest-key,mtu,mtu-ignore,network,\ passive-interface,priority,retransmit-interval,shutdown,\ transmit-delay', 'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295', 'ospf_id_ipaddres_value': 'IPV4Address:', 'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\ timeout', 'port_priority': 'INTEGER_VALUE:1-65535', 'lldp_options': 
'TEXT_OPTIONS:receive,tlv-select,transmit,\ trap-notification', 'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\ mac-phy-status,management-address,max-frame-size,\ port-description,port-protocol-vlan,port-vlan,power-mdi,\ protocol-identity,system-capabilities,system-description,\ system-name,vid-management,vlan-name', 'load_interval_delay': 'INTEGER_VALUE:30-300', 'load_interval_counter': 'INTEGER_VALUE:1-3', 'mac_accessgroup_name': 'TEXT:', 'mac_address': 'TEXT:', 'microburst_threshold': 'NO_VALIDATION:1-4294967295', 'mtu_value': 'INTEGER_VALUE:64-9216', 'service_instance': 'NO_VALIDATION:1-4294967295', 'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,\ input,output,type', 'service_policy_name': 'TEXT:', 'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\ cost,disable,enable,guard,link-type,mst,port,port-priority,vlan', 'spanning_tree_cost': 'NO_VALIDATION:1-200000000', 'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999', 'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\ 192,224', 'portchannel_ipv6_neighbor_mac': 'TEXT:', 'portchannel_ipv6_neighbor_address': 'IPV6Address:', 'portchannel_ipv6_linklocal': 'IPV6Address:', 'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094', 'portchannel_ipv6_dhcp_ethernet': 'TEXT:', 'portchannel_ipv6_dhcp': 'IPV6Address:', 'portchannel_ipv6_address': 'IPV6Address:', 'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\ link-local,nd,neighbor', 'interface_speed': 'TEXT_OPTIONS:1000,10000,40000,auto', 'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\ unicast', 'stormcontrol_level': 'FLOAT:', 'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\ egress-only', 'vrrp_id': 'INTEGER_VALUE:1-255', } g8332_cnos = {'vlan_id': 'INTEGER_VALUE:1-3999', 'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999', 'vlan_name': 'TEXT:', 'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6', 'vlan_state': 'TEXT_OPTIONS:active,suspend', 'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25', 'vlan_querier': 
'IPV4Address:', 'vlan_querier_timeout': 'INTEGER_VALUE:1-65535', 'vlan_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_query_max_response_time': 'INTEGER_VALUE:1-25', 'vlan_report_suppression': 'INTEGER_VALUE:1-25', 'vlan_robustness_variable': 'INTEGER_VALUE:1-7', 'vlan_startup_query_count': 'INTEGER_VALUE:1-10', 'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000', 'vlan_snooping_version': 'INTEGER_VALUE:2-3', 'vlan_access_map_name': 'TEXT: ', 'vlan_ethernet_interface': 'TEXT:', 'vlan_portagg_number': 'INTEGER_VALUE:1-4096', 'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect', 'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only', 'vlan_filter_name': 'TEXT:', 'vlag_auto_recovery': 'INTEGER_VALUE:240-3600', 'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict', 'vlag_instance': 'INTEGER_VALUE:1-128', 'vlag_port_aggregation': 'INTEGER_VALUE:1-4096', 'vlag_priority': 'INTEGER_VALUE:0-65535', 'vlag_startup_delay': 'INTEGER_VALUE:0-3600', 'vlag_tier_id': 'INTEGER_VALUE:1-512', 'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\ keepalive-interval,peer-ip,retry-interval', 'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24', 'vlag_keepalive_interval': 'INTEGER_VALUE:2-300', 'vlag_retry_interval': 'INTEGER_VALUE:1-300', 'vlag_peerip': 'IPV4Address:', 'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management', 'bgp_as_number': 'NO_VALIDATION:1-4294967295', 'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_bgp_local_count': 'INTEGER_VALUE:2-64', 'cluster_id_as_ip': 'IPV4Address:', 'cluster_id_as_number': 'NO_VALIDATION:1-4294967295', 'confederation_identifier': 'INTEGER_VALUE:1-65535', 'condeferation_peers_as': 'INTEGER_VALUE:1-65535', 'stalepath_delay_value': 'INTEGER_VALUE:1-3600', 'maxas_limit_as': 'INTEGER_VALUE:1-2000', 'neighbor_ipaddress': 'IPV4Address:', 'neighbor_as': 'NO_VALIDATION:1-4294967295', 'router_id': 'IPV4Address:', 'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600', 'bgp_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_aggregate_prefix': 
'IPV4AddressWithMask:', 'addrfamily_routemap_name': 'TEXT:', 'reachability_half_life': 'INTEGER_VALUE:1-45', 'start_reuse_route_value': 'INTEGER_VALUE:1-20000', 'start_suppress_route_value': 'INTEGER_VALUE:1-20000', 'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255', 'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45', 'distance_external_AS': 'INTEGER_VALUE:1-255', 'distance_internal_AS': 'INTEGER_VALUE:1-255', 'distance_local_routes': 'INTEGER_VALUE:1-255', 'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp', 'maxpath_numbers': 'INTEGER_VALUE:2-32', 'network_ip_prefix_with_mask': 'IPV4AddressWithMask:', 'network_ip_prefix_value': 'IPV4Address:', 'network_ip_prefix_mask': 'IPV4Address:', 'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295', 'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295', 'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\ static', 'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10', 'bgp_neighbor_af_filtername': 'TEXT:', 'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870', 'bgp_neighbor_af_prefixname': 'TEXT:', 'bgp_neighbor_af_routemap': 'TEXT:', 'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6', 'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535', 'bgp_neighbor_description': 'TEXT:', 'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255', 'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295', 'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96', 'bgp_neighbor_password': 'TEXT:', 'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600', 'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254', 'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\ vlan', 'bgp_neighbor_update_ethernet': 'TEXT:', 'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7', 'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094', 'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535', 'ethernet_interface_value': 'INTEGER_VALUE:1-32', 'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32', 
'ethernet_interface_string': 'TEXT:', 'loopback_interface_value': 'INTEGER_VALUE:0-7', 'mgmt_interface_value': 'INTEGER_VALUE:0-0', 'vlan_interface_value': 'INTEGER_VALUE:1-4094', 'portchannel_interface_value': 'INTEGER_VALUE:1-4096', 'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096', 'portchannel_interface_string': 'TEXT:', 'aggregation_group_no': 'INTEGER_VALUE:1-4096', 'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive', 'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\ ipv6,neighbor', 'bfd_interval': 'INTEGER_VALUE:50-999', 'bfd_minrx': 'INTEGER_VALUE:50-999', 'bfd_ multiplier': 'INTEGER_VALUE:3-50', 'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval', 'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\ meticulous-keyed-md5,meticulous-keyed-sha1,simple', 'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id', 'bfd_key_chain': 'TEXT:', 'bfd_key_id': 'INTEGER_VALUE:0-255', 'bfd_key_name': 'TEXT:', 'bfd_neighbor_ip': 'TEXT:', 'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\ non-persistent', 'bfd_access_vlan': 'INTEGER_VALUE:1-3999', 'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk', 'trunk_options': 'TEXT_OPTIONS:allowed,native', 'trunk_vlanid': 'INTEGER_VALUE:1-3999', 'portCh_description': 'TEXT:', 'duplex_option': 'TEXT_OPTIONS:auto,full,half', 'flowcontrol_options': 'TEXT_OPTIONS:receive,send', 'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,arp,\ dhcp,ospf,port,port-unreachable,redirects,router,unreachables', 'accessgroup_name': 'TEXT:', 'portchannel_ipv4': 'IPV4Address:', 'portchannel_ipv4_mask': 'TEXT:', 'arp_ipaddress': 'IPV4Address:', 'arp_macaddress': 'TEXT:', 'arp_timeout_value': 'INTEGER_VALUE:60-28800', 'relay_ipaddress': 'IPV4Address:', 'ip_ospf_options': 'TEXT_OPTIONS:authentication,\ authentication-key,bfd,cost,database-filter,dead-interval,\ hello-interval,message-digest-key,mtu,mtu-ignore,network,\ passive-interface,priority,retransmit-interval,shutdown,\ 
transmit-delay', 'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295', 'ospf_id_ipaddres_value': 'IPV4Address:', 'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\ timeout', 'port_priority': 'INTEGER_VALUE:1-65535', 'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\ trap-notification', 'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\ mac-phy-status,management-address,max-frame-size,\ port-description,port-protocol-vlan,port-vlan,power-mdi,\ protocol-identity,system-capabilities,system-description,\ system-name,vid-management,vlan-name', 'load_interval_delay': 'INTEGER_VALUE:30-300', 'load_interval_counter': 'INTEGER_VALUE:1-3', 'mac_accessgroup_name': 'TEXT:', 'mac_address': 'TEXT:', 'microburst_threshold': 'NO_VALIDATION:1-4294967295', 'mtu_value': 'INTEGER_VALUE:64-9216', 'service_instance': 'NO_VALIDATION:1-4294967295', 'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,\ input,output,type', 'service_policy_name': 'TEXT:', 'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\ cost,disable,enable,guard,link-type,mst,port,port-priority,vlan', 'spanning_tree_cost': 'NO_VALIDATION:1-200000000', 'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999', 'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\ 192,224', 'portchannel_ipv6_neighbor_mac': 'TEXT:', 'portchannel_ipv6_neighbor_address': 'IPV6Address:', 'portchannel_ipv6_linklocal': 'IPV6Address:', 'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094', 'portchannel_ipv6_dhcp_ethernet': 'TEXT:', 'portchannel_ipv6_dhcp': 'IPV6Address:', 'portchannel_ipv6_address': 'IPV6Address:', 'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\ link-local,nd,neighbor', 'interface_speed': 'TEXT_OPTIONS:1000,10000,40000,50000,auto', 'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\ unicast', 'stormcontrol_level': 'FLOAT:', 'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\ egress-only', 'vrrp_id': 'INTEGER_VALUE:1-255', }
gpl-3.0
5,255,834,514,839,444,000
51.650087
80
0.625945
false
3.310616
false
false
false
codepongo/cook
script/cook.py
1
4343
#coding:utf-8 import datetime import sys import os sys.path.append(os.path.dirname(__file__)) import shutil import web import markdown2 perpage = 5 try: import conf path = conf.path css_path = conf.csspath web.config.debug=conf.debug domain = conf.domain suffix = conf.suffix except: icopath = './' path = './md' css_path = './css' web.config.debug=True domain ='http://127.0.0.1:8080' suffix = '.md' class base: def __init__(self): self.entities = [] if not os.path.isdir(path): os.mkdir(path) for p in os.listdir(path): if os.path.isdir(p): continue ext = os.path.splitext(p)[1] if ext == suffix: self.entities.append(os.path.join(path,p)) self.entities.sort(reverse=True) def entity(self, idx): return self.generate(idx, idx+1) def entities(self): return self.generate(0, len(self.entities)) def generate(self, begin, end): es = [] #entities in page if len(self.entities) == 0: return es for i in range(begin, end): e = {} e['date'] = os.path.splitext(self.entities[i])[0].replace(path+os.sep, '')[:10] with open(self.entities[i], 'rb') as f: e['id'] = os.path.splitext(os.path.basename(self.entities[i]))[0] title = f.readline() title_tag = f.readline() image = f.readline() e['title'] = title #markdown2.markdown(title) e['image'] = markdown2.markdown(image).replace('<img src="', '<img width="160" height="120" src="/').replace('<p>', '').replace('</p>', '') content = title + title_tag + image + f.read() c = markdown2.markdown(content)#.replace('<img src="', '<img width="480" height="360" src="/') e['content'] = c es.append(e) f.close() return es class static: def GET(self, name): if name == 'favicon.ico': with open(os.path.join(icopath, name), 'rb') as f: content = f.read() f.close() web.header('content-type', 'image/x-icon') return content if os.path.splitext(name)[1][1:] == 'css': web.header('content-type', 'text/css') with open(os.path.join(css_path, name), 'rb') as f: content = f.read() f.close() return content if name == 'robots.txt': web.header('content-type', 'text/plain') 
elif os.path.splitext(name)[1][1:] == 'jpg': web.header('content-type', 'image/jpeg') else: web.header('content-type', 'image/%s' % os.path.splitext(name)[1][1:].lower()) with open(os.path.join(path,name), 'rb') as f: content = f.read() f.close() return content class feed(base): def GET(self): date = datetime.datetime.today().strftime("%a, %d %b %Y %H:%M:%S +0200") web.header('Content-Type', 'application/xml') templates = os.path.join(os.path.dirname(__file__), 'templates') render = web.template.render(templates) return render.feed(entities=base.entities(self)[:5], date=date,domain=domain) class cook(base): def GET(self, name=''): count = len(self.entities) templates = os.path.join(os.path.dirname(__file__), 'templates') render = web.template.render(templates) if name == '': return render.index(base.entities(self)) try: idx = self.entities.index(os.path.join(path, name + suffix)) p = n = True if idx <= 0: p = False if idx >= count - 1: n = False return render.entity(base.entity(self,idx), idx, p, n) except: return render.index(base.entities(self)) urls = ( '/(.*.JPEG)', static, '/(.*.jpeg)', static, '/(.*.jpg)', static, '/(.*.css)', static, '/(favicon.ico)', static, '/feed', feed, '/rss', feed, '/(robots.txt)',static, '/(.*)',cook, ) app = web.application(urls, globals()) if __name__ == '__main__': app.run() else: application = app.wsgifunc()
unlicense
2,263,865,417,661,544,200
32.666667
155
0.519226
false
3.601161
false
false
false
akretion/odoo
addons/mrp/report/mrp_report_bom_structure.py
2
12989
# -*- coding: utf-8 -*- import json from odoo import api, models, _ from odoo.tools import float_round class ReportBomStructure(models.AbstractModel): _name = 'report.mrp.report_bom_structure' _description = 'BOM Structure Report' @api.model def _get_report_values(self, docids, data=None): docs = [] for bom_id in docids: bom = self.env['mrp.bom'].browse(bom_id) candidates = bom.product_id or bom.product_tmpl_id.product_variant_ids for product_variant_id in candidates.ids: if data and data.get('childs'): doc = self._get_pdf_line(bom_id, product_id=product_variant_id, qty=float(data.get('quantity')), child_bom_ids=json.loads(data.get('childs'))) else: doc = self._get_pdf_line(bom_id, product_id=product_variant_id, unfolded=True) doc['report_type'] = 'pdf' doc['report_structure'] = data and data.get('report_type') or 'all' docs.append(doc) if not candidates: if data and data.get('childs'): doc = self._get_pdf_line(bom_id, qty=float(data.get('quantity')), child_bom_ids=json.loads(data.get('childs'))) else: doc = self._get_pdf_line(bom_id, unfolded=True) doc['report_type'] = 'pdf' doc['report_structure'] = data and data.get('report_type') or 'all' docs.append(doc) return { 'doc_ids': docids, 'doc_model': 'mrp.bom', 'docs': docs, } @api.model def get_html(self, bom_id=False, searchQty=1, searchVariant=False): res = self._get_report_data(bom_id=bom_id, searchQty=searchQty, searchVariant=searchVariant) res['lines']['report_type'] = 'html' res['lines']['report_structure'] = 'all' res['lines']['has_attachments'] = res['lines']['attachments'] or any(component['attachments'] for component in res['lines']['components']) res['lines'] = self.env.ref('mrp.report_mrp_bom').render({'data': res['lines']}) return res @api.model def get_bom(self, bom_id=False, product_id=False, line_qty=False, line_id=False, level=False): lines = self._get_bom(bom_id=bom_id, product_id=product_id, line_qty=line_qty, line_id=line_id, level=level) return 
self.env.ref('mrp.report_mrp_bom_line').render({'data': lines}) @api.model def get_operations(self, bom_id=False, qty=0, level=0): bom = self.env['mrp.bom'].browse(bom_id) lines = self._get_operation_line(bom.routing_id, float_round(qty / bom.product_qty, precision_rounding=1, rounding_method='UP'), level) values = { 'bom_id': bom_id, 'currency': self.env.user.company_id.currency_id, 'operations': lines, } return self.env.ref('mrp.report_mrp_operation_line').render({'data': values}) def _get_bom_reference(self, bom): return bom.display_name @api.model def _get_report_data(self, bom_id, searchQty=0, searchVariant=False): lines = {} bom = self.env['mrp.bom'].browse(bom_id) bom_quantity = searchQty or bom.product_qty or 1 bom_product_variants = {} bom_uom_name = '' if bom: bom_uom_name = bom.product_uom_id.name # Get variants used for search if not bom.product_id: for variant in bom.product_tmpl_id.product_variant_ids: bom_product_variants[variant.id] = variant.display_name lines = self._get_bom(bom_id, product_id=searchVariant, line_qty=bom_quantity, level=1) return { 'lines': lines, 'variants': bom_product_variants, 'bom_uom_name': bom_uom_name, 'bom_qty': bom_quantity, 'is_variant_applied': self.env.user.user_has_groups('product.group_product_variant') and len(bom_product_variants) > 1, 'is_uom_applied': self.env.user.user_has_groups('uom.group_uom') } def _get_bom(self, bom_id=False, product_id=False, line_qty=False, line_id=False, level=False): bom = self.env['mrp.bom'].browse(bom_id) bom_quantity = line_qty if line_id: current_line = self.env['mrp.bom.line'].browse(int(line_id)) bom_quantity = current_line.product_uom_id._compute_quantity(line_qty, bom.product_uom_id) # Display bom components for current selected product variant if product_id: product = self.env['product.product'].browse(int(product_id)) else: product = bom.product_id or bom.product_tmpl_id.product_variant_id if product: attachments = self.env['mrp.document'].search(['|', '&', ('res_model', '=', 
'product.product'), ('res_id', '=', product.id), '&', ('res_model', '=', 'product.template'), ('res_id', '=', product.product_tmpl_id.id)]) else: product = bom.product_tmpl_id attachments = self.env['mrp.document'].search([('res_model', '=', 'product.template'), ('res_id', '=', product.id)]) operations = [] if bom.product_qty > 0: operations = self._get_operation_line(bom.routing_id, float_round(bom_quantity / bom.product_qty, precision_rounding=1, rounding_method='UP'), 0) lines = { 'bom': bom, 'bom_qty': bom_quantity, 'bom_prod_name': product.display_name, 'currency': self.env.user.company_id.currency_id, 'product': product, 'code': bom and self._get_bom_reference(bom) or '', 'price': product.uom_id._compute_price(product.standard_price, bom.product_uom_id) * bom_quantity, 'total': sum([op['total'] for op in operations]), 'level': level or 0, 'operations': operations, 'operations_cost': sum([op['total'] for op in operations]), 'attachments': attachments, 'operations_time': sum([op['duration_expected'] for op in operations]) } components, total = self._get_bom_lines(bom, bom_quantity, product, line_id, level) lines['components'] = components lines['total'] += total return lines def _get_bom_lines(self, bom, bom_quantity, product, line_id, level): components = [] total = 0 for line in bom.bom_line_ids: line_quantity = (bom_quantity / (bom.product_qty or 1.0)) * line.product_qty if line._skip_bom_line(product): continue price = line.product_id.uom_id._compute_price(line.product_id.standard_price, line.product_uom_id) * line_quantity if line.child_bom_id: factor = line.product_uom_id._compute_quantity(line_quantity, line.child_bom_id.product_uom_id) / line.child_bom_id.product_qty sub_total = self._get_price(line.child_bom_id, factor, line.product_id) else: sub_total = price sub_total = self.env.user.company_id.currency_id.round(sub_total) components.append({ 'prod_id': line.product_id.id, 'prod_name': line.product_id.display_name, 'code': line.child_bom_id and 
self._get_bom_reference(line.child_bom_id) or '', 'prod_qty': line_quantity, 'prod_uom': line.product_uom_id.name, 'prod_cost': self.env.user.company_id.currency_id.round(price), 'parent_id': bom.id, 'line_id': line.id, 'level': level or 0, 'total': sub_total, 'child_bom': line.child_bom_id.id, 'phantom_bom': line.child_bom_id and line.child_bom_id.type == 'phantom' or False, 'attachments': self.env['mrp.document'].search(['|', '&', ('res_model', '=', 'product.product'), ('res_id', '=', line.product_id.id), '&', ('res_model', '=', 'product.template'), ('res_id', '=', line.product_id.product_tmpl_id.id)]), }) total += sub_total return components, total def _get_operation_line(self, routing, qty, level): operations = [] total = 0.0 for operation in routing.operation_ids: operation_cycle = float_round(qty / operation.workcenter_id.capacity, precision_rounding=1, rounding_method='UP') duration_expected = operation_cycle * operation.time_cycle + operation.workcenter_id.time_stop + operation.workcenter_id.time_start total = ((duration_expected / 60.0) * operation.workcenter_id.costs_hour) operations.append({ 'level': level or 0, 'operation': operation, 'name': operation.name + ' - ' + operation.workcenter_id.name, 'duration_expected': duration_expected, 'total': self.env.user.company_id.currency_id.round(total), }) return operations def _get_price(self, bom, factor, product): price = 0 if bom.routing_id: # routing are defined on a BoM and don't have a concept of quantity. # It means that the operation time are defined for the quantity on # the BoM (the user produces a batch of products). E.g the user # product a batch of 10 units with a 5 minutes operation, the time # will be the 5 for a quantity between 1-10, then doubled for # 11-20,... 
operation_cycle = float_round(factor, precision_rounding=1, rounding_method='UP') operations = self._get_operation_line(bom.routing_id, operation_cycle, 0) price += sum([op['total'] for op in operations]) for line in bom.bom_line_ids: if line._skip_bom_line(product): continue if line.child_bom_id: qty = line.product_uom_id._compute_quantity(line.product_qty * factor, line.child_bom_id.product_uom_id) / line.child_bom_id.product_qty sub_price = self._get_price(line.child_bom_id, qty, line.product_id) price += sub_price else: prod_qty = line.product_qty * factor not_rounded_price = line.product_id.uom_id._compute_price(line.product_id.standard_price, line.product_uom_id) * prod_qty price += self.env.user.company_id.currency_id.round(not_rounded_price) return price def _get_pdf_line(self, bom_id, product_id=False, qty=1, child_bom_ids=[], unfolded=False): data = self._get_bom(bom_id=bom_id, product_id=product_id, line_qty=qty) def get_sub_lines(bom, product_id, line_qty, line_id, level): data = self._get_bom(bom_id=bom.id, product_id=product_id, line_qty=line_qty, line_id=line_id, level=level) bom_lines = data['components'] lines = [] for bom_line in bom_lines: lines.append({ 'name': bom_line['prod_name'], 'type': 'bom', 'quantity': bom_line['prod_qty'], 'uom': bom_line['prod_uom'], 'prod_cost': bom_line['prod_cost'], 'bom_cost': bom_line['total'], 'level': bom_line['level'], 'code': bom_line['code'] }) if bom_line['child_bom'] and (unfolded or bom_line['child_bom'] in child_bom_ids): line = self.env['mrp.bom.line'].browse(bom_line['line_id']) lines += (get_sub_lines(line.child_bom_id, line.product_id, bom_line['prod_qty'], line, level + 1)) if data['operations']: lines.append({ 'name': _('Operations'), 'type': 'operation', 'quantity': data['operations_time'], 'uom': _('minutes'), 'bom_cost': data['operations_cost'], 'level': level, }) for operation in data['operations']: if unfolded or 'operation-' + str(bom.id) in child_bom_ids: lines.append({ 'name': 
operation['name'], 'type': 'operation', 'quantity': operation['duration_expected'], 'uom': _('minutes'), 'bom_cost': operation['total'], 'level': level + 1, }) return lines bom = self.env['mrp.bom'].browse(bom_id) product = product_id or bom.product_id or bom.product_tmpl_id.product_variant_id pdf_lines = get_sub_lines(bom, product, qty, False, 1) data['components'] = [] data['lines'] = pdf_lines return data
agpl-3.0
8,412,093,710,462,063,000
49.344961
195
0.553314
false
3.756217
false
false
false
serzans/wagtail
wagtail/wagtailadmin/edit_handlers.py
1
26782
from __future__ import unicode_literals import copy from modelcluster.forms import ClusterForm, ClusterFormMetaclass from django.db import models from django.template.loader import render_to_string from django.utils.safestring import mark_safe from django.utils.six import text_type from django import forms from django.forms.models import fields_for_model from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ImproperlyConfigured from django.utils.translation import ugettext_lazy from taggit.managers import TaggableManager from wagtail.wagtailadmin import widgets from wagtail.wagtailcore.models import Page from wagtail.wagtailcore.utils import camelcase_to_underscore, resolve_model_string from wagtail.utils.compat import get_related_model, get_related_parent_model # Form field properties to override whenever we encounter a model field # that matches one of these types - including subclasses FORM_FIELD_OVERRIDES = { models.DateField: {'widget': widgets.AdminDateInput}, models.TimeField: {'widget': widgets.AdminTimeInput}, models.DateTimeField: {'widget': widgets.AdminDateTimeInput}, TaggableManager: {'widget': widgets.AdminTagWidget}, } # Form field properties to override whenever we encounter a model field # that matches one of these types exactly, ignoring subclasses. # (This allows us to override the widget for models.TextField, but leave # the RichTextField widget alone) DIRECT_FORM_FIELD_OVERRIDES = { models.TextField: {'widget': widgets.AdminAutoHeightTextInput}, } # Callback to allow us to override the default form fields provided for each model field. def formfield_for_dbfield(db_field, **kwargs): # adapted from django/contrib/admin/options.py overrides = None # If we've got overrides for the formfield defined, use 'em. **kwargs # passed to formfield_for_dbfield override the defaults. 
if db_field.__class__ in DIRECT_FORM_FIELD_OVERRIDES: overrides = DIRECT_FORM_FIELD_OVERRIDES[db_field.__class__] else: for klass in db_field.__class__.mro(): if klass in FORM_FIELD_OVERRIDES: overrides = FORM_FIELD_OVERRIDES[klass] break if overrides: kwargs = dict(copy.deepcopy(overrides), **kwargs) return db_field.formfield(**kwargs) def widget_with_script(widget, script): return mark_safe('{0}<script>{1}</script>'.format(widget, script)) class WagtailAdminModelFormMetaclass(ClusterFormMetaclass): # Override the behaviour of the regular ModelForm metaclass - # which handles the translation of model fields to form fields - # to use our own formfield_for_dbfield function to do that translation. # This is done by sneaking a formfield_callback property into the class # being defined (unless the class already provides a formfield_callback # of its own). # while we're at it, we'll also set extra_form_count to 0, as we're creating # extra forms in JS extra_form_count = 0 def __new__(cls, name, bases, attrs): if 'formfield_callback' not in attrs or attrs['formfield_callback'] is None: attrs['formfield_callback'] = formfield_for_dbfield new_class = super(WagtailAdminModelFormMetaclass, cls).__new__(cls, name, bases, attrs) return new_class WagtailAdminModelForm = WagtailAdminModelFormMetaclass(str('WagtailAdminModelForm'), (ClusterForm,), {}) # Now, any model forms built off WagtailAdminModelForm instead of ModelForm should pick up # the nice form fields defined in FORM_FIELD_OVERRIDES. 
def get_form_for_model( model, fields=None, exclude=None, formsets=None, exclude_formsets=None, widgets=None ): # django's modelform_factory with a bit of custom behaviour # (dealing with Treebeard's tree-related fields that really should have # been editable=False) attrs = {'model': model} if fields is not None: attrs['fields'] = fields if exclude is not None: attrs['exclude'] = exclude if issubclass(model, Page): attrs['exclude'] = attrs.get('exclude', []) + ['content_type', 'path', 'depth', 'numchild'] if widgets is not None: attrs['widgets'] = widgets if formsets is not None: attrs['formsets'] = formsets if exclude_formsets is not None: attrs['exclude_formsets'] = exclude_formsets # Give this new form class a reasonable name. class_name = model.__name__ + str('Form') form_class_attrs = { 'Meta': type(str('Meta'), (object,), attrs) } return WagtailAdminModelFormMetaclass(class_name, (WagtailAdminModelForm,), form_class_attrs) def extract_panel_definitions_from_model_class(model, exclude=None): if hasattr(model, 'panels'): return model.panels panels = [] _exclude = [] if exclude: _exclude.extend(exclude) if issubclass(model, Page): _exclude = ['content_type', 'path', 'depth', 'numchild'] fields = fields_for_model(model, exclude=_exclude, formfield_callback=formfield_for_dbfield) for field_name, field in fields.items(): try: panel_class = field.widget.get_panel() except AttributeError: panel_class = FieldPanel panel = panel_class(field_name) panels.append(panel) return panels class EditHandler(object): """ Abstract class providing sensible default behaviours for objects implementing the EditHandler API """ # return list of widget overrides that this EditHandler wants to be in place # on the form it receives @classmethod def widget_overrides(cls): return {} # return list of fields that this EditHandler expects to find on the form @classmethod def required_fields(cls): return [] # return a dict of formsets that this EditHandler requires to be present # as children 
of the ClusterForm; the dict is a mapping from relation name # to parameters to be passed as part of get_form_for_model's 'formsets' kwarg @classmethod def required_formsets(cls): return {} # return any HTML that needs to be output on the edit page once per edit handler definition. # Typically this will be used to define snippets of HTML within <script type="text/x-template"></script> blocks # for Javascript code to work with. @classmethod def html_declarations(cls): return '' # the top-level edit handler is responsible for providing a form class that can produce forms # acceptable to the edit handler _form_class = None @classmethod def get_form_class(cls, model): if cls._form_class is None: cls._form_class = get_form_for_model( model, fields=cls.required_fields(), formsets=cls.required_formsets(), widgets=cls.widget_overrides()) return cls._form_class def __init__(self, instance=None, form=None): if not instance: raise ValueError("EditHandler did not receive an instance object") self.instance = instance if not form: raise ValueError("EditHandler did not receive a form object") self.form = form # Heading / help text to display to the user heading = "" help_text = "" def classes(self): """ Additional CSS classnames to add to whatever kind of object this is at output. Subclasses of EditHandler should override this, invoking super(B, self).classes() to append more classes specific to the situation. """ classes = [] try: classes.append(self.classname) except AttributeError: pass return classes def field_type(self): """ The kind of field it is e.g boolean_field. Useful for better semantic markup of field display based on type """ return "" def id_for_label(self): """ The ID to be used as the 'for' attribute of any <label> elements that refer to this object but are rendered outside of it. Leave blank if this object does not render as a single input field. """ return "" def render_as_object(self): """ Render this object as it should appear within an ObjectList. 
Should not include the <h2> heading or help text - ObjectList will supply those """ # by default, assume that the subclass provides a catch-all render() method return self.render() def render_as_field(self): """ Render this object as it should appear within a <ul class="fields"> list item """ # by default, assume that the subclass provides a catch-all render() method return self.render() def render_missing_fields(self): """ Helper function: render all of the fields that are defined on the form but not "claimed" by any panels via required_fields. These fields are most likely to be hidden fields introduced by the forms framework itself, such as ORDER / DELETE fields on formset members. (If they aren't actually hidden fields, then they will appear as ugly unstyled / label-less fields outside of the panel furniture. But there's not much we can do about that.) """ rendered_fields = self.required_fields() missing_fields_html = [ text_type(self.form[field_name]) for field_name in self.form.fields if field_name not in rendered_fields ] return mark_safe(''.join(missing_fields_html)) def render_form_content(self): """ Render this as an 'object', ensuring that all fields necessary for a valid form submission are included """ return mark_safe(self.render_as_object() + self.render_missing_fields()) class BaseCompositeEditHandler(EditHandler): """ Abstract class for EditHandlers that manage a set of sub-EditHandlers. 
Concrete subclasses must attach a 'children' property """ _widget_overrides = None @classmethod def widget_overrides(cls): if cls._widget_overrides is None: # build a collated version of all its children's widget lists widgets = {} for handler_class in cls.children: widgets.update(handler_class.widget_overrides()) cls._widget_overrides = widgets return cls._widget_overrides _required_fields = None @classmethod def required_fields(cls): if cls._required_fields is None: fields = [] for handler_class in cls.children: fields.extend(handler_class.required_fields()) cls._required_fields = fields return cls._required_fields _required_formsets = None @classmethod def required_formsets(cls): if cls._required_formsets is None: formsets = {} for handler_class in cls.children: formsets.update(handler_class.required_formsets()) cls._required_formsets = formsets return cls._required_formsets @classmethod def html_declarations(cls): return mark_safe(''.join([c.html_declarations() for c in cls.children])) def __init__(self, instance=None, form=None): super(BaseCompositeEditHandler, self).__init__(instance=instance, form=form) self.children = [ handler_class(instance=self.instance, form=self.form) for handler_class in self.__class__.children ] def render(self): return mark_safe(render_to_string(self.template, { 'self': self })) class BaseTabbedInterface(BaseCompositeEditHandler): template = "wagtailadmin/edit_handlers/tabbed_interface.html" class TabbedInterface(object): def __init__(self, children): self.children = children def bind_to_model(self, model): return type(str('_TabbedInterface'), (BaseTabbedInterface,), { 'model': model, 'children': [child.bind_to_model(model) for child in self.children], }) class BaseObjectList(BaseCompositeEditHandler): template = "wagtailadmin/edit_handlers/object_list.html" class ObjectList(object): def __init__(self, children, heading="", classname=""): self.children = children self.heading = heading self.classname = classname def 
bind_to_model(self, model): return type(str('_ObjectList'), (BaseObjectList,), { 'model': model, 'children': [child.bind_to_model(model) for child in self.children], 'heading': self.heading, 'classname': self.classname, }) class BaseFieldRowPanel(BaseCompositeEditHandler): template = "wagtailadmin/edit_handlers/field_row_panel.html" class FieldRowPanel(object): def __init__(self, children, classname=""): self.children = children self.classname = classname def bind_to_model(self, model): return type(str('_FieldRowPanel'), (BaseFieldRowPanel,), { 'model': model, 'children': [child.bind_to_model(model) for child in self.children], 'classname': self.classname, }) class BaseMultiFieldPanel(BaseCompositeEditHandler): template = "wagtailadmin/edit_handlers/multi_field_panel.html" def classes(self): classes = super(BaseMultiFieldPanel, self).classes() classes.append("multi-field") return classes class MultiFieldPanel(object): def __init__(self, children, heading="", classname=""): self.children = children self.heading = heading self.classname = classname def bind_to_model(self, model): return type(str('_MultiFieldPanel'), (BaseMultiFieldPanel,), { 'model': model, 'children': [child.bind_to_model(model) for child in self.children], 'heading': self.heading, 'classname': self.classname, }) class BaseFieldPanel(EditHandler): TEMPLATE_VAR = 'field_panel' @classmethod def widget_overrides(cls): """check if a specific widget has been defined for this field""" if hasattr(cls, 'widget'): return {cls.field_name: cls.widget} else: return {} def __init__(self, instance=None, form=None): super(BaseFieldPanel, self).__init__(instance=instance, form=form) self.bound_field = self.form[self.field_name] self.heading = self.bound_field.label self.help_text = self.bound_field.help_text def classes(self): classes = super(BaseFieldPanel, self).classes() if self.bound_field.field.required: classes.append("required") if self.bound_field.errors: classes.append("error") 
classes.append(self.field_type()) return classes def field_type(self): return camelcase_to_underscore(self.bound_field.field.__class__.__name__) def id_for_label(self): return self.bound_field.id_for_label object_template = "wagtailadmin/edit_handlers/single_field_panel.html" def render_as_object(self): return mark_safe(render_to_string(self.object_template, { 'self': self, self.TEMPLATE_VAR: self, 'field': self.bound_field, })) field_template = "wagtailadmin/edit_handlers/field_panel_field.html" def render_as_field(self): context = { 'field': self.bound_field, 'field_type': self.field_type(), } return mark_safe(render_to_string(self.field_template, context)) @classmethod def required_fields(self): return [self.field_name] class FieldPanel(object): def __init__(self, field_name, classname="", widget=None): self.field_name = field_name self.classname = classname self.widget = widget def bind_to_model(self, model): base = { 'model': model, 'field_name': self.field_name, 'classname': self.classname, } if self.widget: base['widget'] = self.widget return type(str('_FieldPanel'), (BaseFieldPanel,), base) class BaseRichTextFieldPanel(BaseFieldPanel): pass class RichTextFieldPanel(object): def __init__(self, field_name): self.field_name = field_name def bind_to_model(self, model): return type(str('_RichTextFieldPanel'), (BaseRichTextFieldPanel,), { 'model': model, 'field_name': self.field_name, }) class BaseChooserPanel(BaseFieldPanel): """ Abstract superclass for panels that provide a modal interface for choosing (or creating) a database object such as an image, resulting in an ID that is used to populate a hidden foreign key input. 
Subclasses provide: * field_template (only required if the default template of field_panel_field.html is not usable) * object_type_name - something like 'image' which will be used as the var name for the object instance in the field_template """ def get_chosen_item(self): field = self.instance._meta.get_field(self.field_name) related_model = get_related_parent_model(field.related) try: return getattr(self.instance, self.field_name) except related_model.DoesNotExist: # if the ForeignKey is null=False, Django decides to raise # a DoesNotExist exception here, rather than returning None # like every other unpopulated field type. Yay consistency! return None def render_as_field(self): instance_obj = self.get_chosen_item() context = { 'field': self.bound_field, self.object_type_name: instance_obj, 'is_chosen': bool(instance_obj), # DEPRECATED - passed to templates for backwards compatibility only } return mark_safe(render_to_string(self.field_template, context)) class BasePageChooserPanel(BaseChooserPanel): object_type_name = "page" _target_content_type = None @classmethod def widget_overrides(cls): return {cls.field_name: widgets.AdminPageChooser( content_type=cls.target_content_type(), can_choose_root=cls.can_choose_root)} @classmethod def target_content_type(cls): if cls._target_content_type is None: if cls.page_type: target_models = [] for page_type in cls.page_type: try: target_models.append(resolve_model_string(page_type)) except LookupError: raise ImproperlyConfigured( "{0}.page_type must be of the form 'app_label.model_name', given {1!r}".format( cls.__name__, page_type ) ) except ValueError: raise ImproperlyConfigured( "{0}.page_type refers to model {1!r} that has not been installed".format( cls.__name__, page_type ) ) cls._target_content_type = list(ContentType.objects.get_for_models(*target_models).values()) else: target_model = cls.model._meta.get_field(cls.field_name).rel.to cls._target_content_type = [ContentType.objects.get_for_model(target_model)] return 
cls._target_content_type class PageChooserPanel(object): def __init__(self, field_name, page_type=None, can_choose_root=False): self.field_name = field_name if page_type: # Convert single string/model into list if not isinstance(page_type, (list, tuple)): page_type = [page_type] else: page_type = [] self.page_type = page_type self.can_choose_root = can_choose_root def bind_to_model(self, model): return type(str('_PageChooserPanel'), (BasePageChooserPanel,), { 'model': model, 'field_name': self.field_name, 'page_type': self.page_type, 'can_choose_root': self.can_choose_root, }) class BaseInlinePanel(EditHandler): @classmethod def get_panel_definitions(cls): # Look for a panels definition in the InlinePanel declaration if cls.panels is not None: return cls.panels # Failing that, get it from the model else: return extract_panel_definitions_from_model_class( get_related_model(cls.related), exclude=[cls.related.field.name] ) _child_edit_handler_class = None @classmethod def get_child_edit_handler_class(cls): if cls._child_edit_handler_class is None: panels = cls.get_panel_definitions() cls._child_edit_handler_class = MultiFieldPanel( panels, heading=cls.heading ).bind_to_model(get_related_model(cls.related)) return cls._child_edit_handler_class @classmethod def required_formsets(cls): child_edit_handler_class = cls.get_child_edit_handler_class() return { cls.relation_name: { 'fields': child_edit_handler_class.required_fields(), 'widgets': child_edit_handler_class.widget_overrides(), 'min_num': cls.min_num, 'validate_min': cls.min_num is not None, 'max_num': cls.max_num, 'validate_max': cls.max_num is not None } } def __init__(self, instance=None, form=None): super(BaseInlinePanel, self).__init__(instance=instance, form=form) self.formset = form.formsets[self.__class__.relation_name] child_edit_handler_class = self.__class__.get_child_edit_handler_class() self.children = [] for subform in self.formset.forms: # override the DELETE field to have a hidden input 
subform.fields['DELETE'].widget = forms.HiddenInput() # ditto for the ORDER field, if present if self.formset.can_order: subform.fields['ORDER'].widget = forms.HiddenInput() self.children.append( child_edit_handler_class(instance=subform.instance, form=subform) ) # if this formset is valid, it may have been re-ordered; respect that # in case the parent form errored and we need to re-render if self.formset.can_order and self.formset.is_valid(): self.children = sorted(self.children, key=lambda x: x.form.cleaned_data['ORDER']) empty_form = self.formset.empty_form empty_form.fields['DELETE'].widget = forms.HiddenInput() if self.formset.can_order: empty_form.fields['ORDER'].widget = forms.HiddenInput() self.empty_child = child_edit_handler_class(instance=empty_form.instance, form=empty_form) template = "wagtailadmin/edit_handlers/inline_panel.html" def render(self): formset = render_to_string(self.template, { 'self': self, 'can_order': self.formset.can_order, }) js = self.render_js_init() return widget_with_script(formset, js) js_template = "wagtailadmin/edit_handlers/inline_panel.js" def render_js_init(self): return mark_safe(render_to_string(self.js_template, { 'self': self, 'can_order': self.formset.can_order, })) class InlinePanel(object): def __init__(self, relation_name, panels=None, label='', help_text='', min_num=None, max_num=None): self.relation_name = relation_name self.panels = panels self.label = label self.help_text = help_text self.min_num = min_num self.max_num = max_num def bind_to_model(self, model): return type(str('_InlinePanel'), (BaseInlinePanel,), { 'model': model, 'relation_name': self.relation_name, 'related': getattr(model, self.relation_name).related, 'panels': self.panels, 'heading': self.label, 'help_text': self.help_text, # TODO: can we pick this out of the foreign key definition as an alternative? 
# (with a bit of help from the inlineformset object, as we do for label/heading) 'min_num': self.min_num, 'max_num': self.max_num }) # This allows users to include the publishing panel in their own per-model override # without having to write these fields out by hand, potentially losing 'classname' # and therefore the associated styling of the publishing panel def PublishingPanel(): return MultiFieldPanel([ FieldRowPanel([ FieldPanel('go_live_at'), FieldPanel('expire_at'), ], classname="label-above"), ], ugettext_lazy('Scheduled publishing'), classname="publishing") # Now that we've defined EditHandlers, we can set up wagtailcore.Page to have some. Page.content_panels = [ FieldPanel('title', classname="full title"), ] Page.promote_panels = [ MultiFieldPanel([ FieldPanel('slug'), FieldPanel('seo_title'), FieldPanel('show_in_menus'), FieldPanel('search_description'), ], ugettext_lazy('Common page configuration')), ] Page.settings_panels = [ PublishingPanel() ] class BaseStreamFieldPanel(BaseFieldPanel): def classes(self): classes = super(BaseStreamFieldPanel, self).classes() classes.append("stream-field") # In case of a validation error, BlockWidget will take care of outputting the error on the # relevant sub-block, so we don't want the stream block as a whole to be wrapped in an 'error' class. if 'error' in classes: classes.remove("error") return classes @classmethod def html_declarations(cls): return cls.block_def.all_html_declarations() def id_for_label(self): # a StreamField may consist of many input fields, so it's not meaningful to # attach the label to any specific one return "" class StreamFieldPanel(object): def __init__(self, field_name): self.field_name = field_name def bind_to_model(self, model): return type(str('_StreamFieldPanel'), (BaseStreamFieldPanel,), { 'model': model, 'field_name': self.field_name, 'block_def': model._meta.get_field(self.field_name).stream_block })
bsd-3-clause
-5,252,673,871,227,266,000
33.073791
115
0.631581
false
4.217638
false
false
false
birknilson/oyster
src/oyster.py
1
27361
# -*- coding: utf-8 -*- """ Oyster ~~~~~ **A Python parser of shell commands.** This module strives to support commands executed within the sh, bash and zsh shells alike. An important limitation to mention is that Oyster does not support parsing of scripted commands, i.e: for i in $(seq 10); do echo $i; done This might change in a future version of Oyster - at least in order to support one-liners like the one above. *Features to be included in upcoming releases:* - Extended :class:`Chain` API to ease extending the chain with additional commands and various control operators. - Parse command substitutions :copyright: (c) 2014 by Birk Nilson. :license: MIT, see LICENSE for more details. """ import shlex from subprocess import list2cmdline __author__ = 'Birk Nilson <birk@tictail.com>' __copyright__ = 'Copyright 2014, Birk Nilson' __license__ = 'MIT' __version__ = '0.1.0' __all__ = [ # Constants 'RESERVED_WORDS', 'CONTROL_OPERATORS', 'STDIN', 'STDOUT', 'STDERR', 'STDFD_MAPPING', 'DEBUG', # Classes 'Redirect', 'Chain', 'Command', # Functions 'split_token_by_operators', 'tokenize', 'is_comment', 'is_script', 'is_quoted', 'is_command', 'parse', ] #: How verbose Oyster debugging should be:: #: * 0 turns of debugging #: * 1 adds basic parse debugging #: * 2 adds tokenize debugging DEBUG = 0 #: Set of words which are reserved in the shell. 
#: See: http://bit.ly/1baSfhM#tag_02_04 RESERVED_WORDS = frozenset([ '!', ';', '{', '}', 'case', 'do', 'done', 'elif', 'else', 'esac', 'fi', 'for', 'if', 'in', 'then', 'until', 'while', ]) #: Control operators which chain multiple commands CONTROL_OPERATORS = frozenset([';', '|', '&&', '||']) #: Lookup dictionary of control operators CONTROL_OPERATOR_LOOKUP = dict(zip(CONTROL_OPERATORS, CONTROL_OPERATORS)) #: The file descriptor of the standard input file STDIN = 0 #: The file descriptor of the standard output file STDOUT = 1 #: The file descriptor of the standard error file STDERR = 2 #: Mapping of the standard file descriptors and their common names STDFD_MAPPING = { STDIN: 'stdin', STDOUT: 'stdout', STDERR: 'stderr', } class Redirect(object): """A :class:`Redirect` instance represents the various output redirections performed by the command it is attached to. Each redirect has a :attr:`source` and :attr:`destination` in which the source is the value of the standard file descriptor to be redirected to the given :attr:`destination` - which can be either a file descriptor or a filename. The method in which the redirect is performed is determined by the :attr:`mode` which can be either ``w`` or ``a``. The ``w`` mode will write to the :attr:`destination` while ``a`` will append to it, i.e '>' vs. '>>'. When a shell command is parsed all redirects will automatically be initiated and assigned to their respective command as shown below: >>> import oyster >>> cmd = 'cp -v -r myfiles/* >> copied.log 2>> errors.log' >>> command = oyster.parse(cmd)[0] >>> str(command.redirects[0]) '>> copied.log' >>> str(command.redirects[1]) '2>> errors.log' >>> command.redirects[0].is_source_stdout() True :param source: An integer representing the standard file descriptor to be redirected. :param destination: Either an integer representing the standard file descriptor which output should be redirected to or a string representing the filename. 
:param mode: Either ``w`` or ``a`` depending on whether the redirect should write or append its output to the :attr:`destination`. """ def __init__(self, source, destination, mode='w'): #: Which standard file descriptor to be redirected self.source = source #: The destination of the redirect which can either be a standard #: file descriptor (integer) or a filename (string. self.destination = destination if self.is_destination_stdfd(): mode = 'w' #: The mode in which the redirect should be performed. #: ``w`` represents writes (>) & ``a`` represents appends (>>). self.mode = mode def is_source_stdin(self): """Check if the source is the standard input file descriptor.""" return self.source == STDIN def is_source_stdout(self): """Check if the source is the standard output file descriptor.""" return self.source == STDOUT def is_source_stderr(self): """Check if the source is the standard error file descriptor.""" return self.source == STDERR def is_destination_stdfd(self): """Check if the destination is a standard file descriptor.""" return self.destination in STDFD_MAPPING def is_destination_stdin(self): """Check if the destination is the standard input file descriptor.""" return self.destination == STDIN def is_destination_stdout(self): """Check if the destination is the standard output file descriptor.""" return self.destination == STDOUT def is_destination_stderr(self): """Check if the destination is the standard error file descriptor.""" return self.destination == STDERR def __str__(self): source = str(self.source) if not self.is_source_stdout() else '' if not self.is_destination_stdfd(): separator = ' ' operator = '>' if self.mode == 'w' else '>>' else: separator = '' operator = '>&' destination = str(self.destination) as_string = '{source}{operator}{separator}{destination}' return as_string.format(source=source, operator=operator, separator=separator, destination=destination) class Chain(object): """A list-like object containing all the individual commands which 
have been chained together using control operators in the shell. Unlike a regular Python list the :class:`Chain` instance does not implement the ``.extend``, ``.sort`` and ``.count`` methods. Also it introduces the ``chain_by`` parameter to the ``.append`` and ``.insert`` methods. Oyster treats all shell commands as a chain even in the case of a single program being executed. This is a design choice to simplify usage of the module since it is easier if :func:`parse` consistently returns the same type. As shown here: >>> import oyster >>> commands = oyster.parse('ps aux | grep python') >>> len(commands) 2 >>> ps, grep = commands >>> ps.arguments ('aux',) >>> ps = oyster.parse('ps aux')[0] >>> ps.program 'ps' """ def __init__(self): #: A list containing all the individual :class:`Command` instances self.commands = [] self._strings = [] self._operators = [] def append(self, command, chained_by=None): """C.append(command[, chained_by=';']) Append given ``command`` to the chain with the ``chained_by`` as the separating control operator. :param command: A string representing the command or an instance of :class:`Command` :param chained_by: One of the control operators defined in the :attr:`CONTROL_OPERATORS` constant. The default is ``;``. """ command = self._normalize_command(command) chained_by = self._normalize_chained_by(chained_by) self.commands.append(command) self._strings.append(str(command)) self._operators.append(chained_by) def insert(self, index, command, chained_by=None): """C.insert(index, command[, chained_by=';']) Insert given ``command`` to the chain at ``index`` with the ``chained_by`` as the separating control operator. :param index: At which index of the chain to insert the command :param command: A string representing the command or an instance of :class:`Command` :param chained_by: One of the control operators defined in the :attr:`CONTROL_OPERATORS` constant. The default is ``;``. 
""" command = self._normalize_command(command) chained_by = self._normalize_chained_by(chained_by) self.commands.insert(index, command) self._strings.insert(index, str(command)) self._operators.insert(index, chained_by) def index(self, command, *args): """C.index(command, [start, [stop]]) -> first index of command. Raises ValueError if the command is not present. :param command: A string representing the command or an instance of :class:`Command` :param start: At which index to start the search :param stop: At which index to stop the search """ if hasattr(command, 'get_options'): return self.commands.index(command, *args) return self._strings.index(command, *args) def pop(self, *args): """C.pop([index]) -> command -- remove and return item at index (default last). Raises IndexError if list is empty or index is out of range. :param index: Which command to pop by index """ ret = self.commands.pop(*args) self._strings.pop(*args) self._operators.pop(*args) return ret def remove(self, command): """C.remove(command) -- remove first occurrence of command. Raises ValueError if the value is not present. 
:param command: A string representing the command or an instance of :class:`Command` """ index = self.index(command) del self.commands[index] del self._strings[index] del self._operators[index] def __add__(self, chain): if hasattr(chain, 'isalpha'): chain = parse(chain) c = Chain() c.commands = self.commands + chain.commands c._strings = self._strings + chain._strings c._operators = self._operators + chain._operators return c def __iadd__(self, chain): if hasattr(chain, 'isalpha'): chain = parse(chain) self.commands += chain.commands self._strings += chain._strings self._operators += chain._operators return self def __contains__(self, command): if not hasattr(command, 'isalpha'): return command in self.commands return command in self._strings def __delitem__(self, *args): self.commands.__delitem__(*args) self._strings.__delitem__(*args) self._operators.__delitem__(*args) def __delslice__(self, *args): self.commands.__delslice__(*args) self._strings.__delslice__(*args) self._operators.__delslice__(*args) def __eq__(self, chain): return str(self) == str(chain) def __ne__(self, chain): return not self.__eq__(chain) def __getitem__(self, index): return self.commands.__getitem__(index) def __getslice__(self, *args): c = Chain() c.commands = self.commands.__getslice__(*args) c._strings = self._strings.__getslice__(*args) c._operators = self._operators.__getslice__(*args) return c def __len__(self): return self.commands.__len__() def __str__(self): operators = self._operators[:] operators[0] = None commands = [str(command) for command in self.commands] components = [] for index, operator in enumerate(operators): if operator: whitespace = ' ' if operator == ';': whitespace = '' components.append('{0}{1} '.format(whitespace, operator)) components.append(commands[index]) return ''.join(components) def _normalize_command(self, command): if hasattr(command, 'get_options'): return command chain = parse(command) if not chain: raise ValueError('invalid command') return 
chain.pop() def _normalize_chained_by(self, chained_by): if not chained_by: return ';' if chained_by in CONTROL_OPERATORS: return chained_by raise ValueError('invalid control operator given') class Command(object): """A representation of a single - unchained - command. Contains the name of the program being executed along with all the arguments passed to it. Furthermore, it processes the given arguments to convert them into ``options``: A dictionary mapping options to their given values. An argument is considered an option in case it is prefixed with ``-``. In other words ``-v``, ``--install`` and ``-c`` are all considered to be options. **Caveat #1:** How their values are retrieved is an interesting topic. The easiest case is the scenario of an argument being --foo=bar. Then the option name is ``foo`` and its corresponding value ``bar``. Single-hyphenated arguments is a trickier matter though. Consider the following: pip install -v -r requirements.txt In the case above Oyster will treat the ``-v`` argument as a boolean option, i.e giving it a value of ``True``. In case all single-hyphenated arguments would be considered boolean options then, everyone who knows pip, will know that the stored value would be useless & incorrect. Therefore, in the case a single-hypenhated argument is followed by a non-hypenated argument the latter is considered the formers value. Naturally, this is not bulletproof neither, but it is better to be more greedy in this scenario since the arguments are also kept, untouched, in the :attr:`arguments` attribute. After all: Determening how the arguments should be handled is ultimately up to the targetted program in the command. **Caveat #2:** The :attr:`as_string` and thus str(self) value is retrieved using the ``subprocess.list2cmdline`` function. In case the command is retrieved via :func:`parse` this opens up for the possibility of minor differences in how command arguments are quoted. 
Therefore, a direct comparison of the input command and the string representation of its instance is not guaranteed to be successful. :param tokens: A list of all the tokens the command consists of """ def __init__(self, tokens): #: Name of the program which the command is executing self.program = tokens[0] #: A tuple of all the arguments passed to the program self.arguments = tuple(tokens[1:]) #: A tuple containing all tokens which the command consists of. #: In other words: tuple([self.program] + list(self.arguments)) self.tokens = tuple(tokens) #: The string representation of the command. Used in str(self) self.as_string = list2cmdline(self.tokens) #: A tuple containing all the instances of :class:`Redirect` #: found during processing of the command. self.redirects = tuple([]) self._process_arguments(self.arguments) def get_options(self): """Retrieve a copy of the command options. A copy is returned to prevent tampering with the instance options. The :class:`Command` class is not designed to support mutations. """ # Changes to the options dict will not propagate to the # tokens, arguments or string representation of the command. # Therefore, the options are intended to be read-only which this # API hopefully makes clear by making the attribute "private" and # the accessor return a copy of the dict. return self._options.copy() def has_option(self, name): """Check whether the command includes the given option ``name``. :param name: Name of the option including hyphens. """ return name in self._options def get_option_values(self, name, *args): """D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None. :param name: Name of the option including hyphens. """ return self._options.get(name, *args) def get_option_count(self, name): """Return the amount of values stored for the given options. :param name: Name of the option including hyphens. 
""" values = self.get_option_values(name) if values: return len(values) return 0 def __str__(self): return self.as_string def _register_redirect(self, token, output_file=None): if is_quoted(token): return index = token.find('>') if index == -1: return source = 1 if index: try: source = int(token[index - 1]) except ValueError: pass mode = 'w' destination = None try: next_index = index + 1 if token[next_index] == '&': destination = int(token[next_index:]) elif token[next_index] == '>': mode = 'a' destination = output_file except (IndexError, ValueError): pass if not destination: return if hasattr(destination, 'lstrip'): destination = destination.lstrip() r = Redirect(source, destination, mode=mode) redirects = list(self.redirects) redirects.append(r) self.redirects = tuple(redirects) def _process_arguments(self, arguments): def sanitize_value(value): if not hasattr(value, 'isalpha'): return value if is_quoted(value): value = value[1:-1] return value def get_value(next_token): if (hasattr(next_token, 'startswith') and not next_token.startswith('-')): return sanitize_value(next_token) return True options = {} for index, token in enumerate(arguments): try: next_token = arguments[index + 1] except IndexError: next_token = None if not token.startswith('-'): self._register_redirect(token, output_file=next_token) continue if token.startswith('--'): key, _, value = token.partition('=') if value: value = sanitize_value(value) else: value = get_value(next_token) options.setdefault(key, []).append(value) else: keys = list(token[1:]) for key in keys: value = get_value(next_token) options.setdefault('-' + key, []).append(value) self._options = options def debug(message, level=1, exit=False): if DEBUG >= level: print message def debug_section(key, value, level=1): debug(""" %(key)s: %(value)s """ % dict(key=key.upper(), value=value)) def split_token_by_operators(token): """Split the given ``token`` by all containing :attr:`CONTROL_OPERATORS`. 
Each unquoted token longer than a single character is required to do this during tokenization of a command. Otherwise, commands which are not properly spaced will be treated incorrectly. As illustrated below: >>> import shlex >>> import oyster >>> cmd = 'cd /some/path;ls' >>> tokens = shlex.split(cmd, posix=True) >>> tokens ['cd', '/some/path;ls'] >>> processed = oyster.split_token_by_operators(tokens[1]) >>> processed ['/some/path', ';', 'ls'] >>> tokens = [tokens[0]] >>> tokens.extend(processed) >>> tokens ['cd', '/some/path', ';', 'ls'] :param token: The token to check for control operators """ if len(token) <= 1 or is_quoted(token): return [token] tokens = [] characters = [] consume_next = False previous_character = None for index, character in enumerate(token): if consume_next: consume_next = False previous_character = character continue try: next_character = token[index + 1] except IndexError: next_character = '' is_escaped = (character == '\\' and previous_character != '\\' and next_character != '\\') if is_escaped: characters.append(character) characters.append(next_character) consume_next = True continue found = False for operator in CONTROL_OPERATORS: if operator == character: found = True break if operator == character + next_character: found = True consume_next = True break previous_character = character if found: tokens.append(''.join(characters)) tokens.append(operator) characters = [] else: characters.append(character) if characters: tokens.append(''.join(characters)) return tokens def tokenize(string): """Tokenize given ``string`` and return a list containing all the tokens. The workhorse behind this function is the ``shlex`` module. However, tokens found via ``shlex`` are processed to ensure we handle command substitutions along with chained commands properly. 
:paramter string: The command - as a string - to tokenize """ processed = [] lex = shlex.shlex(string, posix=True) lex.whitespace_split = True lex.commenters = '' in_substitution = False substitution_closer = None substitution_tokens = [] while True: token = lex.get_token() title = '[TOKEN | IN SUBSTITUTION]' if in_substitution else '[TOKEN]' debug_section(title, token, level=2) if token is None: debug('- Abort. Empty token', level=2) break if in_substitution: substitution_tokens.append(token) if token.endswith(substitution_closer): debug('- Command substitution closed.') processed.append(''.join(substitution_tokens)) substitution_tokens = [] in_substitution = False continue if token.startswith('$('): debug('- Command substitution detected using $(', level=2) in_substitution = True substitution_closer = ')' substitution_tokens.append(token) continue if token.startswith('`'): debug('- Command substitution detected using `', level=2) in_substitution = True substitution_closer = '`' substitution_tokens.append(token) continue # Handle the case of: cd /some/path&&ls processed.extend(split_token_by_operators(token)) if substitution_tokens: processed.append(''.join(substitution_tokens)) return processed def is_comment(string): """Check whether given string is considered to be a comment. :param string: The string, i.e command, to check """ return string.lstrip()[0] == '#' def is_script(string): """Check whether given string is considered to be a script. This function oversimplifies what a shell script is, but covers the necessary basics for this module. :param string: The string, i.e command, to check """ is_script = False string = string.lstrip() for reserved in RESERVED_WORDS: if string.startswith(reserved): is_script = True break return is_script def is_quoted(string): """Check whether given string is quoted. 
:param string: The string, i.e command, to check """ string = string.lstrip() return ((string.startswith('"') and string.endswith('"')) or (string.startswith("'") and string.endswith("'"))) def is_command(string, tokens=None): """Check whether given string is considered to be a command. :param string: The string, i.e command, to check """ if not string: return False if is_comment(string): return False if is_quoted(string): return False if is_script(string): return False return True def parse(string): """Parse given ``string`` into a :class:`Chain` of :class:`Command` s. >>> import oyster >>> cmd = 'pip search -vvv --timeout=5 flask | grep session | less' >>> chain = oyster.parse(cmd) >>> len(chain) 3 >>> pip, grep, less = chain >>> pip.has_option('--timeout') True >>> pip.get_option_values('--timeout') ['5'] >>> pip.get_option_count('-v') 3 >>> pip.arguments ('search', '--timeout=5', 'flask') >>> str(grep) 'grep session' >>> str(less) 'less' >>> chain.remove('less') >>> str(chain) 'pip search -vvv --timeout=5 flask | grep session' >>> chain += 'date -u' >>> str(chain) 'pip search -vvv --timeout=5 flask | grep session; date -u' >>> utc_date = chain[chain.index('date -u')] >>> str(utc_date) 'date -u' >>> utc_date.get_option_values('-u') [True] :param string: The string, i.e command, to parse """ try: chain = Chain() string = string.strip() if DEBUG: print '**********************************************************' debug_section('String to parse', string) if not (string or hasattr(string, 'isalpha')): debug_section('Abort', 'Given command is not a string') return chain tokens = tokenize(string) debug_section('Tokens', tokens) if not is_command(string, tokens): debug_section('Abort', 'Given string was not a command') return chain chained_by = None command_tokens = [] to_parse = tokens + [';'] for index, token in enumerate(to_parse): if token not in CONTROL_OPERATOR_LOOKUP: command_tokens.append(token) continue if is_script(command_tokens[0]): # Abort entire chain if 
script is detected chain = Chain() debug_section('Abort', 'Script detected') break command = Command(command_tokens) chain.append(command, chained_by=chained_by) debug_section('Command chained (%s)' % chained_by, command) chained_by = token command_tokens = [] except Exception as e: debug_section('Exception thrown', e) raise return chain
mit
-4,775,107,700,802,405,000
32.164848
87
0.588173
false
4.413065
false
false
false
Ronnasayd/Ifuzzy2py
test_gauss.py
1
2012
#!/usr/bin/python3 # -*- coding: utf-8 -*- """ Copyright 2017 Ronnasayd Machado <ronnasayd@hotmail.com> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from IMfuzzy2 import IMfuzzy2 from Mfunction import Mfunction from Antecedent import Antecedent from Consequent import Consequent from Rule import Rule from Rules import Rules from Input import Input from Output import Output from Inputs import Inputs from Outputs import Outputs from Ifuzzy2 import Ifuzzy2 TRAP = 1 TRIA = 2 GAUS = 3 rules = Rules() upperBI = Mfunction(GAUS, 2, 0.8) lowerBI = Mfunction(GAUS, 2, 0.2) BaixaI = IMfuzzy2(lowerBI, upperBI) upperAI = Mfunction(GAUS, 3, 0.8) lowerAI = Mfunction(GAUS, 3, 0.2) AltaI = IMfuzzy2(lowerAI, upperAI) upperBO = Mfunction(GAUS, 2, 0.8) lowerBO = Mfunction(GAUS, 2, 0.2) BaixaO = IMfuzzy2(lowerBO, upperBO) upperAO = Mfunction(GAUS, 3, 0.8) lowerAO = Mfunction(GAUS, 3, 0.2) AltaO = IMfuzzy2(lowerAO, upperAO) I = Input(0, 5) I.addMf(BaixaI) I.addMf(AltaI) O = Output(0, 5) O.addMf(BaixaO) O.addMf(AltaO) inputs = Inputs() inputs.addInput(I) outputs = Outputs() outputs.addOutput(O) ant = Antecedent() ant.addMf(BaixaI) cont = Consequent() cont.addMf(AltaO) rule = Rule(ant, cont) rules.addRule(rule) ant = Antecedent() ant.addMf(AltaI) cont = Consequent() cont.addMf(BaixaO) rule = Rule(ant, cont) rules.addRule(rule) fuzzy = Ifuzzy2(inputs, outputs, rules,99) for x in range(0, 6): fuzzy.fuzzyfy([x]) y = fuzzy.defuzzyfy(1) [yl, yr] = fuzzy.getReducedFuzzy(1) print (yl,":", yr,":", y)
apache-2.0
-5,231,015,777,263,714,000
21.606742
72
0.73161
false
2.741144
false
false
false
Wyn10/Cnchi
cnchi/misc/keyboard_names.py
1
9010
#!/usr/bin/python
# -*- coding: UTF-8 -*-
#
# keyboard_names.py
#
# Copyright © 2013-2016 Antergos
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA

""" Parse base.xml (keyboard models, layouts and variants) """

import logging
import os

from gi.repository import GObject

from collections import OrderedDict

try:
    # Prefer the C implementation when available (faster on old Pythons).
    import xml.etree.cElementTree as eTree
except ImportError:
    import xml.etree.ElementTree as eTree


class Model(GObject.GObject):
    """ Represents a keyboard model """

    def __init__(self, name, description, vendor):
        GObject.GObject.__init__(self)
        self.name = name
        self.description = description
        self.vendor = vendor

    def __repr__(self):
        """ Return model description """
        return self.description


class Variant(GObject.GObject):
    """ Keymap variant layout """

    def __init__(self, name, short_description, description, language_list):
        GObject.GObject.__init__(self)
        self.name = name
        self.short_description = short_description
        self.description = description
        self.language_list = language_list

    def __repr__(self):
        """ Return variant description """
        return self.description


class Layout(GObject.GObject):
    """ Keymap layout (can hold several variants) """

    def __init__(self, name, short_description, description, language_list):
        GObject.GObject.__init__(self)
        self.name = name
        self.short_description = short_description
        self.description = description
        self.language_list = language_list
        self.variants = {}

    def __repr__(self):
        """ Return layout description """
        return self.description

    def add_variant(self, variant):
        """ Add new layout variant """
        self.variants[variant.name] = variant

    def sort_variants(self):
        """ Sort variants by their string representation (description) """
        self.variants = OrderedDict(
            sorted(self.variants.items(), key=lambda t: str(t[1])))


class KeyboardNames(object):
    """ Read all keyboard info (models, layouts and variants) """

    def __init__(self, filename):
        self.models = None
        self.layouts = None
        self._filename = filename
        self._load_file()

    def _clear(self):
        """ Clear all data """
        self.models = {}
        self.layouts = {}

    def _load_file(self):
        """ Load info from the xml file given at construction time """
        if not os.path.exists(self._filename):
            logging.error("Can't find %s file!", self._filename)
            return

        self._clear()

        xml_tree = eTree.parse(self._filename)
        xml_root = xml_tree.getroot()

        for model in xml_root.iter('model'):
            for config_item in model.iter('configItem'):
                # Defaults guard against partially filled <configItem>
                # nodes (previously these could be left unbound).
                model_name = ""
                model_description = ""
                model_vendor = ""
                for item in config_item:
                    if item.tag == "name":
                        model_name = item.text
                    elif item.tag == "description":
                        model_description = item.text
                    elif item.tag == "vendor":
                        model_vendor = item.text
                # Store model
                self.models[model_name] = Model(
                    model_name, model_description, model_vendor)

        for layout in xml_root.iter('layout'):
            # <configItem> precedes <variantList> inside <layout>, so these
            # are filled before any variant is attached — TODO confirm this
            # ordering holds for every base.xml shipped.
            layout_name = ""
            layout_short_description = ""
            layout_description = ""
            for layout_item in layout:
                if layout_item.tag == "configItem":
                    layout_language_list = []
                    for item in layout_item:
                        if item.tag == "name":
                            layout_name = item.text
                        elif item.tag == "shortDescription":
                            layout_short_description = item.text
                        elif item.tag == "description":
                            layout_description = item.text
                        elif item.tag == "languageList":
                            for lang in item:
                                layout_language_list.append(lang.text)
                    self.layouts[layout_name] = Layout(
                        layout_name,
                        layout_short_description,
                        layout_description,
                        layout_language_list)

                if layout_item.tag == "variantList":
                    for variant in layout_item:
                        variant_language_list = []
                        for config_item in variant:
                            # Same unbound-name guard as for models.
                            variant_name = ""
                            variant_short_description = ""
                            variant_description = ""
                            for item in config_item:
                                if item.tag == "name":
                                    variant_name = item.text
                                elif item.tag == "shortDescription":
                                    variant_short_description = item.text
                                elif item.tag == "description":
                                    variant_description = item.text
                                elif item.tag == "languageList":
                                    for lang in item:
                                        variant_language_list.append(lang.text)
                            self.layouts[layout_name].add_variant(
                                Variant(
                                    variant_name,
                                    variant_short_description,
                                    variant_description,
                                    variant_language_list))

        self.sort_layouts()

    def sort_layouts(self):
        """ Sort stored layouts (and their variants) by description """
        self.layouts = OrderedDict(
            sorted(self.layouts.items(), key=lambda t: str(t[1])))
        for name in self.layouts:
            self.layouts[name].sort_variants()

    def get_layout(self, name):
        """ Get layout by its name (None if unknown) """
        return self.layouts.get(name)

    def get_layouts(self):
        """ Return all layouts """
        return self.layouts

    def get_layout_description(self, name):
        """ Get layout description by its name (None if unknown) """
        layout = self.layouts.get(name)
        if layout is not None:
            return str(layout)
        return None

    def get_layout_by_description(self, description):
        """ Get layout by its description (None if no match) """
        for name in self.layouts:
            if description == str(self.layouts[name]):
                return self.layouts[name]
        return None

    def get_layout_name_by_description(self, description):
        """ Get layout name by its description (None if no match) """
        for name in self.layouts:
            if description == str(self.layouts[name]):
                return name
        return None

    def has_variants(self, name):
        """ Check if layout has variants """
        return bool(self.layouts[name].variants)

    def get_variants(self, name):
        """ Get layout variants """
        return self.layouts[name].variants

    def get_variant_description(self, name, variant_name):
        """ Get variant description by its name (and layout name) """
        try:
            return str(self.layouts[name].variants[variant_name])
        except KeyError:
            # Unknown layout or variant name.
            return None

    def get_variant_descriptions(self, name):
        """ Get all variant descriptions for layout 'name' """
        descriptions = []
        for variant_name in self.layouts[name].variants:
            description = str(self.layouts[name].variants[variant_name])
            descriptions.append(description)
        return descriptions

    def get_variant_name_by_description(self, description):
        """ Get variant name by its description (None if no match) """
        for layout_name in self.layouts:
            for variant_name in self.layouts[layout_name].variants:
                if description == str(self.layouts[layout_name].variants[variant_name]):
                    return variant_name
        return None


def test():
    """ Test module """
    base_xml_path = "/usr/share/cnchi/data/base.xml"
    kbd_names = KeyboardNames(base_xml_path)
    layouts = kbd_names.get_layouts()
    for name in layouts:
        print(name, layouts[name])
        for variant_name in layouts[name].variants:
            print(layouts[name], "-", layouts[name].variants[variant_name])


if __name__ == '__main__':
    test()
gpl-3.0
5,801,357,571,530,852,000
34.329412
88
0.545233
false
4.670295
true
false
false
hwjworld/xiaodun-platform
cms/djangoapps/contentstore/views/checklist.py
1
5604
"""
Studio checklist views: a REST-style handler for course checklists plus
helpers that expand checklist action URLs into course-specific links.
"""
import json
import copy
from util.json_request import JsonResponse
from django.http import HttpResponseBadRequest
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
from django_future.csrf import ensure_csrf_cookie
from edxmako.shortcuts import render_to_response
from django.http import HttpResponseNotFound
from django.core.exceptions import PermissionDenied

from xmodule.modulestore.django import loc_mapper
from ..utils import get_modulestore

from .access import has_course_access
from xmodule.course_module import CourseDescriptor
from xmodule.modulestore.locator import BlockUsageLocator

__all__ = ['checklists_handler']


# pylint: disable=unused-argument
@require_http_methods(("GET", "POST", "PUT"))
@login_required
@ensure_csrf_cookie
def checklists_handler(request, tag=None, package_id=None, branch=None, version_guid=None, block=None, checklist_index=None):
    """
    The restful handler for checklists.

    GET
        html: return html page for all checklists
        json: return json representing all checklists. checklist_index is not supported for GET at this time.
    POST or PUT
        json: updates the checked state for items within a particular checklist. checklist_index is required.

    Raises PermissionDenied when the requesting user lacks access to the course.
    """
    location = BlockUsageLocator(package_id=package_id, branch=branch, version_guid=version_guid, block_id=block)
    if not has_course_access(request.user, location):
        raise PermissionDenied()

    # Translate the new-style locator to the old-style location that the
    # modulestore APIs below still expect.
    old_location = loc_mapper().translate_locator_to_location(location)

    modulestore = get_modulestore(old_location)
    course_module = modulestore.get_item(old_location)

    # Default to JSON when no Accept header is sent.
    json_request = 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json')
    if request.method == 'GET':
        # If course was created before checklists were introduced, copy them over
        # from the template.
        if not course_module.checklists:
            course_module.checklists = CourseDescriptor.checklists.default
            modulestore.update_item(course_module, request.user.id)

        expanded_checklists = expand_all_action_urls(course_module)
        if json_request:
            return JsonResponse(expanded_checklists)
        else:
            handler_url = location.url_reverse('checklists/', '')
            return render_to_response('checklists.html',
                                      {
                                          'handler_url': handler_url,
                                          # context_course is used by analytics
                                          'context_course': course_module,
                                          'checklists': expanded_checklists
                                      })
    elif json_request:
        # Can now assume POST or PUT because GET handled above.
        if checklist_index is not None and 0 <= int(checklist_index) < len(course_module.checklists):
            index = int(checklist_index)
            persisted_checklist = course_module.checklists[index]
            modified_checklist = json.loads(request.body)
            # Only thing the user can modify is the "checked" state.
            # We don't want to persist what comes back from the client because it will
            # include the expanded action URLs (which are non-portable).
            for item_index, item in enumerate(modified_checklist.get('items')):
                persisted_checklist['items'][item_index]['is_checked'] = item['is_checked']
            # seeming noop which triggers kvs to record that the metadata is
            # not default
            course_module.checklists = course_module.checklists
            course_module.save()
            modulestore.update_item(course_module, request.user.id)
            expanded_checklist = expand_checklist_action_url(course_module, persisted_checklist)
            return JsonResponse(expanded_checklist)
        else:
            return HttpResponseBadRequest(
                ("Could not save checklist state because the checklist index "
                 "was out of range or unspecified."),
                content_type="text/plain"
            )
    else:
        # Non-JSON POST/PUT is not supported.
        return HttpResponseNotFound()


def expand_all_action_urls(course_module):
    """
    Gets the checklists out of the course module and expands their action urls.

    Returns a copy of the checklists with modified urls, without modifying the persisted
    version of the checklists.
    """
    expanded_checklists = []
    for checklist in course_module.checklists:
        expanded_checklists.append(expand_checklist_action_url(course_module, checklist))
    return expanded_checklists


def expand_checklist_action_url(course_module, checklist):
    """
    Expands the action URLs for a given checklist and returns the modified version.

    The method does a copy of the input checklist and does not modify the input argument.
    """
    expanded_checklist = copy.deepcopy(checklist)
    # Maps the symbolic action name stored in the checklist to the Studio
    # URL-conf prefix used to build the real link.
    urlconf_map = {
        "ManageUsers": "course_team",
        "CourseOutline": "course",
        "SettingsDetails": "settings/details",
        "SettingsGrading": "settings/grading",
    }
    for item in expanded_checklist.get('items'):
        action_url = item.get('action_url')
        if action_url in urlconf_map:
            url_prefix = urlconf_map[action_url]
            ctx_loc = course_module.location
            location = loc_mapper().translate_location(ctx_loc.course_id, ctx_loc, False, True)
            item['action_url'] = location.url_reverse(url_prefix, '')
    return expanded_checklist
agpl-3.0
-7,421,149,964,564,552,000
42.107692
125
0.662384
false
4.419558
false
false
false
Stanford-Online/edx-analytics-dashboard
analytics_dashboard/courses/urls.py
1
7466
# pylint: disable=no-value-for-parameter
"""
URL configuration for the courses app: per-course enrollment, engagement,
performance, CSV-export and learner views, keyed by course/content id
patterns shared across the sub-URLconfs.
"""
from django.conf import settings
from django.conf.urls import url, include

from courses import views
from courses.views import (
    course_summaries,
    csv,
    enrollment,
    engagement,
    performance,
    learners,
)

# Matches either an old-style i4x:// content id or a plain opaque id.
CONTENT_ID_PATTERN = r'(?P<content_id>(?:i4x://?[^/]+/[^/]+/[^/]+/[^@]+(?:@[^/]+)?)|(?:[^/]+))'

# Same pattern, renamed capture groups for the various content kinds.
PROBLEM_PART_ID_PATTERN = CONTENT_ID_PATTERN.replace('content_id', 'problem_part_id')
ASSIGNMENT_ID_PATTERN = CONTENT_ID_PATTERN.replace('content_id', 'assignment_id')
PROBLEM_ID_PATTERN = CONTENT_ID_PATTERN.replace('content_id', 'problem_id')
SECTION_ID_PATTERN = CONTENT_ID_PATTERN.replace('content_id', 'section_id')
SUBSECTION_ID_PATTERN = CONTENT_ID_PATTERN.replace('content_id', 'subsection_id')
VIDEO_ID_PATTERN = CONTENT_ID_PATTERN.replace('content_id', 'video_id')

PIPELINE_VIDEO_ID = r'(?P<pipeline_video_id>([^/+]+[/+][^/+]+[/+][^/]+)+[|]((?:i4x://?[^/]+/[^/]+/[^/]+' \
                    r'/[^@]+(?:@[^/]+)?)|(?:[^/]+)+))'

TAG_VALUE_ID_PATTERN = r'(?P<tag_value>[\w-]+)'

answer_distribution_regex = \
    r'^graded_content/assignments/{assignment_id}/problems/{problem_id}/parts/{part_id}/answer_distribution/$'.format(
        assignment_id=ASSIGNMENT_ID_PATTERN, problem_id=PROBLEM_ID_PATTERN, part_id=PROBLEM_PART_ID_PATTERN)

ungraded_answer_distribution_regex = \
    r'^ungraded_content/sections/{}/subsections/{}/problems/{}/parts/{}/answer_distribution/$'.format(
        SECTION_ID_PATTERN, SUBSECTION_ID_PATTERN, PROBLEM_ID_PATTERN, PROBLEM_PART_ID_PATTERN)

video_timeline_regex = \
    r'^videos/sections/{}/subsections/{}/modules/{}/timeline/$'.format(
        SECTION_ID_PATTERN, SUBSECTION_ID_PATTERN, VIDEO_ID_PATTERN)

ENROLLMENT_URLS = ([
    url(r'^activity/$', enrollment.EnrollmentActivityView.as_view(), name='activity'),
    url(r'^geography/$', enrollment.EnrollmentGeographyView.as_view(), name='geography'),
    url(r'^demographics/age/$', enrollment.EnrollmentDemographicsAgeView.as_view(), name='demographics_age'),
    url(r'^demographics/education/$',
        enrollment.EnrollmentDemographicsEducationView.as_view(),
        name='demographics_education'),
    url(r'^demographics/gender/$', enrollment.EnrollmentDemographicsGenderView.as_view(), name='demographics_gender'),
], 'enrollment')

ENGAGEMENT_URLS = ([
    url(r'^content/$', engagement.EngagementContentView.as_view(), name='content'),
    url(r'^videos/$', engagement.EngagementVideoCourse.as_view(), name='videos'),
    # ordering of the URLS is important for routing the the section, subsection, etc. correctly
    url(video_timeline_regex, engagement.EngagementVideoTimeline.as_view(), name='video_timeline'),
    url(r'^videos/sections/{}/subsections/{}/$'.format(SECTION_ID_PATTERN, SUBSECTION_ID_PATTERN),
        engagement.EngagementVideoSubsection.as_view(),
        name='video_subsection'),
    url(r'^videos/sections/{}/$'.format(SECTION_ID_PATTERN),
        engagement.EngagementVideoSection.as_view(),
        name='video_section'),
], 'engagement')

PERFORMANCE_URLS = ([
    url(r'^ungraded_content/$', performance.PerformanceUngradedContent.as_view(), name='ungraded_content'),
    url(ungraded_answer_distribution_regex,
        performance.PerformanceUngradedAnswerDistribution.as_view(),
        name='ungraded_answer_distribution'),
    url(r'^ungraded_content/sections/{}/subsections/{}/$'.format(SECTION_ID_PATTERN, SUBSECTION_ID_PATTERN),
        performance.PerformanceUngradedSubsection.as_view(),
        name='ungraded_subsection'),
    url(r'^ungraded_content/sections/{}/$'.format(SECTION_ID_PATTERN),
        performance.PerformanceUngradedSection.as_view(),
        name='ungraded_section'),
    url(r'^graded_content/$', performance.PerformanceGradedContent.as_view(), name='graded_content'),
    url(r'^graded_content/(?P<assignment_type>[\w-]+)/$',
        performance.PerformanceGradedContentByType.as_view(),
        name='graded_content_by_type'),
    url(answer_distribution_regex,
        performance.PerformanceAnswerDistributionView.as_view(),
        name='answer_distribution'),
    # This MUST come AFTER the answer distribution pattern; otherwise, the answer distribution pattern
    # will be interpreted as an assignment pattern.
    url(r'^graded_content/assignments/{}/$'.format(ASSIGNMENT_ID_PATTERN),
        performance.PerformanceAssignment.as_view(),
        name='assignment'),
    url(r'^learning_outcomes/$',
        performance.PerformanceLearningOutcomesContent.as_view(),
        name='learning_outcomes'),
    url(r'^learning_outcomes/{}/$'.format(TAG_VALUE_ID_PATTERN),
        performance.PerformanceLearningOutcomesSection.as_view(),
        name='learning_outcomes_section'),
    url(r'^learning_outcomes/{}/problems/{}/$'.format(TAG_VALUE_ID_PATTERN, PROBLEM_ID_PATTERN),
        performance.PerformanceLearningOutcomesAnswersDistribution.as_view(),
        name='learning_outcomes_answers_distribution'),
    url(r'^learning_outcomes/{}/problems/{}/{}/$'.format(TAG_VALUE_ID_PATTERN, PROBLEM_ID_PATTERN,
                                                         PROBLEM_PART_ID_PATTERN),
        performance.PerformanceLearningOutcomesAnswersDistribution.as_view(),
        name='learning_outcomes_answers_distribution_with_part'),
], 'performance')

CSV_URLS = ([
    url(r'^enrollment/$', csv.CourseEnrollmentCSV.as_view(), name='enrollment'),
    url(r'^enrollment/geography/$', csv.CourseEnrollmentByCountryCSV.as_view(), name='enrollment_geography'),
    url(r'^enrollment/demographics/age/$',
        csv.CourseEnrollmentDemographicsAgeCSV.as_view(),
        name='enrollment_demographics_age'),
    url(r'^enrollment/demographics/education/$',
        csv.CourseEnrollmentDemographicsEducationCSV.as_view(),
        name='enrollment_demographics_education'),
    url(r'^enrollment/demographics/gender/$',
        csv.CourseEnrollmentDemographicsGenderCSV.as_view(),
        name='enrollment_demographics_gender'),
    url(r'^engagement/activity_trend/$',
        csv.CourseEngagementActivityTrendCSV.as_view(),
        name='engagement_activity_trend'),
    url(r'^engagement/videos/{}/$'.format(PIPELINE_VIDEO_ID),
        csv.CourseEngagementVideoTimelineCSV.as_view(),
        name='engagement_video_timeline'),
    url(r'^performance/graded_content/problems/{}/answer_distribution/{}/$'.format(CONTENT_ID_PATTERN,
                                                                                   PROBLEM_PART_ID_PATTERN),
        csv.PerformanceAnswerDistributionCSV.as_view(),
        name='performance_answer_distribution'),
    # NOTE(review): unlike its siblings this pattern is unanchored (no ^/$) —
    # presumably intentional, but worth confirming.
    url(r'problem_responses/',
        csv.PerformanceProblemResponseCSV.as_view(),
        name='performance_problem_responses')
], 'csv')

LEARNER_URLS = ([
    url(r'^$', learners.LearnersView.as_view(), name='learners'),
], 'learners')

COURSE_URLS = [
    # Course homepage. This should be the entry point for other applications linking to the course.
    url(r'^$', views.CourseHome.as_view(), name='home'),
    url(r'^enrollment/', include(ENROLLMENT_URLS)),
    url(r'^engagement/', include(ENGAGEMENT_URLS)),
    url(r'^performance/', include(PERFORMANCE_URLS)),
    url(r'^csv/', include(CSV_URLS)),
    url(r'^learners/', include(LEARNER_URLS)),
]

app_name = 'courses'
urlpatterns = [
    url('^$', course_summaries.CourseIndex.as_view(), name='index'),
    url(r'^{}/'.format(settings.COURSE_ID_PATTERN), include(COURSE_URLS)),
    url(r'csv/course_list/$', course_summaries.CourseIndexCSV.as_view(), name='index_csv')
]
agpl-3.0
7,305,463,706,066,446,000
52.328571
120
0.682561
false
3.694211
false
false
false
nojhan/ereshkigal
tunnelmon.py
1
26267
#!/usr/bin/python3 # -*- coding: utf-8 -*- # # Ereshkigal is an AutoSSH tunnel monitor # It gives a curses user interface to monitor existing SSH tunnel that are managed with autossh. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Author : nojhan <nojhan@nojhan.net> # ################################################################################################# # CORE ################################################################################################# import os import subprocess import logging import psutil import socket import re import collections class Tunnel: def __init__(self, ssh_pid = None, in_port = None, via_host = None, target_host = None, out_port = None): # assert(ssh_pid != None) self.ssh_pid = ssh_pid assert(in_port!=None) self.in_port = in_port assert(via_host!=None) self.via_host = via_host assert(target_host!=None) self.target_host = target_host assert(out_port!=None) self.out_port = out_port self.connections = [] def repr_tunnel(self): return "%i\t%i\t%s\t%s\t%i" % ( self.ssh_pid, self.in_port, self.via_host, self.target_host, self.out_port) def repr_connections(self): # list of tunnels linked to this process rep = "" for c in self.connections: rep += "\n\t↳ %s" % c return rep def __repr__(self): return self.repr_tunnel() + self.repr_connections() class AutoTunnel(Tunnel): def __init__(self, autossh_pid = None, *args, **kwargs): super().__init__(*args, 
**kwargs) assert(autossh_pid!=None) self.autossh_pid = autossh_pid def repr_tunnel(self): rep = super().repr_tunnel() return "auto\t" + rep class RawTunnel(Tunnel): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def repr_tunnel(self): rep = super().repr_tunnel() return "ssh\t" + rep class Connection: """A dictionary that stores an SSH connection related to a tunnel""" def __init__(self, local_address = None, in_port = None, foreign_address = None, out_port = None, status = None, family = None ): # informations available with netstat assert(local_address!=None) self.local_address = local_address assert(in_port!=None) self.in_port = in_port self.foreign_address = foreign_address self.out_port = out_port assert(status!=None) self.status = status assert(family!=None) self.family = family self.family_rep = {socket.AddressFamily.AF_INET:"INET", socket.AddressFamily.AF_INET6:"INET6", socket.AddressFamily.AF_UNIX:"UNIX"} # FIXME would be nice to have an estimation of the connections latency #self.latency = 0 def __repr__(self): # do not logging.debug all the informations by default if self.foreign_address and self.out_port: return "%s\t%s\t%s:%i → %s:%i" % ( self.family_rep[self.family], self.status, self.local_address, self.in_port, self.foreign_address, self.out_port, ) else: return "%s\t%s\t%s:%i" % ( self.family_rep[self.family], self.status, self.local_address, self.in_port, ) class TunnelsParser: def __init__(self): """Warning: the initialization does not gather tunnels informations, use update() to do so""" # { ssh_pid : Tunnel } self.tunnels = collections.OrderedDict() # do not perform update by default # this is necessary because one may want # only a list of connections OR autossh processes #self.update() self.re_forwarding = re.compile(r"-L(\d+):(.+):(\d+)") self.header = 'TYPE\tSSH_PID\tIN_PORT\tVIA_HOST\tTARGET_HOST\tOUT_PORT' def get_tunnel(self, pos): pid = list(self.tunnels.keys())[pos] return self.tunnels[pid] def parse(self, cmd): 
cmdline = " ".join(cmd) logging.debug('autossh cmd line:', cmdline) logging.debug('forwarding regexp:', self.re_forwarding) match = self.re_forwarding.findall(cmdline) logging.debug(match) if match: assert(len(match)==1) in_port, target_host, out_port = match[0] logging.debug("matches: ", match) # Find the hostname on wich the tunnel is built. via_host = "unknown" # Search backward and take the first parameter argument. # FIXME this is an ugly hack for i in range( len(cmd)-1,0,-1 ): if cmd[i][0] != '-': via_host = cmd[i] break return (int(in_port), via_host, target_host, int(out_port)) def update(self): """Gather and parse informations from the operating system""" self.tunnels.clear() # Browse the SSH processes handling a tunnel. for proc in psutil.process_iter(): try: process = proc.as_dict(attrs=['pid','ppid','name','cmdline','connections']) cmd = process['cmdline'] except psutil.NoSuchProcess: pass else: if process['name'] == 'ssh': logging.debug(process) in_port, via_host, target_host, out_port = self.parse(cmd) logging.debug(in_port, via_host, target_host, out_port) # Check if this ssh tunnel is managed by autossh. parent = psutil.Process(process['ppid']) if parent.name() == 'autossh': # Add an autossh tunnel. pid = parent.pid # autossh pid self.tunnels[pid] = AutoTunnel(pid, process['pid'], in_port, via_host, target_host, out_port ) else: # Add a raw tunnel. 
pid = process['pid'] self.tunnels[pid] = RawTunnel(pid, in_port, via_host, target_host, out_port ) for c in process['connections']: logging.debug(c) laddr,lport = c.laddr if c.raddr: raddr,rport = c.raddr else: raddr,rport = (None,None) connection = Connection(laddr,lport,raddr,rport,c.status,c.family) logging.debug(connection) self.tunnels[pid].connections.append(connection) logging.debug(self.tunnels) def __repr__(self): reps = [self.header] for t in self.tunnels: reps.append(str(self.tunnels[t])) return "\n".join(reps) ################################################################################################# # INTERFACES ################################################################################################# import curses import time import signal class CursesMonitor: """Textual user interface to display up-to-date informations about current tunnels""" def __init__(self, scr): # curses screen self.scr = scr # tunnels monitor self.tp = TunnelsParser() # selected line self.cur_line = -1 # selected pid self.cur_pid = -1 # switch to show only autoss processes (False) or ssh connections also (True) self.show_connections = False # FIXME pass as parameters+options self.update_delay = 1 # seconds of delay between two data updates self.ui_delay = 0.05 # seconds between two screen update # colors # FIXME different colors for different types of tunnels (auto or raw) self.colors_tunnel = {'kind_auto':4, 'kind_raw':5, 'ssh_pid':0, 'in_port':3, 'via_host':2, 'target_host':2, 'out_port':3, 'tunnels_nb':4, 'tunnels_nb_none':1} self.colors_highlight = {'kind_auto':9, 'kind_raw':9, 'ssh_pid':9, 'in_port':9, 'via_host':9, 'target_host':9, 'out_port':9, 'tunnels_nb':9, 'tunnels_nb_none':9} self.colors_connection = {'ssh_pid':0, 'autossh_pid':0, 'status':4, 'status_out':1, 'local_address':2, 'in_port':3, 'foreign_address':2, 'out_port':3} self.header = ("TYPE","SSHPID","INPORT","VIA","TARGET","OUTPORT") def do_Q(self): """Quit""" logging.debug("Waited: %s" % 
self.log_ticks) self.log_ticks = "" logging.debug("Key pushed: Q") return False def do_R(self): """Reload autossh tunnel""" logging.debug("Waited: %s" % self.log_ticks) self.log_ticks = "" logging.debug("Key pushed: R") # if a pid is selected if self.cur_pid != -1: # send the SIGUSR1 signal if type(self.tp.get_tunnel(self.cur_line)) == AutoTunnel: # autossh performs a reload of existing tunnels that it manages logging.debug("SIGUSR1 on PID: %i" % self.cur_pid) os.kill( self.cur_pid, signal.SIGUSR1 ) else: logging.debug("Cannot reload a RAW tunnel") return True def do_C(self): """Close tunnel""" logging.debug("Waited: %s" % self.log_ticks) self.log_ticks = "" logging.debug("Key pushed: C") if self.cur_pid != -1: # send a SIGKILL # the related process is stopped # FIXME SIGTERM or SIGKILL ? tunnel = self.tp.get_tunnel(self.cur_line) if type(tunnel) == AutoTunnel: logging.debug("SIGKILL on autossh PID: %i" % self.cur_pid) try: os.kill( self.cur_pid, signal.SIGKILL ) except OSError: logging.error("No such process: %i" % self.cur_pid) logging.debug("SIGKILL on ssh PID: %i" % tunnel.ssh_pid) try: os.kill( tunnel.ssh_pid, signal.SIGKILL ) except OSError: logging.error("No such process: %i" % tunnel.ssh_pid) self.cur_line = -1 self.cur_pid = -1 # FIXME update cur_pid or get rid of it everywhere return True def do_N(self): """Show connections""" logging.debug("Waited: %s" % self.log_ticks) self.log_ticks = "" logging.debug("Key pushed: N") self.show_connections = not self.show_connections return True def do_258(self): """Move down""" logging.debug("Waited: %s" % self.log_ticks) self.log_ticks = "" logging.debug("Key pushed: down") # if not the end of the list if self.cur_line < len(self.tp.tunnels)-1: self.cur_line += 1 # get the pid if type(self.tp.get_tunnel(self.cur_line)) == AutoTunnel: self.cur_pid = self.tp.get_tunnel(self.cur_line).autossh_pid else: self.cur_pid = self.tp.get_tunnel(self.cur_line).ssh_pid return True def do_259(self): """Move up""" 
logging.debug("Waited: %s" % self.log_ticks) self.log_ticks = "" logging.debug("Key pushed: up") if self.cur_line > -1: self.cur_line -= 1 if self.cur_line > 0: self.cur_pid = self.tp.get_tunnel(self.cur_line).pid return True def __call__(self): """Start the interface""" self.scr.clear() # clear all self.scr.nodelay(1) # non-bloking getch # first display self.display() # first update counter self.last_update = time.clock() self.last_state = None self.log_ticks = "" # infinite loop notquit = True while(notquit): # wait some time # necessary to not overload the system with unnecessary calls time.sleep( self.ui_delay ) # if its time to update if time.time() > self.last_update + self.update_delay: self.tp.update() # reset the counter self.last_update = time.time() state = "%s" % self.tp if state != self.last_state: logging.debug("Waited: %s" % self.log_ticks) self.log_ticks = "" logging.debug("----- Time of screen update: %s -----" % time.time()) logging.debug("State of tunnels:\n%s" % self.tp) self.last_state = state else: self.log_ticks += "." kc = self.scr.getch() # keycode if kc != -1: # if keypress pass ch = chr(0) if 0 < kc < 256: # if ascii key # ascii character from the keycode ch = chr(kc) # Call the do_* handler. 
fch = "do_%s" % ch.capitalize() fkc = "do_%i" % kc logging.debug("key func: %s / %s" % (fch,fkc)) if fch in dir(self): notquit = eval("self."+fch+"()") elif fkc in dir(self): notquit = eval("self."+fkc+"()") logging.debug("notquit = %s" % notquit) # update the display self.display() # force a screen refresh self.scr.refresh() # end of the loop def format(self): reps = [self.tp.tunnels[t].repr_tunnel() for t in self.tp.tunnels] tuns = [t.split() for t in reps] tuns.append(self.header) logging.debug(tuns) cols = zip(*tuns) widths = [max(len(s) for s in col) for col in cols] logging.debug(widths) fmt = ['{{: <{}}}'.format(w) for w in widths] logging.debug(fmt) return fmt def display(self): """Generate the interface screen""" # Automagically format help line with available do_* handlers. h = [] for f in dir(self): if "do_" in f: key = f.replace("do_","") if key.isalpha(): # We do not want arrows. msg = "[%s] %s" % (key,eval("self.%s.__doc__" % f)) h.append(msg) help_msg = ", ".join(h) help_msg += "\n" self.scr.addstr(0,0, help_msg, curses.color_pair(4) ) self.scr.clrtoeol() # Second line self.scr.addstr( "Active tunnels: ", curses.color_pair(6) ) self.scr.addstr( str( len(self.tp.tunnels) ), curses.color_pair(1) ) self.scr.addstr( " / Active connections: ", curses.color_pair(6) ) self.scr.addstr( str( sum([len(self.tp.tunnels[t].connections) for t in self.tp.tunnels]) ), curses.color_pair(1) ) self.scr.addstr( '\n', curses.color_pair(1) ) self.scr.clrtoeol() # if no line is selected color = 0 if self.cur_line==-1: # selected color for the header color = 9 self.cur_pid = -1 # header line # header_msg = "TYPE\tINPORT\tVIA \tTARGET \tOUTPORT" # if os.geteuid() == 0: header_msg = " ".join(self.format()).format(*self.header) header_msg += " CONNECTIONS" self.scr.addstr( header_msg, curses.color_pair(color) ) self.scr.clrtoeol() # for each tunnel processes available in the monitor for l in range(len(self.tp.tunnels)): # add a line for the l-th autossh process 
self.add_tunnel( l ) # if one want to show connections if self.show_connections:# and os.getuid() == 0: self.add_connection( l ) self.scr.clrtobot() def add_connection(self, line ): """Add lines for each connections related to the l-th autossh process""" colors = self.colors_connection # for each connections related to te line-th autossh process for t in sorted(self.tp.get_tunnel(line).connections, key=lambda c:c.status): # FIXME fail if the screen's height is too small. self.scr.addstr( '\n\t+ ' ) color = self.colors_connection['status'] # if the connections is established # TODO avoid hard-coded constants if t.status != 'ESTABLISHED' and t.status != 'LISTEN': color = self.colors_connection['status_out'] self.scr.addstr( t.status, curses.color_pair( color ) ) self.scr.addstr( '\t' ) # self.scr.addstr( str( t['ssh_pid'] ), curses.color_pair(colors['ssh_pid'] ) ) # self.scr.addstr( '\t' ) self.scr.addstr( str( t.local_address ) , curses.color_pair(colors['local_address'] )) self.scr.addstr( ':' ) self.scr.addstr( str( t.in_port ) , curses.color_pair(colors['in_port'] )) if t.foreign_address and t.out_port: self.scr.addstr( ' -> ' ) self.scr.addstr( str( t.foreign_address ) , curses.color_pair(colors['foreign_address'] )) self.scr.addstr( ':' ) self.scr.addstr( str( t.out_port ) , curses.color_pair(colors['out_port'] )) self.scr.clrtoeol() def add_tunnel(self, line): """Add line corresponding to the line-th autossh process""" self.scr.addstr( '\n' ) colors = self.colors_tunnel if self.cur_line == line: colors = self.colors_highlight if type(self.tp.get_tunnel(line)) == AutoTunnel: self.scr.addstr( self.format()[0].format('auto'), curses.color_pair(colors['kind_auto']) ) self.scr.addstr( ' ', curses.color_pair(colors['kind_auto']) ) else: self.scr.addstr( self.format()[0].format('ssh'), curses.color_pair(colors['kind_raw']) ) self.scr.addstr( ' ', curses.color_pair(colors['kind_raw']) ) # self.add_tunnel_info('ssh_pid', line) self.add_tunnel_info('ssh_pid', line, 1) 
self.add_tunnel_info('in_port', line, 2) self.add_tunnel_info('via_host', line, 3) self.add_tunnel_info('target_host', line, 4) self.add_tunnel_info('out_port', line, 5) nb = len(self.tp.get_tunnel(line).connections ) if nb > 0: # for each connection related to this process for i in self.tp.get_tunnel(line).connections: # add a vertical bar | # the color change according to the status of the connection if i.status == 'ESTABLISHED' or i.status == 'LISTEN': self.scr.addstr( '|', curses.color_pair(self.colors_connection['status']) ) else: self.scr.addstr( '|', curses.color_pair(self.colors_connection['status_out']) ) else: # if os.geteuid() == 0: # if there is no connection, display a "None" self.scr.addstr( 'None', curses.color_pair(self.colors_tunnel['tunnels_nb_none']) ) self.scr.clrtoeol() def add_tunnel_info( self, key, line, col ): """Add an information of an autossh process, in the configured color""" colors = self.colors_tunnel # if the line is selected if self.cur_line == line: # set the color to the highlight one colors = self.colors_highlight txt = eval("str(self.tp.get_tunnel(line).%s)" % key) if key == 'target_host' or key == 'via_host': txt = eval("str(self.tp.get_tunnel(line).%s)" % key) self.scr.addstr(self.format()[col].format(txt), curses.color_pair(colors[key]) ) self.scr.addstr( ' ', curses.color_pair(colors[key]) ) if __name__ == "__main__": import sys from optparse import OptionParser import configparser usage = """%prog [options] A user interface to monitor existing SSH tunnel that are managed with autossh. Called without options, ereshkigal displays a list of tunnels on the standard output. Note: Users other than root will not see tunnels connections. 
Version 0.3""" parser = OptionParser(usage=usage) parser.add_option("-c", "--curses", action="store_true", default=False, help="Start the user interface in text mode.") parser.add_option("-n", "--connections", action="store_true", default=False, help="Display only SSH connections related to a tunnel.") parser.add_option("-u", "--tunnels", action="store_true", default=False, help="Display only the list of tunnels processes.") LOG_LEVELS = {'error' : logging.ERROR, 'warning' : logging.WARNING, 'debug' : logging.DEBUG} parser.add_option('-l', '--log-level', choices=list(LOG_LEVELS), default='error', metavar='LEVEL', help='Log level (%s), default: %s.' % (", ".join(LOG_LEVELS), 'error') ) parser.add_option('-g', '--log-file', default=None, metavar='FILE', help="Log to this file, default to standard output. \ If you use the curses interface, you may want to set this to actually see logs.") parser.add_option('-f', '--config-file', default=None, metavar='FILE', help="Use this configuration file (default: '~/.ereshkigal.conf')") (asked_for, args) = parser.parse_args() logmsg = "----- Started Ereshkigal -----" if asked_for.log_file: logfile = asked_for.log_file logging.basicConfig(filename=logfile, level=LOG_LEVELS[asked_for.log_level]) logging.debug(logmsg) logging.debug("Log in %s" % logfile) else: if asked_for.curses: logging.warning("It's a bad idea to log to stdout while in the curses interface.") logging.basicConfig(level=LOG_LEVELS[asked_for.log_level]) logging.debug(logmsg) logging.debug("Log to stdout") logging.debug("Asked for: %s" % asked_for) # unfortunately, asked_for class has no __len__ method in python 2.4.3 (bug?) 
#if len(asked_for) > 1: # parser.error("asked_for are mutually exclusive") config = configparser.ConfigParser() if asked_for.config_file: try: config.read(asked_for.config_file) except configparser.MissingSectionHeaderError: logging.error("'%s' contains no known configuration" % asked_for.config_file) else: try: config.read('~/.ereshkigal.conf') except configparser.MissingSectionHeaderError: logging.error("'%s' contains no known configuration" % asked_for.config_file) # Load autossh instances by sections: [expected] # if config['expected']: if asked_for.curses: logging.debug("Entering curses mode") import curses import traceback try: scr = curses.initscr() curses.start_color() # 0:black, 1:red, 2:green, 3:yellow, 4:blue, 5:magenta, 6:cyan, 7:white curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK) curses.init_pair(2, curses.COLOR_GREEN, curses.COLOR_BLACK) curses.init_pair(3, curses.COLOR_YELLOW, curses.COLOR_BLACK) curses.init_pair(4, curses.COLOR_BLUE, curses.COLOR_BLACK) curses.init_pair(5, curses.COLOR_MAGENTA, curses.COLOR_BLACK) curses.init_pair(6, curses.COLOR_CYAN, curses.COLOR_BLACK) curses.init_pair(7, curses.COLOR_WHITE, curses.COLOR_BLACK) curses.init_pair(8, curses.COLOR_WHITE, curses.COLOR_GREEN) curses.init_pair(9, curses.COLOR_WHITE, curses.COLOR_BLUE) curses.noecho() curses.cbreak() scr.keypad(1) # create the monitor mc = CursesMonitor( scr ) # call the monitor mc() scr.keypad(0) curses.echo() curses.nocbreak() curses.endwin() except: # end cleanly scr.keypad(0) curses.echo() curses.nocbreak() curses.endwin() # print the traceback traceback.print_exc() elif asked_for.connections: logging.debug("Entering connections mode") tp = TunnelsParser() tp.update() # do not call update() but only get connections logging.debug("UID: %i." 
% os.geteuid()) # if os.geteuid() == 0: for t in tp.tunnels: for c in tp.tunnels[t].connections: print(tp.tunnels[t].ssh_pid, c) # else: # logging.error("Only root can see SSH tunnels connections.") elif asked_for.tunnels: logging.debug("Entering tunnel mode") tp = TunnelsParser() tp.update() # do not call update() bu only get autossh processes print(tp.header) for t in tp.tunnels: print(tp.tunnels[t].repr_tunnel()) else: logging.debug("Entering default mode") tp = TunnelsParser() # call update tp.update() # call the default __repr__ print(tp) # # In Mesopotamian mythology, Ereshkigal (lit. "great lady under earth") # was the goddess of Irkalla, the land of the dead or underworld. # # Thus, she knows a lot about tunnels... # # http://en.wikipedia.org/wiki/Ereshkigal #
gpl-3.0
-8,461,380,079,873,353,000
33.924202
169
0.545025
false
3.791396
true
false
false
greatguy45/webcrawler
selenium_dropdown_control.py
1
1594
from selenium import webdriver import requests from bs4 import BeautifulSoup page = requests.get("https://www.se.gob.ar/datosupstream/graf_prod_x_pozo.php?gas=1&ejecutar=1&vienede=&idpozo=") soup = BeautifulSoup(page.content, 'html.parser') #get all the year options year=soup.find("select",{"name":"anio"}) year_options = year.find_all("option") options1=[y.text for y in year_options] year_values = [o.get("value") for o in year_options] print ("list of all year available\n") for x in range(1,len(options1)): print (options1[x], year_values[x]) #get all the province options province=soup.find("select",{"name":"provincia"}) province_options = province.find_all("option") options2=[y.text for y in province_options] province_values = [o.get("value") for o in province_options] print ("list of all province available\n") for x in range(1,len(options2)): print (options2[x], province_values[x]) #get all the yacimiento options yacimiento=soup.find("select",{"name":"yacimiento"}) yacimiento_options = yacimiento.find_all("option") options3=[y.text for y in yacimiento_options] yacimiento_values = [o.get("value") for o in yacimiento_options] print ("list of all formation available\n") for x in range(1,len(options3)): print (options3[x], yacimiento_values[x]) #get all the pozo options pozo=soup.find("select",{"name":"pozo"}) pozo_options = pozo.find_all("option") options4=[y.text for y in pozo_options] pozo_values = [o.get("value") for o in pozo_options] print ("list of all pozo available\n") for x in range(1,len(options4)): print (options4[x], pozo_values[x])
gpl-3.0
-3,977,733,957,514,390,500
33.652174
113
0.723965
false
2.796491
false
false
false
dpgaspar/Flask-AppBuilder
examples/simpleform/config.py
1
3697
import os from flask_appbuilder.security.manager import ( AUTH_OID, AUTH_REMOTE_USER, AUTH_DB, AUTH_LDAP, AUTH_OAUTH, ) basedir = os.path.abspath(os.path.dirname(__file__)) # Your App secret key SECRET_KEY = "\2\1thisismyscretkey\1\2\e\y\y\h" # The SQLAlchemy connection string. SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(basedir, "app.db") # SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp' # SQLALCHEMY_DATABASE_URI = 'postgresql://root:password@localhost/myapp' # Flask-WTF flag for CSRF CSRF_ENABLED = True # ------------------------------ # GLOBALS FOR APP Builder # ------------------------------ # Uncomment to setup Your App name # APP_NAME = "My App Name" # Uncomment to setup Setup an App icon # APP_ICON = "static/img/logo.jpg" # ---------------------------------------------------- # AUTHENTICATION CONFIG # ---------------------------------------------------- # The authentication type # AUTH_OID : Is for OpenID # AUTH_DB : Is for database (username/password() # AUTH_LDAP : Is for LDAP # AUTH_REMOTE_USER : Is for using REMOTE_USER from web server AUTH_TYPE = AUTH_DB # Uncomment to setup Full admin role name # AUTH_ROLE_ADMIN = 'Admin' # Uncomment to setup Public role name, no authentication needed # AUTH_ROLE_PUBLIC = 'Public' # Will allow user self registration # AUTH_USER_REGISTRATION = True # The default user self registration role # AUTH_USER_REGISTRATION_ROLE = "Public" # When using LDAP Auth, setup the ldap server # AUTH_LDAP_SERVER = "ldap://ldapserver.new" # AUTH_LDAP_USE_TLS = False # Uncomment to setup OpenID providers example for OpenID authentication # OPENID_PROVIDERS = [ # { 'name': 'Yahoo', 'url': 'https://me.yahoo.com' }, # { 'name': 'AOL', 'url': 'http://openid.aol.com/<username>' }, # { 'name': 'Flickr', 'url': 'http://www.flickr.com/<username>' }, # { 'name': 'MyOpenID', 'url': 'https://www.myopenid.com' }] # --------------------------------------------------- # Babel config for translations # 
--------------------------------------------------- # Setup default language BABEL_DEFAULT_LOCALE = "en" # Your application default translation path BABEL_DEFAULT_FOLDER = "translations" # The allowed translation for you app LANGUAGES = { "en": {"flag": "gb", "name": "English"}, "pt": {"flag": "pt", "name": "Portuguese"}, "pt_BR": {"flag": "br", "name": "Pt Brazil"}, "es": {"flag": "es", "name": "Spanish"}, "de": {"flag": "de", "name": "German"}, "zh": {"flag": "cn", "name": "Chinese"}, "ru": {"flag": "ru", "name": "Russian"}, "pl": {"flag": "pl", "name": "Polish"}, } # --------------------------------------------------- # Image and file configuration # --------------------------------------------------- # The file upload folder, when using models with files UPLOAD_FOLDER = basedir + "/app/static/uploads/" # The image upload folder, when using models with images IMG_UPLOAD_FOLDER = basedir + "/app/static/uploads/" # The image upload url, when using models with images IMG_UPLOAD_URL = "/static/uploads/" # Setup image size default is (300, 200, True) # IMG_SIZE = (300, 200, True) # Theme configuration # these are located on static/appbuilder/css/themes # you can create your own and easily use them placing them on the same dir structure to override # APP_THEME = "bootstrap-theme.css" # default bootstrap # APP_THEME = "cerulean.css" # APP_THEME = "amelia.css" # APP_THEME = "cosmo.css" # APP_THEME = "cyborg.css" # APP_THEME = "flatly.css" # APP_THEME = "journal.css" # APP_THEME = "readable.css" # APP_THEME = "simplex.css" # APP_THEME = "slate.css" # APP_THEME = "spacelab.css" # APP_THEME = "united.css" # APP_THEME = "yeti.css"
bsd-3-clause
-7,382,459,781,208,628,000
32.306306
96
0.603192
false
3.257269
false
false
false
mganeva/mantid
scripts/Reflectometry/isis_reflectometry/settings.py
1
2193
# Mantid Repository : https://github.com/mantidproject/mantid # # Copyright &copy; 2018 ISIS Rutherford Appleton Laboratory UKRI, # NScD Oak Ridge National Laboratory, European Spallation Source # & Institut Laue - Langevin # SPDX - License - Identifier: GPL - 3.0 + from __future__ import (absolute_import, division, print_function) import xml.etree.ElementTree as XML import os.path class MissingSettings(Exception): pass class Settings(object): __contents = None __filename = None def __init__(self, filename = None): self.__filename = filename if not filename: filename = os.path.join( os.path.dirname(os.path.realpath(__file__)), "settings.xml") self.__check_file(filename) doc = None try: tree = XML.parse(filename) doc = tree.getroot() self.__extract_to_dictionary(doc) except: raise ValueError("The file %s does not contain valid XML" % filename) def __check_file(self, filename): path, extension = os.path.splitext(filename) if extension.lower().strip() != ".xml": raise ValueError("Wrong file extension. *.xml expected not %s." % extension) if not os.path.isfile(filename): ''' Deliberately swallow and re-throw at this point. Consise reinterpreted error, will be much nicer for client code.''' raise MissingSettings("Settings file %s does not exist so no manual settings will be applied." % filename) def __extract_to_dictionary(self, doc): temp = dict() for elem in doc: key = elem.attrib.get('name').strip() value = elem.text.strip() if not key: raise ValueError("Missing name attribute on Setting element") if not value: raise ValueError("Missing value for Setting element") temp[key] = value self.__contents = dict(frozenset(list(temp.items()))) def get_all_entries(self): return self.__contents def get_named_setting(self, name): return self.__contents[name] def get_contents_file(self): return self.__filename
gpl-3.0
-1,148,494,473,465,273,000
33.809524
132
0.620155
false
4.145558
false
false
false
krzjoa/sciquence
sciquence/representation/sax.py
1
3735
# -*- coding: utf-8 -*- # Krzysztof Joachimiak 2017 # sciquence: Time series & sequences in Python # # Symbolic Aggregate Approximation # Author: Krzysztof Joachimiak # # License: MIT import numpy as np import scipy.stats from sklearn.preprocessing import scale, StandardScaler from paa import paa from operator import add def sax(sequence, window, alphabet_size=5, adjust=True): ''' Symbolic Aggregate Approximation. Transform time series into a string. Parameters ---------- sequence: numpy.ndarray One-dimensional numpy array of arbitrary length window: int Length of sliding window alphabet_size: int Number of Gaussian breakpoints adjust: bool, default True Compute only for equal-size chunks Returns ------- sax_representation: str A SAX representation Examples -------- >>> import numpy as np >>> from sciquence.representation import sax >>> np.random.seed(42) >>> random_time_series = np.random.rand(50) >>> print sax(random_time_series, 10, alphabet_size=5) dcccc References ---------- .. [1] Lin, J., Keogh, E., Lonardi, S., & Chiu, B. (2003). A Symbolic Representation of Time Series, with Implications for Streaming Algorithms. In proceedings of the 8th ACM SIGMOD Workshop on Research Issues in Data Mining and Knowledge Discovery. http://www.cs.ucr.edu/~eamonn/SAX.pdf .. [2] http://www.cs.ucr.edu/~eamonn/SAX.htm .. [3] https://jmotif.github.io/sax-vsm_site/morea/algorithm/SAX.html ''' # TODO: check dimensionality, size, aphabet size etc. 
# Pre-step: checking if all arguments have proper values # First step: Standardization ( aka normalization, z-normalization or standard score) scaled = scale(sequence) # Second step: PAA paa_repr = paa(scaled, window=window, adjust=adjust) # Last step: breakpoints = gauss_breakpoints(alphabet_size) letters = _alphabet(alphabet_size) breakpoints= np.array(breakpoints) symbols = np.array(letters) return reduce(add, symbols[np.digitize(paa_repr, breakpoints)]) # =========== SAX object ============ # # TODO: consider: some classes should be both transformers and processors class SAX(object): def __init__(self, n_ranges=5, keep_scale=True): self.scaler = StandardScaler() self.breakpoints = gauss_breakpoints(n_ranges) def fit(self, X, y): return self def transform(self, X, y): pass def fit_transform(self, X, y): return self.fit(X, y).transform(X, y) # ================ UTILS ================ # def gauss_breakpoints(n_ranges): # TODO: move this function to utilities ''' Get quantiles of Gaussian distribution. Parameters ---------- n_ranges: int Number of equal ranges in Gaussian distribution Returns ------- breakpoints: list of float List of Gaussian quantiles ''' quantile_range = 1. / n_ranges quantiles = [quantile_range*i for i in range(1, n_ranges)] return [round(scipy.stats.norm.ppf(q), 2) for q in quantiles] def _alphabet(n_letters): import string return np.array(list(string.ascii_lowercase)[:n_letters]) def get_bins(sequence, breakpoints, symbols): breakpoints= np.array(breakpoints) symbols = np.array(symbols) return np.digitize(sequence, breakpoints)[symbols] if __name__ == '__main__': # rts = np.random.rand(20)*10 # saxed = sax(rts, 3) # print saxed #print gauss_breakpoints(10) #import scipy.stats #print scipy.stats.norm.ppf(1. / 3) random_ts = np.random.rand(30, 100) print random_ts
mit
257,375,761,614,011,420
24.236486
89
0.638554
false
3.647461
false
false
false
luster/oldnyc
generate_static_site.py
1
5564
#!/usr/bin/env python '''Generate a static version of oldnyc.org consisting entirely of JSON.''' import chardet from collections import defaultdict, OrderedDict import csv import json import record import re from distutils.dir_util import copy_tree from shutil import copyfile import subprocess import sys import time import os from ocr import cleaner import title_cleaner # Make sure the oldnyc.github.io repo is in a clean state. git_status = subprocess.check_output('git -C ../oldnyc.github.io status --porcelain'.split(' ')) if git_status.strip(): sys.stderr.write('Make sure the ../oldnyc.github.io repo exists and is clean.\n') sys.exit(1) # strip leading 'var popular_photos = ' and trailing ';' popular_photos = json.loads(open('viewer/static/js/popular-photos.js', 'rb').read()[20:-2]) pop_ids = {x['id'] for x in popular_photos} # strip leading 'var lat_lons = ' and trailing ';' lat_lon_to_ids = json.loads(open('viewer/static/js/nyc-lat-lons-ny.js', 'rb').read()[15:-1]) rs = record.AllRecords('nyc/photos.pickle') id_to_record = {r.photo_id(): r for r in rs} id_to_dims = {} for photo_id, width, height in csv.reader(open('nyc-image-sizes.txt')): id_to_dims[photo_id] = (int(width), int(height)) # rotated images based on user feedback user_rotations = json.load(open('analysis/rotations/rotations.json')) id_to_rotation = user_rotations['fixes'] # Load the previous iteration of OCR. Corrections are applied on top of # this. 
old_data = json.load(open('../oldnyc.github.io/data.json', 'rb')) old_photo_id_to_text = {r['photo_id']: r['text'] for r in old_data['photos'] if r['text']} manual_ocr_fixes = json.load(open('ocr/feedback/fixes.json', 'rb')) back_id_to_correction = manual_ocr_fixes['fixes'] id_to_text = {} for photo_id in id_to_record.iterkeys(): back_id = re.sub(r'f?(?:-[a-z])?$', 'b', photo_id) if photo_id in old_photo_id_to_text: id_to_text[photo_id] = old_photo_id_to_text[photo_id] if back_id in back_id_to_correction: id_to_text[photo_id] = back_id_to_correction[back_id] # (This was only helpful on the initial run, when data came straight from # Ocropus.) # for k, txt in id_to_text.iteritems(): # id_to_text[k] = cleaner.clean(txt) back_id_to_text = None # clear def image_url(photo_id, is_thumb): degrees = id_to_rotation.get(photo_id) if not degrees: return 'http://oldnyc-assets.nypl.org/%s/%s.jpg' % ( 'thumb' if is_thumb else '600px', photo_id) else: return 'http://www.oldnyc.org/rotated-assets/%s/%s.%s.jpg' % ( 'thumb' if is_thumb else '600px', photo_id, degrees) def decode(b): try: return b.decode('utf8') except UnicodeDecodeError: return b.decode(chardet.detect(b)['encoding']) def make_response(photo_ids): response = OrderedDict() for photo_id in photo_ids: r = id_to_record[photo_id] w, h = id_to_dims[photo_id] ocr_text = id_to_text.get(photo_id) # See also viewer/app.py title = decode(r.title()) original_title = None if title_cleaner.is_pure_location(title): original_title = title title = '' assert r.description() == '' assert r.note() == '' rotation = id_to_rotation.get(photo_id) if rotation and (rotation % 180 == 90): w, h = h, w response[photo_id] = { 'title': title, 'date': re.sub(r'\s+', ' ', r.date()), 'folder': decode(r.location()), 'width': w, 'height': h, 'text': ocr_text, 'image_url': image_url(photo_id, is_thumb=False), 'thumb_url': image_url(photo_id, is_thumb=True) } if original_title: response[photo_id]['original_title'] = original_title if rotation: 
response[photo_id]['rotation'] = rotation return response all_photos = [] latlon_to_count = {} id4_to_latlon = defaultdict(lambda: {}) # first 4 of id -> id -> latlon for latlon, photo_ids in lat_lon_to_ids.iteritems(): outfile = '../oldnyc.github.io/by-location/%s.json' % latlon.replace(',', '') response = make_response(photo_ids) latlon_to_count[latlon] = len(response) json.dump(response, open(outfile, 'wb'), indent=2) for id_ in photo_ids: id4_to_latlon[id_[:4]][id_] = latlon for photo_id, response in response.iteritems(): lat, lon = [float(x) for x in latlon.split(',')] response['photo_id'] = photo_id response['location'] = { 'lat': lat, 'lon': lon } response['width'] = int(response['width']) response['height'] = int(response['height']) all_photos.append(response) json.dump(make_response(pop_ids), open('../oldnyc.github.io/popular.json', 'wb'), indent=2) with open('../oldnyc.github.io/lat-lon-counts.js', 'wb') as f: f.write('var lat_lons = %s;' % json.dumps(latlon_to_count, indent=2)) for id4, id_to_latlon in id4_to_latlon.iteritems(): json.dump(id_to_latlon, open('../oldnyc.github.io/id4-to-location/%s.json' % id4, 'wb'), indent=2) # Complete data dump all_photos.sort(key=lambda photo: photo['photo_id']) json.dump({ 'photos': all_photos, 'timestamp': time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime()), 'rotation_time': user_rotations['last_date'], 'ocr_time': manual_ocr_fixes['last_date'] }, open('../oldnyc.github.io/data.json', 'wb'), indent=2)
apache-2.0
-9,218,727,179,228,314,000
32.721212
96
0.612689
false
3.103179
false
false
false
VisualComputingInstitute/TrackR-CNN
datasets/Mapillary/Mapillary_instance.py
1
1930
from datasets.Loader import register_dataset from datasets.Mapillary.MapillaryLike_instance import MapillaryLikeInstanceDataset from datasets.util.Util import username DEFAULT_PATH = "/fastwork/" + username() + "/mywork/data/mapillary/" NAME = "mapillary_instance" @register_dataset("mapillary_instance_full", resolution="full") @register_dataset("mapillary_instance_half", resolution="half") @register_dataset("mapillary_instance_quarter", resolution="quarter") class MapillaryInstanceDataset(MapillaryLikeInstanceDataset): def __init__(self, config, subset, resolution): assert resolution in ("quarter", "half", "full"), resolution if resolution == "full": default_path = DEFAULT_PATH else: default_path = DEFAULT_PATH.replace("/mapillary/", "/mapillary_{}/".format(resolution)) # there are 37 classes with instances in total # we excluded the following: # 8: construction--flat--crosswalk-plain -> doesn't really look like a useful object category # 34: object--bike-rack -> holes* # 45: object--support--pole -> very large and thin -> bounding box does not capture it well # 46: object--support--traffic-sign-frame -> holes* # 47: object--support--utility-pole -> holes* # further candidate for exclusion: # 0: animal--bird -> usually very small # *: holes means that there are large "holes" in the object which usually are still annotated as part of the object # this will not work well together with laser, so we exclude them vehicle_ids = [52, 53, 54, 55, 56, 57, 59, 60, 61, 62] human_ids = [19, 20, 21, 22] animal_ids = [0, 1] object_ids = [32, 33, 35, 36, 37, 38, 39, 40, 41, 42, 44, 48, 49, 50, 51] crosswalk_zebra_id = [23] cat_ids_to_use = vehicle_ids + human_ids + animal_ids + object_ids + crosswalk_zebra_id super().__init__(config, subset, NAME, default_path, "datasets/Mapillary/", 256, cat_ids_to_use)
mit
4,730,217,750,433,129,000
46.073171
119
0.691192
false
3.509091
false
false
false
evamy/learning_to_see
VAProject/eyes.py
1
1174
""" @author: Antriksh Agarwal Version 0: 04/29/2018 """ import cv2 import numpy as np from utils import * import time eyeCascade = cv2.CascadeClassifier('models/eyes.xml') def detect_eyes(image): image = cv2.resize(image, (0, 0), fx=4, fy=4) # start = time.time() eyes = eyeCascade.detectMultiScale( image, scaleFactor=2.5, minNeighbors=5) # print "Eye Time: ", time.time() - start eyes = non_max_suppression(eyes, overlapThresh=0.5) # cv2.rectangle(image, (x, y), (x + w, y + h), (69, 165, 255), 2) return eyes def video_capture(): cap = cv2.VideoCapture(0) while(1): # for frame in imgs: _, frame = cap.read() # frame = cv2.imread(frame) image = cv2.resize(frame, (0, 0), fx=0.5, fy=0.5) eyes = detect_eyes(image) for eye in eyes: (xe, ye, we, he) = eye cv2.rectangle(image, (xe, ye), (xe + we, ye + he), (255, 0, 255), 3) cv2.imshow("Eye detection", image) # cv2.waitKey(0) if cv2.waitKey(1) & 0xFF == ord('q'): break if __name__ == '__main__': video_capture()
gpl-3.0
7,997,997,112,329,743,000
21.576923
73
0.539182
false
2.863415
false
false
false
avr-aics-riken/SURFACE
glsl/sexps.py
1
4142
# coding=utf-8 # # Copyright © 2011 Intel Corporation # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice (including the next # paragraph) shall be included in all copies or substantial portions of the # Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. # This file contains helper functions for manipulating sexps in Python. # # We represent a sexp in Python using nested lists containing strings. # So, for example, the sexp (constant float (1.000000)) is represented # as ['constant', 'float', ['1.000000']]. import re def check_sexp(sexp): """Verify that the argument is a proper sexp. That is, raise an exception if the argument is not a string or a list, or if it contains anything that is not a string or a list at any nesting level. """ if isinstance(sexp, list): for s in sexp: check_sexp(s) elif not isinstance(sexp, basestring): raise Exception('Not a sexp: {0!r}'.format(sexp)) def parse_sexp(sexp): """Convert a string, of the form that would be output by mesa, into a sexp represented as nested lists containing strings. 
""" #sexp_token_regexp = re.compile( # '[a-zA-Z_]+(@[0-9]+)?|[0-9]+(\\.[0-9]+)?|[^ \n]') # symbol may include '@' sexp_token_regexp = re.compile( '[a-zA-Z_][a-zA-Z_@0-9]*|[-+]*[0-9]+(\\.[0-9]+)?|[\|\&\+\-<>=]+|[^ \r\n]') stack = [[]] for match in sexp_token_regexp.finditer(sexp): token = match.group(0) if token == '\t': continue # skip elif token == '(': stack.append([]) elif token == ')': if len(stack) == 1: raise Exception('Unmatched )') sexp = stack.pop() stack[-1].append(sexp) else: # escape '@' to '__' t = re.sub('@', '__', token) stack[-1].append(t) if len(stack) != 1: raise Exception('Unmatched (') if len(stack[0]) != 1: # flatten last element sexp = stack[0].pop() for exp in sexp: stack[0].append(exp) return stack[0] else: return stack[0][0] def sexp_to_string(sexp): """Convert a sexp, represented as nested lists containing strings, into a single string of the form parseable by mesa. """ if isinstance(sexp, basestring): return sexp assert isinstance(sexp, list) result = '' for s in sexp: sub_result = sexp_to_string(s) if result == '': result = sub_result elif '\n' not in result and '\n' not in sub_result and \ len(result) + len(sub_result) + 1 <= 70: result += ' ' + sub_result else: result += '\n' + sub_result return '({0})'.format(result.replace('\n', '\n ')) def sort_decls(sexp): """Sort all toplevel variable declarations in sexp. This is used to work around the fact that ir_reader::read_instructions reorders declarations. """ assert isinstance(sexp, list) decls = [] other_code = [] for s in sexp: if isinstance(s, list) and len(s) >= 4 and s[0] == 'declare': decls.append(s) else: other_code.append(s) return sorted(decls) + other_code
bsd-2-clause
3,785,125,142,359,206,000
33.798319
82
0.612171
false
3.757713
false
false
false
nischalsheth/contrail-controller
src/config/common/utils.py
1
6932
#!/usr/bin/python # -*- coding: utf-8 -*- # vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (c) 2015 Juniper Networks # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # @author: Numan Siddique, eNovance. import os import errno import urllib from collections import OrderedDict import sys import cStringIO import logging from ConfigParser import NoOptionError from cfgm_common import vnc_cgitb _DEFAULT_USER_DOMAIN_NAME = 'Default' _DEFAULT_DOMAIN_ID = 'default' def cgitb_hook(info=None, **kwargs): vnc_cgitb.Hook(**kwargs).handle(info or sys.exc_info()) # end cgitb_hook def detailed_traceback(): buf = cStringIO.StringIO() cgitb_hook(format="text", file=buf) tb_txt = buf.getvalue() buf.close() return tb_txt # end detailed_traceback def encode_string(enc_str, encoding='utf-8'): """Encode the string using urllib.quote_plus Eg. @input: enc_str = 'neté type - 'unicode' or 'str' @retval enc_str = 'net%C3%A9%C3%B9' type - str """ try: enc_str.encode() except (UnicodeDecodeError, UnicodeEncodeError): if type(enc_str) is unicode: enc_str = enc_str.encode(encoding) enc_str = urllib.quote_plus(enc_str) except Exception: pass return enc_str def decode_string(dec_str, encoding='utf-8'): """Decode the string previously encoded using urllib.quote_plus. Eg. 
If dec_str = 'net%C3%A9%C3%B9' type - 'unicode' or 'str' @retval ret_dec_str = 'neté type - unicode """ ret_dec_str = dec_str try: if type(ret_dec_str) is unicode: ret_dec_str = str(ret_dec_str) ret_dec_str = urllib.unquote_plus(ret_dec_str) return ret_dec_str.decode(encoding) except Exception: return dec_str class CacheContainer(object): def __init__(self, size): self.container_size = size self.dictionary = OrderedDict() def __getitem__(self, key, default=None): value = self.dictionary[key] # item accessed - put it in the front del self.dictionary[key] self.dictionary[key] = value return value def __setitem__(self, key, value): self.dictionary[key] = value if len(self.dictionary.keys()) > self.container_size: # container is full, loose the least used item self.dictionary.popitem(last=False) def __contains__(self, key): return key in self.dictionary def __repr__(self): return str(self.dictionary) def CamelCase(input): words = input.replace('_', '-').split('-') name = '' for w in words: name += w.capitalize() return name # end CamelCase def str_to_class(class_name, module_name): try: return reduce(getattr, class_name.split("."), sys.modules[module_name]) except Exception as e: logger = logging.getLogger(module_name) logger.warn("Exception: %s", str(e)) return None # end str_to_class def obj_type_to_vnc_class(obj_type, module_name): return str_to_class(CamelCase(obj_type), module_name) # end obj_type_to_vnc_class def getCertKeyCaBundle(bundle, certs): if os.path.isfile(bundle): # Check if bundle needs to be replaced if # constituent files were updated bundle_is_stale = False bundle_mod_time = os.path.getmtime(bundle) for cert in certs: if os.path.getmtime(cert) > bundle_mod_time: bundle_is_stale = True break if not bundle_is_stale: return bundle try: os.makedirs(os.path.dirname(bundle)) except OSError as e: if e.errno != errno.EEXIST: raise with open(bundle, 'w') as ofile: for cert in certs: with open(cert) as ifile: for line in ifile: ofile.write(line) 
os.chmod(bundle,0o777) return bundle # end CreateCertKeyCaBundle # <uuid> | "tenant-"<uuid> | "domain-"<uuid> def shareinfo_from_perms2_tenant(field): x = field.split(":") if len(x) == 1: x.insert(0, "tenant") return x # end def shareinfo_from_perms2(field): x = field.split(":") if len(x) == 2: x.insert(0, "tenant") return x # end def compare_refs(old_refs, new_refs): # compare refs in an object old_ref_dict = dict((':'.join(ref['to']), ref.get('attr')) for ref in old_refs or []) new_ref_dict = dict((':'.join(ref['to']), ref.get('attr')) for ref in new_refs or []) return old_ref_dict == new_ref_dict # end compare_refs def get_arg(args, name, default=None): try: kwarg = {name: eval('args.%s' % name)} except AttributeError: try: kwarg = {name: args.get('KEYSTONE', name)} except (NoOptionError, AttributeError): kwarg = {name: default} return kwarg # end get_arg def get_user_domain_kwargs(args): user_domain = get_arg(args, 'user_domain_id') if not user_domain.get('user_domain_id'): user_domain = get_arg(args, 'user_domain_name', _DEFAULT_USER_DOMAIN_NAME) return user_domain # end get_user_domain_kwargs def get_project_scope_kwargs(args): scope_kwargs = {} project_domain_name = get_arg(args, 'project_domain_name') project_domain_id = get_arg(args, 'project_domain_id') if project_domain_name.get('project_domain_name'): # use project domain name scope_kwargs.update(**project_domain_name) elif project_domain_id.get('project_domain_id'): # use project domain id scope_kwargs.update(**project_domain_id) if scope_kwargs: admin_tenant_name = get_arg(args, 'admin_tenant_name')['admin_tenant_name'] project_name = get_arg(args, 'project_name', admin_tenant_name) scope_kwargs.update(project_name) return scope_kwargs # end get_project_scope_kwargs def get_domain_scope_kwargs(args): scope_kwargs = {} domain_name = get_arg(args, 'domain_name') domain_id = get_arg(args, 'domain_id', _DEFAULT_DOMAIN_ID) if domain_name.get('domain_name'): # use domain name 
scope_kwargs.update(**domain_name) elif domain_id.get('domain_id'): # use domain id scope_kwargs.update(**domain_id) return scope_kwargs # end get_domain_scope_kwargs
apache-2.0
4,898,338,699,762,080,000
27.285714
89
0.622655
false
3.505311
false
false
false
chop-dbhi/varify-data-warehouse
vdw/variants/migrations/0004_auto__chg_field_evs_aa_ac_alt__chg_field_evs_ea_ac_alt__chg_field_evs_.py
1
18632
# encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Changing field 'EVS.aa_ac_alt' db.alter_column('evs', 'aa_ac_alt', self.gf('django.db.models.fields.CharField')(max_length=20, null=True)) # Changing field 'EVS.ea_ac_alt' db.alter_column('evs', 'ea_ac_alt', self.gf('django.db.models.fields.CharField')(max_length=20, null=True)) # Changing field 'EVS.aa_ac_ref' db.alter_column('evs', 'aa_ac_ref', self.gf('django.db.models.fields.CharField')(max_length=20, null=True)) # Changing field 'EVS.all_ac_ref' db.alter_column('evs', 'all_ac_ref', self.gf('django.db.models.fields.CharField')(max_length=20, null=True)) # Changing field 'EVS.all_ac_alt' db.alter_column('evs', 'all_ac_alt', self.gf('django.db.models.fields.CharField')(max_length=20, null=True)) def backwards(self, orm): # Changing field 'EVS.aa_ac_alt' db.alter_column('evs', 'aa_ac_alt', self.gf('django.db.models.fields.IntegerField')(max_length=20, null=True)) # Changing field 'EVS.ea_ac_alt' db.alter_column('evs', 'ea_ac_alt', self.gf('django.db.models.fields.IntegerField')(max_length=20, null=True)) # Changing field 'EVS.aa_ac_ref' db.alter_column('evs', 'aa_ac_ref', self.gf('django.db.models.fields.IntegerField')(max_length=20, null=True)) # Changing field 'EVS.all_ac_ref' db.alter_column('evs', 'all_ac_ref', self.gf('django.db.models.fields.IntegerField')(max_length=20, null=True)) # Changing field 'EVS.all_ac_alt' db.alter_column('evs', 'all_ac_alt', self.gf('django.db.models.fields.IntegerField')(max_length=20, null=True)) models = { 'genes.exon': { 'Meta': {'object_name': 'Exon', 'db_table': "'exon'"}, 'end': ('django.db.models.fields.IntegerField', [], {}), 'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'index': ('django.db.models.fields.IntegerField', [], 
{}), 'start': ('django.db.models.fields.IntegerField', [], {}) }, 'genes.gene': { 'Meta': {'object_name': 'Gene', 'db_table': "'gene'"}, 'articles': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['literature.PubMed']", 'db_table': "'gene_pubmed'", 'symmetrical': 'False'}), 'chr': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genome.Chromosome']"}), 'families': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['genes.GeneFamily']", 'symmetrical': 'False', 'blank': 'True'}), 'hgnc_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'phenotypes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['phenotypes.Phenotype']", 'through': "orm['genes.GenePhenotype']", 'symmetrical': 'False'}), 'symbol': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'synonyms': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['genes.Synonym']", 'db_table': "'gene_synonym'", 'symmetrical': 'False'}) }, 'genes.genefamily': { 'Meta': {'object_name': 'GeneFamily', 'db_table': "'gene_family'"}, 'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'tag': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True'}) }, 'genes.genephenotype': { 'Meta': {'object_name': 'GenePhenotype', 'db_table': "'gene_phenotype'"}, 'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']"}), 'hgmd_id': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'phenotype': ('django.db.models.fields.related.ForeignKey', [], {'to': 
"orm['phenotypes.Phenotype']"}) }, 'genes.synonym': { 'Meta': {'object_name': 'Synonym', 'db_table': "'synonym'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}) }, 'genes.transcript': { 'Meta': {'object_name': 'Transcript', 'db_table': "'transcript'"}, 'coding_end': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'coding_end_status': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), 'coding_start': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'coding_start_status': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), 'end': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'exon_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'exons': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['genes.Exon']", 'db_table': "'transcript_exon'", 'symmetrical': 'False'}), 'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']", 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'refseq_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}), 'start': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'strand': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}) }, 'genome.chromosome': { 'Meta': {'ordering': "['order']", 'object_name': 'Chromosome', 'db_table': "'chromosome'"}, 'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'label': ('django.db.models.fields.CharField', [], {'max_length': '2'}), 'order': 
('django.db.models.fields.IntegerField', [], {'default': '0'}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_index': 'True'}) }, 'literature.pubmed': { 'Meta': {'object_name': 'PubMed', 'db_table': "'pubmed'"}, 'pmid': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}) }, 'phenotypes.phenotype': { 'Meta': {'object_name': 'Phenotype', 'db_table': "'phenotype'"}, 'articles': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['literature.PubMed']", 'symmetrical': 'False'}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'hpo_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'term': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '1000'}) }, 'variants.effect': { 'Meta': {'ordering': "['order']", 'object_name': 'Effect', 'db_table': "'effect'"}, 'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'impact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.EffectImpact']", 'null': 'True', 'blank': 'True'}), 'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.EffectRegion']", 'null': 'True', 'blank': 'True'}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'variants.effectimpact': { 'Meta': {'ordering': "['order']", 'object_name': 'EffectImpact', 'db_table': "'effect_impact'"}, 'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'label': 
('django.db.models.fields.CharField', [], {'max_length': '100'}), 'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'variants.effectregion': { 'Meta': {'ordering': "['order']", 'object_name': 'EffectRegion', 'db_table': "'effect_region'"}, 'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'variants.evs': { 'Meta': {'object_name': 'EVS', 'db_table': "'evs'"}, 'aa_ac_alt': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}), 'aa_ac_ref': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}), 'aa_gtc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}), 'aa_maf': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}), 'all_ac_alt': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}), 'all_ac_ref': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}), 'all_gtc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}), 'all_maf': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}), 'clinical_association': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'ea_ac_alt': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}), 'ea_ac_ref': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}), 'ea_gtc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}), 'ea_maf': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}), 'gts': 
('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'read_depth': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'evs'", 'to': "orm['variants.Variant']"}) }, 'variants.functionalclass': { 'Meta': {'ordering': "['order']", 'object_name': 'FunctionalClass', 'db_table': "'variant_functional_class'"}, 'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'variants.polyphen2': { 'Meta': {'object_name': 'PolyPhen2', 'db_table': "'polyphen2'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'refaa': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}), 'score': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}), 'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polyphen2'", 'to': "orm['variants.Variant']"}) }, 'variants.sift': { 'Meta': {'object_name': 'Sift', 'db_table': "'sift'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'refaa': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}), 'score': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}), 'varaa': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}), 'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sift'", 'to': "orm['variants.Variant']"}) }, 'variants.thousandg': { 'Meta': {'object_name': 'ThousandG', 'db_table': "'1000g'"}, 'aa': 
('django.db.models.fields.TextField', [], {'null': 'True'}), 'ac': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}), 'afr_af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}), 'amr_af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}), 'an': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'asn_af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}), 'eur_af': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'variant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'thousandg'", 'to': "orm['variants.Variant']"}) }, 'variants.variant': { 'Meta': {'unique_together': "(('chr', 'pos', 'ref', 'alt'),)", 'object_name': 'Variant', 'db_table': "'variant'"}, 'alt': ('django.db.models.fields.TextField', [], {'db_index': 'True'}), 'articles': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['literature.PubMed']", 'db_table': "'variant_pubmed'", 'symmetrical': 'False'}), 'chr': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genome.Chromosome']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'liftover': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}), 'md5': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'phenotypes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['phenotypes.Phenotype']", 'through': "orm['variants.VariantPhenotype']", 'symmetrical': 'False'}), 'pos': ('django.db.models.fields.IntegerField', [], {}), 'ref': ('django.db.models.fields.TextField', [], {'db_index': 'True'}), 'rsid': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'type': 
('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.VariantType']", 'null': 'True'}) }, 'variants.varianteffect': { 'Meta': {'object_name': 'VariantEffect', 'db_table': "'variant_effect'"}, 'amino_acid_change': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'codon_change': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'effect': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.Effect']", 'null': 'True', 'blank': 'True'}), 'exon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Exon']", 'null': 'True', 'blank': 'True'}), 'functional_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.FunctionalClass']", 'null': 'True', 'blank': 'True'}), 'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']", 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'transcript': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Transcript']", 'null': 'True', 'blank': 'True'}), 'variant': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'effects'", 'null': 'True', 'to': "orm['variants.Variant']"}) }, 'variants.variantphenotype': { 'Meta': {'object_name': 'VariantPhenotype', 'db_table': "'variant_phenotype'"}, 'hgmd_id': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'phenotype': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['phenotypes.Phenotype']"}), 'variant': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.Variant']"}) }, 'variants.varianttype': { 'Meta': {'ordering': "['order']", 'object_name': 'VariantType', 'db_table': "'variant_type'"}, 'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'label': ('django.db.models.fields.CharField', [], {'max_length': '20'}), 'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '20'}) } } complete_apps = ['variants']
bsd-2-clause
-7,884,365,182,716,240,000
75.04898
192
0.543581
false
3.461267
false
false
false
dkrisman/Traipse
mercurial/portable_hgweb/wsgicgi.py
1
2280
# hgweb/wsgicgi.py - CGI->WSGI translator # # Copyright 2006 Eric Hopper <hopper@omnifarious.org> # # This software may be used and distributed according to the terms of the # GNU General Public License version 2, incorporated herein by reference. # # This was originally copied from the public domain code at # http://www.python.org/dev/peps/pep-0333/#the-server-gateway-side import os, sys from upmana.mercurial import util def launch(application): util.set_binary(sys.stdin) util.set_binary(sys.stdout) environ = dict(os.environ.iteritems()) environ.setdefault('PATH_INFO', '') if '.cgi' in environ['PATH_INFO']: environ['PATH_INFO'] = environ['PATH_INFO'].split('.cgi', 1)[1] environ['wsgi.input'] = sys.stdin environ['wsgi.errors'] = sys.stderr environ['wsgi.version'] = (1, 0) environ['wsgi.multithread'] = False environ['wsgi.multiprocess'] = True environ['wsgi.run_once'] = True if environ.get('HTTPS','off').lower() in ('on','1','yes'): environ['wsgi.url_scheme'] = 'https' else: environ['wsgi.url_scheme'] = 'http' headers_set = [] headers_sent = [] out = sys.stdout def write(data): if not headers_set: raise AssertionError("write() before start_response()") elif not headers_sent: # Before the first output, send the stored headers status, response_headers = headers_sent[:] = headers_set out.write('Status: %s\r\n' % status) for header in response_headers: out.write('%s: %s\r\n' % header) out.write('\r\n') out.write(data) out.flush() def start_response(status, response_headers, exc_info=None): if exc_info: try: if headers_sent: # Re-raise original exception if headers sent raise exc_info[0](exc_info[1], exc_info[2]) finally: exc_info = None # avoid dangling circular ref elif headers_set: raise AssertionError("Headers already set!") headers_set[:] = [status, response_headers] return write content = application(environ, start_response) for chunk in content: write(chunk)
gpl-2.0
6,717,463,400,556,089,000
31.571429
73
0.605263
false
3.819095
false
false
false
blxlrsmb/myap.ml
apml-client/pack.py
1
1076
#!/usr/bin/env python2 # -*- coding: UTF-8 -*- # File: pack.py # Date: Sat Jun 06 16:35:44 2015 +0800 # Author: Yuxin Wu <ppwwyyxxc@gmail.com> from collections import defaultdict class EventPacker(object): """ Only record and pack result in a time interval""" def __init__(self): self.key_cnt = defaultdict(int) self.mouse_cnt = defaultdict(int) self.last_time = None self.start = None def count(self): return sum(self.key_cnt.itervalues()) \ + sum(self.mouse_cnt.itervalues()) def add_key(self, time, window): if not self.start: self.start = time self.last_time = time self.key_cnt[window] += 1 def add_mouse(self, time, window): if not self.start: self.start = time self.last_time = time self.mouse_cnt[window] += 1 def dump(self): dic = {'mouse': dict(self.mouse_cnt), 'key': dict(self.key_cnt), 'start': self.start, 'end': self.last_time} return dic
mit
7,399,868,955,988,437,000
25.243902
57
0.557621
false
3.539474
false
false
false
cooljeanius/emacs
build-aux/vcstocl/vcs_git.py
1
6308
# Git repo support.
# Copyright (C) 2019-2020 Free Software Foundation, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import subprocess
import re

# NOTE(review): this star import presumably supplies decode(), eprint(),
# analyze_diff() and possibly sys -- confirm against vcstocl/misc_util.
from vcstocl.misc_util import *

class GitRepo:
    """ChangeLog-oriented view of a git repository.

    Wraps git subprocess calls to list commits and print per-commit
    change summaries in GNU ChangeLog format.
    """

    def __init__(self, ignore_list, debug):
        # ignore_list: substrings; any changed path containing one of
        # them is dropped from the ChangeLog output.
        self.ignore_list = ignore_list
        # debug: object with a print() method used for tracing commands.
        self.debug = debug

    def exec_git_cmd(self, args):
        ''' Execute a git command and return its result as a list of strings.
        '''
        # Note: mutates the caller's list by prepending 'git'.
        args.insert(0, 'git')
        self.debug.print(args)
        proc = subprocess.Popen(args, stdout=subprocess.PIPE)

        # Clean up the output by removing trailing spaces, newlines and dropping
        # blank lines.
        op = [decode(x[:-1]).strip() for x in proc.stdout]
        # Collapse runs of whitespace/formfeeds into single spaces.
        op = [re.sub(r'[\s\f]+', ' ', x) for x in op]
        op = [x for x in op if x]

        return op

    def list_changes(self, commit, frontends):
        ''' List changes in a single commit.

        For the input commit id COMMIT, identify the files that have
        changed and the nature of their changes.  Print commit
        information in the ChangeLog format, calling into helper
        functions as necessary.
        '''

        op = self.exec_git_cmd(['show', '--pretty=fuller', '--date=short', '--raw', commit])

        authors = []
        date = ''
        merge = False
        copyright_exempt=''
        subject= ''

        # First pass: pull author(s), commit date, merge flag,
        # copyright-exemption trailer and subject out of the header.
        for l in op:
            if l.lower().find('copyright-paperwork-exempt:') == 0 \
                    and 'yes' in l.lower():
                copyright_exempt=' (tiny change)'
            elif l.lower().find('co-authored-by:') == 0 or \
                    l.find('Author:') == 0:
                author = l.split(':')[1]
                # Normalize to "Name <email>" with a single space.
                author = re.sub(r'([^ ]*)\s*(<.*)', r'\1 \2', author.strip())
                authors.append(author)
            elif l.find('CommitDate:') == 0:
                date = l[11:].strip()
            elif l.find('Merge:') == 0:
                merge = True
            elif not subject and date:
                # First non-header line after the date is the subject.
                subject = l.strip()

        # Find raw commit information for all non-ChangeLog files.
        op = [x[1:] for x in op if len(x) > 0 and re.match(r'^:[0-9]+', x)]

        # Skip all ignored files.
        for ign in self.ignore_list:
            op = [x for x in op if ign not in x]

        # It was only the ChangeLog, ignore.
        if len(op) == 0:
            return

        # ChangeLog header: date, primary author, then co-authors.
        print('%s %s' % (date, authors[0]))

        if (len(authors) > 1):
            authors = authors[1:]
            for author in authors:
                print(' %s' % author)

        print()

        if merge:
            print('\t MERGE COMMIT: %s\n' % commit)
            return

        print('\tCOMMIT%s: %s\n\t%s\n' % (copyright_exempt, commit, subject))

        # Changes across a large number of files are typically mechanical (URL
        # updates, copyright notice changes, etc.) and likely not interesting
        # enough to produce a detailed ChangeLog entry.
        if len(op) > 100:
            print('\t* Suppressing diff as too many files differ.\n')
            return

        # Each of these lines has a space separated format like so:
        # :<OLD MODE> <NEW MODE> <OLD REF> <NEW REF> <OPERATION> <FILE1> <FILE2>
        #
        # where OPERATION can be one of the following:
        # A: File added
        # D: File removed
        # M[0-9]{3}: File modified
        # R[0-9]{3}: File renamed, with the 3 digit number following it indicating
        # what percentage of the file is intact.
        # C[0-9]{3}: File copied.  Same semantics as R.
        # T: The permission bits of the file changed
        # U: Unmerged.  We should not encounter this, so we ignore it/
        # X, or anything else: Most likely a bug.  Report it.
        #
        # FILE2 is set only when OPERATION is R or C, to indicate the new file name.
        #
        # Also note that merge commits have a different format here, with three
        # entries each for the modes and refs, but we don't bother with it for now.
        #
        # For more details: https://git-scm.com/docs/diff-format
        for f in op:
            data = f.split()
            if data[4] == 'A':
                print('\t* %s: New file.' % data[5])
            elif data[4] == 'D':
                print('\t* %s: Delete file.' % data[5])
            elif data[4] == 'T':
                print('\t* %s: Changed file permission bits from %s to %s' % \
                        (data[5], data[0], data[1]))
            elif data[4][0] == 'M':
                print('\t* %s: Modified.' % data[5])
                # Diff the old and new blob contents (refs in fields 2/3).
                analyze_diff(data[5],
                             self.exec_git_cmd(['show', data[2]]),
                             self.exec_git_cmd(['show', data[3]]), frontends)
            elif data[4][0] == 'R' or data[4][0] == 'C':
                change = int(data[4][1:])
                print('\t* %s: Move to...' % data[5])
                print('\t* %s: ... here.' % data[6])
                # A similarity index below 100% means the file also
                # changed in flight, so diff it too.
                if change < 100:
                    analyze_diff(data[6],
                                 self.exec_git_cmd(['show', data[2]]),
                                 self.exec_git_cmd(['show', data[3]]), frontends)
            # We should never encounter this, so ignore for now.
            elif data[4] == 'U':
                pass
            else:
                eprint('%s: Unknown line format %s' % (commit, data[4]))
                # NOTE(review): 'sys' is not imported in this module's
                # visible imports; presumably re-exported by the
                # misc_util star import -- confirm, else this raises
                # NameError instead of exiting.
                sys.exit(42)

        print('')

    def list_commits(self, revs):
        ''' List commit IDs between the two revs in the REVS list.
        '''
        # git log REV0..REV1 -- commits reachable from REV1 but not REV0.
        ref = revs[0] + '..' + revs[1]
        return self.exec_git_cmd(['log', '--pretty=%H', ref])
gpl-3.0
-7,940,138,127,483,870,000
37.699387
84
0.527267
false
3.922886
false
false
false