code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
from __future__ import unicode_literals

import datetime

from django.db.models import F, Sum, Count
from django.db.models import Max
from django.db.models import Value as V
from django.db.models.functions import Concat
from django.http import HttpResponseRedirect, HttpResponse, JsonResponse
from django.shortcuts import render
from django.template import RequestContext
from django.template import loader
from django.urls import reverse
from django.utils.dateparse import *
from django.views.decorators.csrf import csrf_exempt

from .models import Vehicle, Driver, Trip
import geo
# Create your views here.
def index(request):
    """Dashboard: the five most recent trips plus 30-day activity summaries
    for every vehicle and driver that drove in that window."""
    # One cutoff for the whole view instead of recomputing "now - 30 days"
    # for every queryset (the original did this five times).
    cutoff = datetime.date.today() - datetime.timedelta(days=30)
    latest_trips_list = Trip.objects.order_by('-arrival_time') \
        .annotate(distance=F('arrival_mileage') - F('departure_mileage'))[:5]
    vehicles = Vehicle.objects.order_by('license_plate')
    context = {
        'latest_trips_list': latest_trips_list,
        'vehicles': vehicles,
    }
    last_month = Trip.objects.filter(departure_time__gte=cutoff)
    context['lastmonth_vehicles'] = Vehicle.objects \
        .annotate(newest_trip=Max('trip__departure_time')) \
        .filter(newest_trip__gte=cutoff)
    context['lastmonth_drivers'] = Driver.objects \
        .annotate(newest_trip=Max('trip__departure_time')) \
        .filter(newest_trip__gte=cutoff)
    for veh in context['lastmonth_vehicles']:
        lm_trips = Trip.objects.filter(vehicle=veh, departure_time__gte=cutoff) \
            .annotate(distance=F('arrival_mileage') - F('departure_mileage')) \
            .order_by('-arrival_time')
        # aggregate() yields None when there are no rows, and [0] on an empty
        # queryset raises IndexError — guard explicitly instead of the old
        # bare "except: pass" that also swallowed real errors.
        veh.month_distance = lm_trips.aggregate(a=Sum('distance'))['a']
        if lm_trips:
            veh.last_trip = lm_trips[0]
    for dri in context['lastmonth_drivers']:
        lm_trips = Trip.objects.filter(driver=dri, departure_time__gte=cutoff) \
            .annotate(distance=F('arrival_mileage') - F('departure_mileage')) \
            .order_by('-arrival_time')
        dri.month_distance = lm_trips.aggregate(a=Sum('distance'))['a']
        if lm_trips:
            dri.last_trip = lm_trips[0]
    context['lastmonth_numdrivers'] = len(context['lastmonth_drivers'])
    context['lastmonth_distance'] = last_month \
        .annotate(distance=F('arrival_mileage') - F('departure_mileage')) \
        .aggregate(total_distance=Sum('distance'))['total_distance']
    context['lastmonth_numvehicles'] = len(context['lastmonth_vehicles'])
    template = loader.get_template('autologbackend/index.html')
    return HttpResponse(template.render(context, request))
def vehicle_detail(request, vehicle_id):
    """Detail page for one vehicle with its 15 most recent trips."""
    vehicle = Vehicle.objects.get(pk=vehicle_id)
    latest_vehicle_trips = Trip.objects.filter(vehicle=vehicle) \
        .order_by('-arrival_time') \
        .annotate(distance=F('arrival_mileage') - F('departure_mileage'))[:15]
    # Bug fix: a vehicle with no trips yet used to raise IndexError here;
    # render the page with last_trip = None instead.
    last_trip = latest_vehicle_trips[0] if latest_vehicle_trips else None
    context = {
        'vehicle': vehicle,
        'latest_vehicle_trips': latest_vehicle_trips,
        'last_trip': last_trip,
    }
    template = loader.get_template('autologbackend/vehicle_detail.html')
    return HttpResponse(template.render(context, request))
def log_trip(request, trip_id=-1, errors=""):
    """Render the manual trip-entry form, optionally pre-filled from an
    existing trip and/or annotated with validation errors."""
    context = {
        'vehicles': Vehicle.objects.order_by('license_plate'),
        'drivers': Driver.objects.order_by('name'),
    }
    if trip_id != -1:
        context['trip'] = Trip.objects.get(pk=trip_id)
    if errors != "":
        context['errors'] = errors
    template = loader.get_template('autologbackend/log_trip.html')
    return HttpResponse(template.render(context, request))
@csrf_exempt
def submit_log_trip(request, trip_id=-1):
    """Validate a posted trip form and create (trip_id == -1) or update a Trip.

    Collects human-readable validation errors; on failure the entry form is
    re-rendered with them, on success the user is redirected to the trip list
    (or the mobile page when the 'm' flag is posted).
    """
    errors = []
    # --- vehicle / driver -------------------------------------------------
    if 'vehicle' in request.POST and request.POST['vehicle'] != '':
        try:
            selected_vehicle = Vehicle.objects.get(pk=request.POST['vehicle'])
        except Vehicle.DoesNotExist:
            # Bug fix: .get() raises DoesNotExist, not IndexError, so a bad
            # ID used to escape as a 500 instead of a form error.
            errors.append("Invalid vehicle ID.")
    else:
        errors.append("No vehicle selected.")
    if 'driver' in request.POST and request.POST['driver'] != '':
        try:
            selected_driver = Driver.objects.get(pk=request.POST['driver'])
        except Driver.DoesNotExist:
            errors.append("Invalid driver ID.")
    else:
        errors.append("No driver selected.")
    # --- mileage ----------------------------------------------------------
    if 'departure_mileage' in request.POST and request.POST['departure_mileage'] != '':
        try:
            departure_mileage = int(request.POST['departure_mileage'])
            if departure_mileage < 0:
                errors.append("Departure mileage is negative.")
        except ValueError:
            errors.append("Departure mileage is not a number.")
    else:
        errors.append("No departure mileage specified.")
    if 'arrival_mileage' in request.POST and request.POST['arrival_mileage'] != '':
        try:
            arrival_mileage = int(request.POST['arrival_mileage'])
            if arrival_mileage < 0:
                errors.append("Arrival mileage is negative.")
        except ValueError:
            errors.append("Arrival mileage is not a number.")
    else:
        errors.append("No arrival mileage specified.")
    # --- timestamps (split date+time fields or a single datetime field) ---
    if 'departure_date' in request.POST and 'departure_time' in request.POST:
        try:
            departure_time = datetime.datetime.combine(
                parse_date(request.POST["departure_date"]),
                parse_time(request.POST["departure_time"])
            )
        except (TypeError, ValueError):
            # parse_date/parse_time return None on bad format (TypeError in
            # combine) or raise ValueError on invalid values.
            errors.append("Departure date/time is not a valid date/time.")
    elif 'departure_datetime' in request.POST and request.POST['departure_datetime'] != '':
        departure_time = parse_datetime(request.POST["departure_datetime"])
        if departure_time is None:
            errors.append("Departure date/time is not a valid date/time.")
    else:
        errors.append("No departure date/time specified.")
    if 'arrival_date' in request.POST and 'arrival_time' in request.POST:
        try:
            arrival_time = datetime.datetime.combine(
                parse_date(request.POST["arrival_date"]),
                parse_time(request.POST["arrival_time"])
            )
        except (TypeError, ValueError):
            errors.append("Arrival date/time is not a valid date/time.")
    elif 'arrival_datetime' in request.POST and request.POST['arrival_datetime'] != '':
        arrival_time = parse_datetime(request.POST["arrival_datetime"])
        if arrival_time is None:
            errors.append("Arrival date/time is not a valid date/time.")
    else:
        errors.append("No arrival date/time specified.")
    # --- locations --------------------------------------------------------
    if 'departure_location' in request.POST and request.POST['departure_location'] != '':
        departure_location = geo.location_from_name(request.POST["departure_location"])
        if departure_location is None:
            # NOTE(review): Location is not imported in this module —
            # presumably a model from .models; confirm, otherwise this path
            # raises NameError.
            departure_location = Location(lat=0, lon=0, description=request.POST['departure_location'])
    elif 'departure_location_lat' in request.POST and 'departure_location_lon' in request.POST:
        departure_location = geo.location_from_coords(float(request.POST["departure_location_lat"]), float(request.POST["departure_location_lon"]))
        if departure_location is None:
            errors.append("No results for geolocation at departure coordinates.")
    else:
        # Bug fix: the original silently skipped this case and later crashed
        # with a NameError when building the Trip; mirror the arrival branch.
        errors.append("No departure location specified.")
    if 'arrival_location' in request.POST and request.POST['arrival_location'] != '':
        arrival_location = geo.location_from_name(request.POST["arrival_location"])
        if arrival_location is None:
            arrival_location = Location(lat=0, lon=0, description=request.POST['arrival_location'])
    elif 'arrival_location_lat' in request.POST and 'arrival_location_lon' in request.POST:
        arrival_location = geo.location_from_coords(float(request.POST["arrival_location_lat"]), float(request.POST["arrival_location_lon"]))
        if arrival_location is None:
            errors.append("No results for geolocation at arrival coordinates.")
    else:
        errors.append("No arrival location specified.")
    # --- persist ----------------------------------------------------------
    if not errors:
        if trip_id == -1:
            trip = Trip(
                vehicle=selected_vehicle,
                driver=selected_driver,
                departure_time=departure_time,
                departure_mileage=departure_mileage,
                departure_location=departure_location,
                arrival_time=arrival_time,
                arrival_mileage=arrival_mileage,
                arrival_location=arrival_location
            )
        else:
            trip = Trip.objects.get(pk=trip_id)
            trip.vehicle = selected_vehicle
            trip.driver = selected_driver
            trip.departure_time = departure_time
            trip.departure_mileage = departure_mileage
            trip.departure_location = departure_location
            trip.arrival_time = arrival_time
            trip.arrival_mileage = arrival_mileage
            trip.arrival_location = arrival_location
        trip.save()
        if request.POST.get('m') == 'm':
            return HttpResponseRedirect(reverse('mobile'))
        return HttpResponseRedirect(reverse('trips'))
    if request.POST.get('m') == 'm':
        return HttpResponseRedirect(reverse('mobile'))
    # Bug fix: the old positional call log_trip(request, errors) bound the
    # error list to log_trip's trip_id parameter.
    return log_trip(request, errors=errors)
def edit_trip(request, trip_id):
    """Render the edit form pre-filled from an existing trip."""
    context = {
        'trip': Trip.objects.get(pk=trip_id),
        'vehicles': Vehicle.objects.order_by('license_plate'),
        'drivers': Driver.objects.order_by('name'),
    }
    template = loader.get_template('autologbackend/edit_trip.html')
    return HttpResponse(template.render(context, request))
def submit_edit_trip(request, trip_id):
    """Apply an edit-trip form directly to an existing Trip (no validation)."""
    trip = Trip.objects.get(pk=trip_id)
    trip.vehicle = Vehicle.objects.get(pk=request.POST['vehicle'])
    trip.driver = Driver.objects.get(pk=request.POST['driver'])
    if 'arrival_location' in request.POST:
        # Bug fix: the resolved arrival location was computed into a local
        # variable but never stored on the trip.
        trip.arrival_location = geo.location_from_name(request.POST["arrival_location"])
    elif 'arrival_location_lat' in request.POST and 'arrival_location_lon' in request.POST:
        trip.arrival_location = geo.location_from_coords(
            float(request.POST["arrival_location_lat"]),
            float(request.POST["arrival_location_lon"]))
    if 'departure_location' in request.POST:
        trip.departure_location = geo.location_from_name(request.POST["departure_location"])
    elif 'departure_location_lat' in request.POST and 'departure_location_lon' in request.POST:
        trip.departure_location = geo.location_from_coords(
            float(request.POST["departure_location_lat"]),
            float(request.POST["departure_location_lon"]))
    trip.departure_mileage = int(request.POST['departure_mileage'])
    trip.arrival_mileage = int(request.POST['arrival_mileage'])
    trip.departure_time = datetime.datetime.combine(
        parse_date(request.POST["departure_date"]),
        parse_time(request.POST["departure_time"])
    )
    trip.arrival_time = datetime.datetime.combine(
        parse_date(request.POST["arrival_date"]),
        parse_time(request.POST["arrival_time"])
    )
    trip.save()
    # NOTE(review): other views call reverse('trips') without kwargs —
    # confirm whether the 'trips' URL pattern actually takes page_nr.
    return HttpResponseRedirect(reverse('trips', kwargs={'page_nr': 0}))
def register_driver(request):
    """Render the blank driver-registration form."""
    return render(request,'autologbackend/register_driver.html')
def submit_register_driver(request):
    """Create a Driver from the posted registration form and return home."""
    try:
        name = request.POST['name']
    except KeyError as e:
        # Bug fix: Python exceptions have no toString(); use str().
        return HttpResponse('Something went wrong. <br />' + str(e))
    else:
        driver = Driver(name=name)
        driver.save()
        return HttpResponseRedirect(reverse('index'))
def register_vehicle(request):
    """Render the blank vehicle-registration form."""
    return render(request,'autologbackend/register_vehicle.html')
def submit_register_vehicle(request):
    """Create a Vehicle from the posted registration form and return home."""
    try:
        license_plate = request.POST['license_plate']
        build_year = request.POST['build_year']
        make = request.POST['make']
        model = request.POST['model']
        mileage = int(request.POST['mileage'])
        mileage_unit = request.POST['mileage_unit']
    except KeyError as e:
        # Bug fix: Python exceptions have no toString(); use str().
        return HttpResponse('Something went wrong. <br />' + str(e))
    else:
        # NOTE(review): 'mileage' is parsed above but never stored on the
        # Vehicle — confirm whether the model has a mileage field that
        # should be set here.
        vehicle = Vehicle(
            license_plate=license_plate,
            build_year=build_year,
            vehicle_make=make,
            vehicle_model=model,
            mileage_unit=mileage_unit
        )
        vehicle.save()
        return HttpResponseRedirect(reverse('index'))
def trips(request):
    """Trip list with optional GET filters: vehicle, driver, distance range,
    date range, and departure/arrival location (text match or geo radius)."""

    def _range_arg(key):
        # Radius from the query string; blank or missing means "text match".
        value = request.GET.get(key, '')
        return float(value) if value != '' else 0

    trips_list = Trip.objects.order_by('-arrival_time') \
        .annotate(distance=F('arrival_mileage') - F('departure_mileage')) \
        .annotate(veh=Concat('vehicle__license_plate', V(' '),
                             'vehicle__vehicle_make', V(' '),
                             'vehicle__vehicle_model'))
    context = {}
    # Each filter is best-effort: a missing or malformed parameter simply
    # leaves the queryset unfiltered (tightened from the old bare excepts,
    # which would also have hidden KeyboardInterrupt/SystemExit).
    try:
        trips_list = trips_list.filter(veh__icontains=request.GET['vehicle'])
    except Exception:
        pass
    try:
        trips_list = trips_list.filter(driver__name__icontains=request.GET['driver'])
    except Exception:
        pass
    try:
        trips_list = trips_list.filter(distance__gte=request.GET['min_dist'])
    except Exception:
        pass
    try:
        trips_list = trips_list.filter(distance__lte=request.GET['max_dist'])
    except Exception:
        pass
    try:
        trips_list = trips_list.filter(arrival_time__gte=parse_date(request.GET['begin_date']))
    except Exception:
        pass
    try:
        trips_list = trips_list.filter(arrival_time__lte=parse_date(request.GET['end_date']) + datetime.timedelta(days=1))
    except Exception:
        pass
    # Location filters.  Order matters: the arrival text match is applied
    # while trips_list is still a queryset; the geo radius filters (which may
    # not return a queryset) come last, mirroring the original flow.
    if 'arrival_location' in request.GET and request.GET['arrival_location'] != '':
        if _range_arg('torange') == 0:
            trips_list = trips_list.filter(
                arrival_location__description__icontains=request.GET['arrival_location'])
    if 'departure_location' in request.GET and request.GET['departure_location'] != '':
        rng = _range_arg('fromrange')
        if rng == 0:
            trips_list = trips_list.filter(
                departure_location__description__icontains=request.GET['departure_location'])
        else:
            loc = geo.location_from_name(request.GET['departure_location'])
            trips_list = geo.filter_trips_in_range_from(loc, float(rng), trips_list)
    if 'arrival_location' in request.GET and request.GET['arrival_location'] != '':
        rng = _range_arg('torange')
        if rng > 0:
            loc = geo.location_from_name(request.GET['arrival_location'])
            trips_list = geo.filter_trips_in_range_to(loc, rng, trips_list)
    context['trips_list'] = trips_list
    template = loader.get_template('autologbackend/trips.html')
    return HttpResponse(template.render(context, request))
def delete_trip(request, trip_id):
    """Remove a single trip, then return to the trip list."""
    trip = Trip.objects.get(pk=trip_id)
    trip.delete()
    return HttpResponseRedirect(reverse('trips'))
def vehicles(request, page_nr):
    """Paginated vehicle list; each vehicle is decorated with its most
    recent trip when one exists."""
    RESULTS_PER_PAGE = 20
    context = {}
    # NOTE(review): last_date=Max("license_plate") looks like it was meant
    # to aggregate a trip date — confirm before relying on it in templates.
    vehs = Vehicle.objects.order_by('license_plate') \
        .annotate(last_date=Max("license_plate"))
    total = vehs.count()
    # Bug fix: integer ceiling division — the old "/" produced a float page
    # count on Python 3.
    num_pages = -(-total // RESULTS_PER_PAGE)
    for veh in vehs:
        # Slice-then-test avoids the old bare "except: pass" around [0].
        last = Trip.objects.filter(vehicle=veh).order_by('-arrival_time')[:1]
        if last:
            veh.last_trip = last[0]
    page = int(page_nr)
    context['vehicle_list'] = vehs
    context['page_nr'] = page_nr
    context['num_pages'] = num_pages
    context['prev_page_nr'] = 0 if page == 0 else page - 1
    context['next_page_nr'] = page + 1 if page < (num_pages - 1) else page
    template = loader.get_template('autologbackend/vehicles.html')
    return HttpResponse(template.render(context, request))
def drivers(request, page_nr):
    """Paginated driver list; each driver is decorated with their most
    recent trip when one exists."""
    RESULTS_PER_PAGE = 20
    context = {}
    dris = Driver.objects.order_by('name')
    total = dris.count()
    # Bug fix: integer ceiling division — the old "/" produced a float page
    # count on Python 3.
    num_pages = -(-total // RESULTS_PER_PAGE)
    for dri in dris:
        # Slice-then-test avoids the old bare "except: pass" around [0].
        last = Trip.objects.filter(driver=dri).order_by('-arrival_time')[:1]
        if last:
            dri.last_trip = last[0]
    page = int(page_nr)
    context['driver_list'] = dris
    context['page_nr'] = page_nr
    context['num_pages'] = num_pages
    context['prev_page_nr'] = 0 if page == 0 else page - 1
    context['next_page_nr'] = page + 1 if page < (num_pages - 1) else page
    template = loader.get_template('autologbackend/drivers.html')
    return HttpResponse(template.render(context, request))
def vehicles_bare(request):
    """Unpaginated, unfiltered vehicle list."""
    context = {'vehicle_list': Vehicle.objects.all()}
    template = loader.get_template('autologbackend/vehicles_bare.html')
    return HttpResponse(template.render(context, request))
def drivers_bare(request):
    """Unpaginated, unfiltered driver list."""
    context = {'drivers': Driver.objects.all()}
    template = loader.get_template('autologbackend/drivers_bare.html')
    return HttpResponse(template.render(context, request))
def delete_vehicle(request, vehicle_id):
    """Delete one vehicle, then return to page 0 of the vehicle list."""
    Vehicle.objects.get(pk=vehicle_id).delete()
    return HttpResponseRedirect(reverse('vehicles', kwargs={'page_nr': 0}))
def delete_driver(request, driver_id):
    """Delete one driver, then return to page 0 of the driver list.

    Bug fix: this was a copy-paste of delete_vehicle — it referenced the
    undefined name vehicle_id (NameError at runtime) and would have deleted
    a Vehicle instead of the requested Driver.
    """
    Driver.objects.get(pk=driver_id).delete()
    return HttpResponseRedirect(reverse('drivers', kwargs={'page_nr': 0}))
def mobile_log(request):
    """Mobile-friendly trip logging form."""
    context = {
        'vehicles': Vehicle.objects.order_by('license_plate'),
        'drivers': Driver.objects.order_by('name'),
    }
    template = loader.get_template('autologbackend/mobile_log.html')
    return HttpResponse(template.render(context, request))
def start_log(request):
    """Form for starting a trip (vehicle/driver pickers)."""
    context = {
        'vehicles': Vehicle.objects.order_by('license_plate'),
        'drivers': Driver.objects.order_by('name'),
    }
    template = loader.get_template('autologbackend/start_trip.html')
    return HttpResponse(template.render(context, request))
def get_location_by_latlong(request, lat, lon):
    """Reverse-geocode lat/lon and return {'address': <name>} as JSON."""
    name = geo.name_from_coords(float(lat), float(lon))
    # Return an empty string rather than null when geocoding finds nothing
    # (idiom fix: "is None" instead of "== None"; dropped stray semicolon).
    if name is None:
        name = ""
    return JsonResponse({'address': name})
def mobile(request):
    """Mobile landing page; renders with an empty context."""
    template = loader.get_template('autologbackend/mobile.html')
    return HttpResponse(template.render({}, request))
def driver_detail(request, driver_id):
    """Detail page for one driver with their 15 most recent trips."""
    driver = Driver.objects.get(pk=driver_id)
    latest_trips = Trip.objects.filter(driver=driver) \
        .order_by('-arrival_time') \
        .annotate(distance=F('arrival_mileage') - F('departure_mileage'))[:15]
    # Bug fix: a driver with no trips yet used to raise IndexError here.
    last_trip = latest_trips[0] if latest_trips else None
    context = {
        'driver': driver,
        # Key kept as 'latest_vehicle_trips' because the template expects it.
        'latest_vehicle_trips': latest_trips,
        'last_trip': last_trip,
    }
    template = loader.get_template('autologbackend/driver_detail.html')
    return HttpResponse(template.render(context, request))
| ReneBrals/RESS-AutoLog | autologbackend/views.py | Python | mit | 17,918 |
import firenado.conf
from firenado import tornadoweb
class IndexHandler(tornadoweb.TornadoHandler):
    """Smoke-test handler: reports whether sessions are (correctly) disabled."""

    def get(self):
        """Write a status message describing the session configuration."""
        # Guard clause: sessions being enabled is the unexpected case here.
        if firenado.conf.session['enabled']:
            self.write("Session is enabled. Something wrong happened.")
            return
        self.write("Session is disabled as supposed to be.<br>")
        if self.session is None:
            self.write("Handler session is None.")
        else:
            self.write("Handler session isn't None. Something wrong "
                       "happened")
| piraz/firenado | examples/nosessionapp/handlers.py | Python | apache-2.0 | 549 |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import config
os.chdir(os.path.dirname(config.__file__)) #Override wsgi directory change
from flask import Flask, Response, render_template, request, session, g
from flask import flash, abort, redirect, url_for, get_flashed_messages
from flask import Markup, make_response
from flask import json, jsonify
from contextlib import closing
import re
import datetime, dateutil
import jinja2
import tempfile
from time import mktime
from functools import wraps
from dblib import postgres as database
import reports.init as report_plugins
app = Flask(__name__)
app.config.from_object("config.DevelopmentConfig")
#Login decorator:
def login_required(original_function):
    """View decorator: redirect anonymous visitors to the login page,
    preserving the requested URL in the 'next' parameter."""
    @wraps(original_function)
    def decorated_function(*args, **kwargs):
        authenticated = (session.get('logged_in') is not None
                         and session.get('user') is not None)
        if not authenticated:
            return redirect(url_for('login', next=request.url))
        return original_function(*args, **kwargs)
    return decorated_function
@app.route('/')
@login_required
def index():
    """Landing page: forward straight to the admin dashboard."""
    return redirect(url_for('admin'))
@app.route('/profile')
@login_required
def profile():
    """Shortcut to the logged-in user's own record page."""
    current_user = session.get('user')
    return redirect(url_for('displayRecord', id=current_user['id']))
@app.route('/profile/password')
@login_required
def change_password():
    """Render the change-password form for the current user."""
    current_user = session.get('user')
    return render_template('admin-change-password.html', user=current_user)
@app.route('/admin/kiosk')
@login_required
def kiosk_settings():
    """Render the kiosk configuration page with an empty form."""
    return render_template('kiosk-settings.html',
                           user=session.get('user'), form={})
@app.route('/admin/activities/')
@login_required
def edit_activities():
    """List all activities for inline editing."""
    with app.app_context():
        activities = get_db().getActivities()
    return render_template('admin-edit-activites.html',
                           activities=activities,
                           user=session.get('user'))
@app.route('/admin/activities/add', methods=['POST'])
@login_required
def add_activity():
    """Create a new activity from the submitted form, then return to the
    activity list."""
    name = request.form.get('name', None)
    admin = request.form.get('admin', None)
    admin_name = request.form.get('admin_name', '')
    if name == '':
        # Bug fix: the old code flashed this error but then inserted the
        # blank-named activity anyway.
        flash('Name field cannot be blank.', 'error')
        return redirect(url_for('edit_activities'))
    if admin == '':
        admin = None
    if admin_name == '':
        flash('No administrator matching the name provided was found. '
              'No administrator will be assigned', 'info')
    with app.app_context():
        try:
            db = get_db()
            db.addActivity(name, admin)
            db.commit()
            flash('Activity added successfully', 'success')
        except database.DatabaseError:
            flash('There was a database error while trying to add this activity.', 'error')
        except database.IntegrityError:
            flash('The activity name "{0}" already exists.'.format(name), 'error')
    # Single exit point replaces the duplicated redirect in the original.
    return redirect(url_for('edit_activities'))
@app.route('/admin/activities/delete', methods=['POST'])
@login_required
def delete_activity():
    """Delete the activity whose id was posted; 400 when no id is given."""
    activity_id = request.form.get('id', None)
    # Guard clause instead of the original wrap-everything-in-an-if shape.
    if activity_id is None:
        return abort(400)
    with app.app_context():
        try:
            db = get_db()
            activity = db.getActivity(activity_id)
            db.deleteActivity(activity_id)
            db.commit()
            flash('The activity "{0}" has been deleted successfully.'
                  .format(activity['name']), 'success')
        except database.DatabaseError:
            flash('There was a problem deleting the specified activity.', 'error')
    return redirect(url_for('edit_activities'))
@app.route('/admin/activities/edit', methods=['POST'])
@login_required
def edit_activities_ajax():
    """Inline-edit (x-editable style) endpoint: update one field of an activity.

    Form parameters: 'pk' (activity id), 'name' (which field: 'name' or
    'admin'), 'value' (the new value).  Returns plain-text 'OK' on success,
    or an error message with HTTP 400.
    """
    id = request.form.get('pk', None)
    name = request.form.get('name', None)
    value = request.form.get('value', None)
    if name == 'name':
        # Renaming the activity itself.
        if value == '':
            return make_response('"value" parameter cannot be blank'), 400
        with app.app_context():
            try:
                db = get_db()
                db.updateActivity(id, value)
                db.commit()
                return make_response('OK'), 200
            except database.DatabaseError:
                return make_response('Database error. Unable to save.'), 400
            except database.IntegrityError:
                # Activity names collide with an existing row.
                return make_response('Name is already in use.'), 400
    elif name == 'admin':
        if value == '':
            # Remove the administrator (?)
            return make_response('"value" parameter cannot be blank'), 400
        with app.app_context():
            try:
                db = get_db()
                # name=None leaves the activity's own name untouched; only
                # the assigned administrator changes.
                db.updateActivity(id, name=None, admin=value)
                db.commit()
                app.logger.debug("Saved {0}, {1}, {2}".format(id, name, value))
                return make_response('OK'), 200
            except database.DatabaseError:
                return make_response('Database error. Unable to save.'), 400
            except database.IntegrityError:
                return make_response('Name is already in use.'), 400
    # Any other field name is rejected.
    return make_response('Expected "name", "admin" for name, but got "{0}" instead'.format(name)), 400
@app.route('/admin/reports_endpoint/payroll/edit', methods=['POST'])
@login_required
def edit_payroll_ajax():
    """Inline-edit a payroll punch (check-in or check-out time).

    Returns the punch id and the recomputed time worked as JSON on success,
    or a plain-text error with HTTP 400.
    """
    id = request.form.get('pk', None)
    name = request.form.get('name', None)
    value = request.form.get('value', None)
    if value is None:
        # Bug fix: re.match() on a missing parameter raised TypeError (500).
        return make_response('Missing "value" parameter'), 400
    error_msg = 'Invalid time format. Please use 24-hour time format.'
    # HH:MM 24-hour; the literal "none" is accepted to clear a checkout.
    if not re.match(r'^(?:\d|[01]\d|2[0-3]):[0-5]\d$', value) and value.lower() != 'none':
        if name == 'checkout':
            error_msg += ' To delete a checkout punch, enter "none".'
        return make_response(error_msg), 400
    if value.lower() == 'none':
        value = None
    with app.app_context():
        try:
            db = get_db()
            if name == 'checkin':
                db.editCheckinTime(id, value)
            elif name == 'checkout':
                db.editCheckoutTime(id, value)
            db.commit()
            time = _jinja2_filter_timedelta(db.getTimeWorked(id))
            response = make_response(json.dumps({'id': id, 'time': time}))
            response.headers['Content-Type'] = 'application/json; charset=utf-8'
            return response
        except database.DatabaseError:
            return make_response('Error while updating database'), 400
    return make_response('Invalid parameters'), 400
@app.route('/admin/reports_endpoint/payroll/add', methods=['POST'])
@login_required
def add_payroll():
    """Add a manual payroll punch (check-in, optionally with a check-out),
    then redirect back to the referring report page ('next')."""
    date = request.form.get('date', None)
    person = request.form.get('id', None)
    next = request.form.get('next', None)
    start = request.form.get('start', None)
    end = request.form.get('end', None)
    if date is None:
        flash("Date field is required", "error")
        return redirect(next)
    if person is None:
        flash("You must type in and autocomplete a name to add a punch", "error")
        return redirect(next)
    if start is None:
        # Bug fix: concatenating a missing start time raised TypeError (500).
        flash("Start time is required", "error")
        return redirect(next)
    with app.app_context():
        try:
            db = get_db()
            if end is None:
                db.doCustomCheckin(person, date + ' ' + start)
            else:
                db.doCustomCheckin(person, date + ' ' + start, date + ' ' + end)
            db.commit()
        except database.DatabaseError:
            flash("Problem while adding row to database.", "error")
    return redirect(next)
@app.route('/admin/reports_endpoint/payroll/delete', methods=['POST'])
@login_required
def delete_punch():
    """Delete a payroll punch row, then bounce back to the referring page."""
    id = request.form.get('id', None)
    next = request.form.get('next', None)
    if id is None:
        # Bug fix: falling off the end returned None (Flask treats that as
        # an error); reject the malformed request explicitly.
        return abort(400)
    with app.app_context():
        try:
            db = get_db()
            db.deleteCheckin(id)
            db.commit()
            return redirect(next)
        except database.DatabaseError:
            # Bug fix: abort('400') with a string raises LookupError;
            # abort() takes an integer status code.
            return abort(400)
@app.route('/admin/reports_endpoint/payroll/time/<id>')
@login_required
def payroll_timedelta(id):
    """Return the time worked for punch *id* as the raw response body.

    NOTE(review): presumably db.getTimeWorked() returns a string here; if it
    returns a timedelta, Flask cannot serialize it directly — confirm.
    """
    with app.app_context():
        db = get_db()
        return db.getTimeWorked(id)
@app.route('/admin')
@login_required
def admin():
    """Admin dashboard listing all activities."""
    with app.app_context():
        activities = get_db().getActivities()
    return render_template('admin.html', user=session.get('user'),
                           activities=activities)
@app.route('/admin-register', methods=['GET', 'POST'])
@login_required
def register():
    """Register a new person (optionally with admin login credentials).

    GET renders a blank form.  POST validates the submission, flashes any
    errors (re-rendering the form pre-filled), and on success creates the
    user (plus barcode, when given) and re-renders a cleared form.
    """
    if request.method == 'POST':
        # Grab the form values and validate them:
        name = request.form['name']
        surname = request.form['surname']
        email = request.form['email']
        admin = request.form.get('admin', False)
        password = request.form['password']
        confirm_pass = request.form['confirm_pass']
        dob = request.form['dob']
        barcode = request.form['barcode']
        license_number = request.form['license_number']
        home_phone = request.form['home_phone']
        mobile_phone = request.form['mobile_phone']
        sms_capable = request.form.get('sms_capable', False)
        newsletter = request.form.get('newsletter', False)
        error = False
        # Checkboxes arrive as the string "on" when ticked.
        if admin == "on": admin = True
        if sms_capable == "on": sms_capable = True
        # Bug fix: the old line evaluated `newsletter` without assigning True.
        if newsletter == "on": newsletter = True
        if name == "":
            flash("First name is a required field", 'error')
            error = True
        if surname == "":
            flash("Surname is a required field", 'error')
            error = True
        if dob == "":
            flash("Date of birth is a required field", 'error')
            error = True
        # Validate date of birth.
        try:
            datetime.datetime.strptime(dob, "%Y-%m-%d").date()
        except ValueError as e:
            # Bug fix: e.message does not exist on Python 3; format the
            # exception itself instead.
            flash("Date of birth must be in YYYY-MM-DD format (e.g. 1990-10-21):"
                  " {0}".format(e), 'error')
            error = True
        # Validate email (raw-string regex; the TLD dot is now escaped so
        # the separator must really be a '.').
        if email != "":
            if not re.match(r"^[a-zA-Z0-9._%-+]+@[a-zA-Z0-9._%-]+\.[a-zA-Z]{2,6}$", email):
                flash("Please enter a valid email address", 'error')
                error = True
        # Admin accounts need a login: email plus matching passwords.
        if admin:
            if email == "":
                flash("You must specify an email to allow administrative access", 'error')
                error = True
            if password == "":
                flash("You must specify a password to allow administrative access", 'error')
                error = True
        if password != confirm_pass:
            flash("The passwords provided do not match.", 'error')
            error = True
        if password == "":
            password = None
        # Populate the form with pre-filled values in case there was an error.
        form = {'name': name, 'surname': surname, 'email': email,
                'admin': admin, 'dob': dob, 'home_phone': home_phone,
                'license_number': license_number, 'newsletter': newsletter,
                'mobile_phone': mobile_phone, 'sms_capable': sms_capable}
        if not error:
            app.logger.debug("Passed registration validation, adding user to database")
            # Check to see if someone with the same name and email exists already:
            with app.app_context():
                db = get_db()
                exists, id = db.userExists(name, surname, email)
                if exists:
                    flash('The user <a class="alert-link" href="{0}">{1} {2}</a> already exists.'
                          .format(url_for('displayRecord', id=id), Markup.escape(name),
                                  Markup.escape(surname)), 'error')
                else:
                    app.logger.debug("No duplicates found. Registering user in database.")
                    id = db.addUser(name, surname, email, home_phone, mobile_phone,
                                    sms_capable, dob, license_number, newsletter=newsletter,
                                    admin=admin, password=password)
                    if barcode != '':
                        db.storeBarcode(id, barcode)
                    db.commit()
                    flash("<a class=\"alert-link\" href=\"{2}\"><b>{0} {1}</b></a> has been registered successfully."
                          .format(Markup.escape(name), Markup.escape(surname),
                                  url_for('displayRecord', id=id)),
                          'success')
                    form = None  # clear the form
    if request.method == 'GET':
        form = None  # Return a blank form
    return render_template('admin-register.html', user=session.get('user'), form=form)
@app.route('/reports')
@login_required
def reports():
    """Reports landing page listing the available reporting plugins."""
    with app.app_context():
        note_title = get_db().getMeta('kiosk_note_title')
    # Discovered report plugin modules.
    available_reports = report_plugins.available_reports
    app.logger.debug("Availble reporting plugins:")
    app.logger.debug(available_reports)
    return render_template('reports.html', user=session.get('user'),
                           note_title=note_title, show_actions=False,
                           available_reports=available_reports)
@app.route('/reports/<name>')
@login_required
def reportBuild(name):
    """Run and render the reporting plugin *name*.

    The plugin module reports.<name> must expose build(db, request); it may
    also expose init(db, request) for form defaults and a 'context' list
    naming extra data ('activities', 'services') its template needs.
    Unknown plugin names yield 404; a plugin without a template yields 500.
    """
    show_actions = False
    with app.app_context():
        db = get_db()
        note_title = db.getMeta('kiosk_note_title')
    # Attempt to import the requested reporting library and logic:
    try:
        # Import witchcraft: resolve reports.<name> dynamically at request
        # time; a bad name surfaces as ImportError -> 404.
        report_function = __import__('reports.{0}'.format(name), fromlist=[''])
        output = report_function.build(db=db, request=request)
        if output is not None:
            if len(output) > 0:
                # Only show export/print actions when the report produced rows.
                show_actions = True
        #~ app.logger.debug("OUTPUT: {0}".format(output))
    except ImportError:
        return abort(404)
    # Run report initializer function
    init_data = None
    if hasattr(report_function, 'init'):
        init_data = report_function.init(db=db, request=request)
    # import the appropriate context:
    if hasattr(report_function, 'context'):
        if 'activities' in report_function.context:
            session.activities = db.getActivities()
        if 'services' in report_function.context:
            session.services = db.getServices()
    try:
        return render_template('reports-{0}.html'.format(name), user=session.get('user'),
            note_title=note_title, show_actions=show_actions, output=output, args=request.args,
            available_reports=report_plugins.available_reports, name=name,
            query_string=request.query_string, init=init_data)
    except jinja2.exceptions.TemplateNotFound:
        app.logger.error("Reporting plugin '{0}' has no matching template.".format(name))
        flash(u"Reporting plugin '{0}' has no matching template. ".format(name), 'error')
        return abort(500)
@app.route('/download/reports/<name>')
@login_required
def reportBuildCSV(name):
    """Run the named reporting plugin and stream its output as a CSV download."""
    with app.app_context():
        db = get_db()
        note_title = db.getMeta('kiosk_note_title')
        try:
            # Resolve reports.<name> dynamically; unknown names become 404s.
            report_function = __import__('reports.{0}'.format(name), fromlist=[''])
            output = report_function.build(db=db, request=request)
        except ImportError:
            return abort(404)
        if output is None or len(output) == 0:
            # Bug fix: the old code fell off the end here (returned None,
            # which Flask turns into a 500) when the report had no rows.
            return abort(404)
        with tempfile.NamedTemporaryFile(delete=False) as csvfile:
            report_function.build_csv(output, csvfile)
        try:
            with open(csvfile.name) as f:
                data = f.read()
        finally:
            # Bug fix: delete=False temp files were never cleaned up.
            os.remove(csvfile.name)
        date = request.args.get('reportdate', '')
        if date == '':
            date = request.args.get('startdate', '')
        filename = "report-{0}-{1}.csv".format(name, date)
        # Serve up the file as an attachment.
        response = make_response(data)
        response.headers["Content-Disposition"] = "attachment; filename=" + filename
        return response
@app.route('/print/reports/<name>')
@login_required
def reportBuildPrint(name):
    """Printer-friendly rendering of the named reporting plugin."""
    with app.app_context():
        db = get_db()
        note_title = db.getMeta('kiosk_note_title')
    try:
        # Resolve reports.<name> dynamically; unknown names become 404s.
        report_function = __import__('reports.{0}'.format(name), fromlist=[''])
        output = report_function.build(db=db, request=request)
        if output is not None and len(output) > 0:
            app.logger.debug("OUTPUT: {0}".format(output))
    except ImportError:
        return abort(404)
    # Expose optional per-report context to the template.
    if hasattr(report_function, 'context'):
        if 'activities' in report_function.context:
            # Bug fix: this used to assign the tuple
            # ('activities', <rows>) instead of just the rows, unlike the
            # identical code in reportBuild.
            session.activities = db.getActivities()
        if 'services' in report_function.context:
            session.services = db.getServices()
    try:
        return render_template('reports-{0}-print.html'.format(name),
            user=session.get('user'), note_title=note_title, output=output,
            args=request.args, name=name, query_string=request.query_string)
    except jinja2.exceptions.TemplateNotFound:
        app.logger.error("Reporting plugin '{0}' has no matching template.".format(name))
        flash(u"Reporting plugin '{0}' has no matching template. ".format(name), 'error')
        return abort(500)
@app.route('/search', methods=['GET'])
@login_required
def searchAdmin():
    """Admin search: list matches, jumping straight to a lone result."""
    query = request.args.get('q', '')
    if not query:
        # Nothing to search for; bounce back to the admin landing page.
        return redirect(url_for('admin'))
    with app.app_context():
        db = get_db()
        results = db.search(query)
        if not results:
            flash(u"No results for \"{0}\"".format(query), "error")
        elif len(results) == 1:
            # Exactly one hit: skip the results page entirely.
            return redirect(url_for('displayRecord', id=results[0]['id']))
        return render_template('search-results.html', user=session.get('user'),
            results=results)
@app.route('/autocomplete/names', methods=['GET'])
@login_required
def autocomplete_names():
    """JSON name-autocomplete endpoint backed by db.searchName()."""
    term = request.args.get('term', None)
    if term is None:
        # The 'term' query parameter is mandatory.
        return abort(400)
    with app.app_context():
        db = get_db()
        matches = db.searchName(term.strip(), autocomplete=True)
        response = make_response(json.dumps(matches))
        response.headers['Content-Type'] = 'application/json; charset=utf-8'
        return response
@app.route('/id/<id>', methods=['GET', 'POST'])
@login_required
def displayRecord(id):
    """Display and edit the profile for user record *id*.

    GET renders the stored profile.  POST validates the submitted form,
    saves the changes when valid, and re-renders with the entered values
    (plus flashed errors) when not.
    """
    if request.method == "POST":
        name = request.form['name']
        surname = request.form['surname']
        email = request.form['email']
        admin = request.form.get('admin', False)
        password = request.form['password']
        confirm_pass = request.form['confirm_pass']
        dob = request.form['dob']
        barcode = request.form['barcode']
        license_number = request.form['license_number']
        home_phone = request.form['home_phone']
        mobile_phone = request.form['mobile_phone']
        sms_capable = request.form.get('sms_capable', False)
        newsletter = request.form.get('newsletter', False)
        error = False
        #This might not be needed: coerce checkbox string values to bool.
        if admin == "true": admin = True
        if sms_capable == "true": sms_capable = True
        if newsletter == "true": newsletter = True
        app.logger.debug("SMS: " + str(sms_capable))
        if name == "":
            flash("First name is a required field", 'error')
            error = True
        if surname == "":
            flash("Surname is a required field", 'error')
            error = True
        if dob == "":
            flash("Date of birth is a required field", 'error')
            error = True
        #Validate date
        try:
            datetime.datetime.strptime(dob, "%Y-%m-%d").date()
        except ValueError as e:
            # BUG FIX: format the exception itself, not e.message
            # (e.message does not exist on Python 3; str(e) is equivalent).
            flash("Date of birth must be in YYYY-MM-DD format (e.g. 1990-10-21):" + \
                " {0}".format(e), 'error')
            error = True
        #validate email:
        if email != "":
            # BUG FIX: escape the dot before the TLD (a bare '.' matched any
            # character) and write '%+-' so '-' is literal instead of forming
            # the accidental range %-+ (which matched & ' ( ) *).
            if not re.match(r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._%-]+\.[a-zA-Z]{2,6}$", email):
                flash("Please enter a valid email address", 'error')
                error = True
        #validate password: admin accounts need credentials to log in with.
        if admin:
            if email == "":
                flash("You must specify an email to allow administrative access", 'error')
                error = True
            if password == "":
                flash("You must specify a password to allow administrative access", 'error')
                error = True
        if password != confirm_pass:
            flash("The passwords provided do not match.", 'error')
            error = True
        # The placeholder means "unchanged"; None skips the password update.
        if password == "********************":
            password = None
            confirm_pass = None
        if password == "": password = None
        if error:
            form = request.form #Carryover entered form values
        else:
            with app.app_context(): #Show the saved values
                db = get_db()
                db.updateUser(id, name, surname, email=email, admin=admin,
                    password=password, dob=dob, newsletter=newsletter,
                    license_number=license_number, home_phone=home_phone,
                    mobile_phone=mobile_phone, sms=sms_capable)
                db.storeBarcode(id, barcode)
                db.commit()
                form = db.getUserByID(id)
                form['barcode'] = db.getBarcodeForId(id)
                flash('Changes saved.', 'success')
    else: #Render GET request
        with app.app_context():
            db = get_db()
            form = db.getUserByID(id)
            form['barcode'] = db.getBarcodeForId(id)
    return render_template('profile.html', user=session.get('user'), form=form)
@app.route('/delete/<id>', methods=['POST'])
@login_required
def deleteUser(id):
    """Delete user record *id*, flash the outcome, and return home."""
    with app.app_context():
        db = get_db()
        user = db.getUserByID(id)
        if user is None:
            flash('Unable to delete user id {0}: User does not exist.'.format(id), 'error')
        else:
            db.deleteUser(id)
            db.commit()
            flash('{0} {1} has been succesfully deleted.'
                  .format(Markup.escape(user['name']),
                          Markup.escape(user['surname'])),
                  'success')
    return redirect(url_for('index'))
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Authenticate an administrator and establish the session."""
    with app.app_context():
        db = get_db()
        site_title = db.getMeta('site_title')
        error = None
        if request.method == 'POST':
            email = request.form["email"]
            password = request.form["password"]
            next = request.form["next"]
            auth = db.authenticate(email, password)
            if auth[0]: #Success: auth[1] is the user record.
                session['logged_in'] = True
                #~ auth[1]['dob'] = auth[1]['dob'].isoformat() if hasattr('isoformat')
                session['user'] = auth[1]
                # NOTE(review): 'next' comes straight from the form and is
                # redirected to unvalidated -- open-redirect risk; confirm it
                # is restricted to local paths.
                if next is not None:
                    return redirect(next)
                else:
                    return redirect(url_for('index'))
            else:
                flash("Username or password is incorrect", "error")
        else:
            next = request.args.get("next", None)
        #First time setup? install() runs while exactly one user exists.
        user_count = db.userCount()
        if user_count == 1:
            return redirect(url_for('install'))
        return render_template('login.html', site_title=site_title, next_url=next)
@app.route('/logout')
def logout():
    """Clear the session login state and return to the index page."""
    for key in ('logged_in', 'user'):
        session.pop(key, None)
    flash("You were logged out succesfully", "info")
    return redirect(url_for('index'))
@app.route('/install', methods=['GET', 'POST'])
def install():
    """First-run setup: create the administrator account.

    Only active while the database holds exactly one user (the bootstrap
    state); otherwise redirects to the admin page.
    """
    with app.app_context():
        db = get_db()
        user_count = db.userCount()
        if user_count == 1:
            if request.method == 'POST':
                name = request.form.get("name", '')
                surname = request.form.get("surname", '')
                email = request.form.get("email", '')
                password = request.form.get("password", '')
                confirm_pass = request.form.get("confirm_pass", '')
                #Validate form:
                error = False
                if name == '':
                    flash("Display name is required", "error")
                    error = True
                if email == '':
                    flash("Login name or email is required", "error")
                    error = True
                if password == '':
                    flash("A password is required for the administrator account", "error")
                    error = True
                if password != confirm_pass:
                    flash("The passwords entered do not match", "error")
                    error = True
                # Advisory only: a short password does not block creation.
                if len(password) < 6:
                    flash("Passwords should be six or more characters. " +
                          "Consider changing your password", "info")
                if not error:
                    db.addUser(name, surname, email=email, admin=True, password=password)
                    db.commit()
                    flash("Admin account created succesfully. "+
                          "Login with the username '{0}'".format(email), "success")
                    return redirect(url_for('admin'))
            # GET, or failed validation: (re-)render the form with entered values.
            return render_template('install.html', form=request.form)
        return redirect(url_for('admin'))
def fetchKioskConstants():
    """Assemble the kiosk configuration values from database metadata.

    TODO: This should probably get cached.
    """
    db = get_db()
    return {
        'timeout': db.getMeta('kiosk_timeout'),
        'timeout_warning': db.getMeta('kiosk_timeout_warning'),
        'timeout_title': db.getMeta('kiosk_timeout_title'),
        'timeout_message': db.getMeta('kiosk_timeout_message'),
        'clock_in': db.getMeta('kiosk_clock_in'),
        'clock_out': db.getMeta('kiosk_clock_out'),
        'kiosk_activity': dbBool(db.getMeta('kiosk_activity')),
        'kiosk_service': dbBool(db.getMeta('kiosk_service')),
    }
@app.route('/timeclock')
def timeclock(error=None):
    """Kiosk landing page: render the check-in search form."""
    # NOTE(review): this route is registered for GET only, so the POST branch
    # below is currently unreachable; kept for when the route gains
    # methods=['GET', 'POST'].
    if request.method == 'POST':
        # BUG FIX: url_for takes the endpoint name, which defaults to the
        # view function name 'timeclockSearch'; the URL rule string
        # 'timeclock-search' would raise a BuildError.
        return redirect(url_for('timeclockSearch'))
    with app.app_context():
        db = get_db()
        site_title = db.getMeta('site_title')
        search_message = db.getMeta('kiosk_search_message')
        return render_template('checkin-search.html', site_title=site_title, search_message=search_message)
@app.route('/timeclock-search')
def timeclockSearch():
    """Kiosk search results: list matching people with their check-in state.

    The follow-up target depends on which kiosk stages are enabled:
    activity selection wins over service selection, otherwise the flow
    goes straight to the note screen.
    """
    search = request.args.get('search', '')
    if search == '':
        return redirect(url_for('timeclock'))
    with app.app_context():
        db = get_db()
        results = db.search(search)
        if len(results) == 0:
            flash(u"No results for \"{0}\"".format(search), "error")
            return redirect(url_for('timeclock'))
        kiosk = fetchKioskConstants()
        results_message = db.getMeta('kiosk_results_message')
        kiosk_activity = dbBool(db.getMeta('kiosk_activity'))
        kiosk_service = dbBool(db.getMeta('kiosk_service'))
        # Pick the next kiosk stage used by the result links.
        # NOTE(review): 'selectService' does not match the view function name
        # 'selectServices' -- confirm how the template consumes 'target'.
        target = 'checkinNote'
        if kiosk_service:
            target = 'selectService'
        if kiosk_activity:
            target = 'selectActivity'
        for record in results:
            record['checked_in'] = db.getCheckinStatus(record['id'])
        return render_template('checkin-results.html', results_message=results_message,
            results=results, search = search, kiosk=kiosk,target=target)
@app.route('/clock-out', methods=['GET'])
def clockOut():
    """Check out the user given by ?id= (when valid), then show the kiosk."""
    user_id = request.args.get('id', '')
    with app.app_context():
        db = get_db()
        # Silently ignore unknown ids; the kiosk page is shown either way.
        if db.getUserByID(user_id) is not None:
            db.doCheckout(user_id)
            db.commit()
        return redirect(url_for('timeclock'))
@app.route('/select-activity', methods=['GET'])
def selectActivity():
    """Kiosk activity-selection stage (skipped when disabled in config)."""
    search = request.args.get('search', '')
    #~ if search == '':
    #~     #For some reason, when following a link to here browsers fetch
    #~     #?id=nnn first, and then ?id=nnn&query=s and causes a broken pipe.
    #~     abort(200)
    ID = request.args.get('id', '')
    with app.app_context():
        db = get_db()
        kiosk_activity = db.getMeta('kiosk_activity')
        if not dbBool(kiosk_activity): #Skip activity selection if disabled
            app.logger.debug("Activity selection disabled. Skipping to service selection")
            # BUG FIX: selectServices reads the lowercase 'id' query arg; the
            # old uppercase ID kwarg was silently dropped there.  Also forward
            # the search term so later stages keep their context.
            return redirect(url_for('selectServices', id=ID, search=search))
        activities = db.getActivities()
        kiosk = fetchKioskConstants()
        title = db.getMeta('kiosk_activity_title')
        allow_multiple = db.getMeta('kiosk_activity_allow_multiple')
        return render_template('checkin-activity.html', search=search, id=ID,
            activities=activities, kiosk=kiosk, title=title, allow_multiple=allow_multiple)
@app.route('/select-services', methods=['GET'])
def selectServices():
    """Kiosk service-selection stage (skipped when disabled in config)."""
    with app.app_context():
        db = get_db()
        search = request.args.get('search', '')
        ID = request.args.get('id', '')
        activity = request.args.get('activity', '')
        kiosk_activity = dbBool(db.getMeta('kiosk_activity'))
        if not dbBool(db.getMeta('kiosk_service')): #Skip service selection if disabled
            app.logger.debug("Service selection disabled. Skipping to note entry")
            # NOTE: only the first selected activity is forwarded here;
            # checkinNote re-reads 'activity' as a list via getlist().
            return redirect(url_for('checkinNote', id=ID, search=search, activity=activity))
        #parse and validate selected activities:
        activities = request.args.getlist("activity")
        #validate only if activity selection was enabled
        if len(activities) == 0 and kiosk_activity:
            flash("You must pick at least one activity", "error")
            # BUG FIX: selectActivity reads the lowercase 'id' query arg; the
            # old uppercase ID kwarg was silently ignored there.  Keep the
            # search term so the back-link context survives.
            return redirect(url_for('selectActivity', id=ID, search=search))
        services = db.getServices()
        kiosk = fetchKioskConstants()
        title = db.getMeta('kiosk_service_title')
        allow_multiple = dbBool(db.getMeta('kiosk_service_allow_multiple'))
        kiosk["opt_service"] = dbBool(db.getMeta('kiosk_opt_service'))
        return render_template('checkin-services.html', search=search, activities=activities, id=ID,
            services=services, kiosk=kiosk, title=title, allow_multiple=allow_multiple)
@app.route('/select-opt-service', methods=['GET'])
def checkinOptService():
    """Kiosk optional-services stage: offer extra services after the
    mandatory selection.  (Removed an unreachable trailing ``pass`` that
    followed the return.)
    """
    with app.app_context():
        db = get_db()
        kiosk_service = dbBool(db.getMeta('kiosk_service'))
        search = request.args.get('search', '')
        ID = request.args.get('id', '')
        activities = request.args.getlist("activity")
        services = request.args.getlist("service")
        opt_services = db.getServices()
        kiosk = fetchKioskConstants()
        title = db.getMeta('kiosk_service_opt_title')
        allow_multiple = dbBool(db.getMeta('kiosk_service_opt_allow_multiple'))
        #validate only if service selection was enabled
        if len(services) == 0 and kiosk_service:
            flash("You must pick at least one service", "error")
            return selectServices()
        return render_template('checkin-opt-services.html', search=search, activities=activities, id=ID,
            services=services, kiosk=kiosk, title=title, allow_multiple=allow_multiple,
            opt_services=opt_services)
@app.route('/checkin-note', methods=['GET'])
def checkinNote():
    """Kiosk note screen: final free-text entry before confirming check-in."""
    search = request.args.get('search', '')
    ID = request.args.get('id', '')
    #parse and validate selected activities:
    activities = request.args.getlist("activity")
    services = request.args.getlist("service")
    opt_services = request.args.getlist("opt_services")
    with app.app_context():
        db = get_db()
        kiosk_service = dbBool(db.getMeta('kiosk_service'))
        kiosk = fetchKioskConstants()
        title = db.getMeta('kiosk_note_title')
        # A service is mandatory only when the service stage is enabled.
        if len(services) == 0 and kiosk_service:
            flash("You must pick at least one service", "error")
            return selectServices()
        return render_template('checkin-note.html', kiosk=kiosk, title=title, id=ID, search=search,
            activities=activities, services=services, opt_services=opt_services)
@app.route('/checkin-confirm', methods=['POST'])
def checkinConfirm():
    """Record the completed check-in and render the confirmation page."""
    search = request.form['search']
    ID = int(request.form['id'])
    #parse and validate selected activities:
    activitiesID = request.form.getlist("activity")
    servicesID = request.form.getlist("service")
    opt_servicesID = request.form.getlist("opt_services")
    note = request.form.get('message', None)
    if note == '': note = None
    with app.app_context():
        db = get_db()
        kiosk = fetchKioskConstants()
        person = db.getUserByID(ID)
        note_title = db.getMeta('kiosk_note_title')
        #Dereference activities and services by ID to their names:
        activities = db.getActivityNameList(activitiesID)
        activitiesString = ", ".join(activities)
        activitiesString = activitiesString.decode('utf8')
        services = db.getServiceNameList(servicesID)
        servicesString = ", ".join(services)
        servicesString = servicesString.decode('utf8')
        opt_services = db.getServiceNameList(opt_servicesID)
        opt_servicesString = ", ".join(opt_services)
        # BUG FIX: decode the optional-services string, not the services
        # string a second time (copy/paste error).
        opt_servicesString = opt_servicesString.decode('utf8')
        #Record the check-in
        db.doCheckin(person['id'], activities, services, note, opt_services)
        db.commit()
        return render_template('checkin-confirm.html', person=person, activities=activitiesString,
            services=servicesString, note=note, note_title=note_title,
            opt_services=opt_servicesString)
@app.errorhandler(500)
def applicationError(error):
    """500 handler: show the generic error page."""
    body = render_template('error.html')
    return body, 500
def get_db():
    """Return the per-context database handle, opening it on first use.

    The handle is cached on ``flask.g`` and closed again by
    ``teardown_request()``.  On connection failure the error is flashed
    and the request aborts with HTTP 500.
    """
    app.logger.debug("Enter get_db()")
    if not hasattr(g, 'db'):
        app.logger.debug("No cached database cursor. Opening new connection...")
        try:
            g.db = connect_db()
        except database.DatabaseError as e:
            app.logger.error(e)
            if e.code == database.INVALID_PASSWORD:
                app.logger.error("Invalid password")
                flash(u'Configuration Error: Invalid database password provided', 'error')
            elif e.code == database.FAIL:
                app.logger.error("Unable to connect to database.")
                flash(u'<strong>Error: </strong>Unable to connect to database. Please check your configuration.', 'error')
                flash(u'<br>Details:<pre>{0}</pre>'.format(str(e)), 'error')
            # Any connection failure is fatal for this request.
            abort(500)
    return g.db
def connect_db():
    """Open a new Database connection using the Flask app configuration."""
    cfg = app.config
    hostname = '{0}:{1}'.format(cfg['DB_HOSTNAME'], cfg['DB_PORT'])
    app.logger.debug("Attempting to open database {0}@{1}...".format(cfg['DB_NAME'], hostname))
    return database.Database(hostname, cfg['DB_NAME'], cfg['DB_USER'], cfg['DB_PASSWORD'])
def dbBool(string):
    """Convert a database boolean character to a Python bool.

    't' -> True, 'f' -> False; any other value raises ValueError.
    """
    if string == 't':
        return True
    if string == 'f':
        return False
    raise ValueError("Expected either 't' or 'f'")
@app.teardown_request
@app.teardown_appcontext
def teardown_request(exception):
    """Close the cached database handle at the end of a request/context.

    Registered for BOTH request and app-context teardown, so it can fire
    twice for the same context; the handle is removed from ``g`` on first
    close so the second invocation is a no-op (previously the same
    connection could be closed twice).
    """
    db = getattr(g, 'db', None)
    if db is not None:
        # Drop the cached handle so a second teardown doesn't re-close it.
        del g.db
        app.logger.debug("Close db")
        db.close()
@app.template_filter('strftime')
def _jinja2_filter_datetime(date, fmt=None):
    """Jinja filter: format a datetime-like value as HH:MM (tz stripped).

    Returns None for values that don't support the datetime protocol.
    """
    # BUG FIX: reject the value when EITHER method is missing (was 'and',
    # which let e.g. strings through -- str has .replace but no .strftime --
    # and then crashed on replace(tzinfo=None)).
    if not hasattr(date, 'replace') or not hasattr(date, 'strftime'):
        return None
    native = date.replace(tzinfo=None)
    format='%H:%M' #TODO: i18n; note the 'fmt' parameter is currently ignored
    return native.strftime(format)
@app.template_filter('timedelta')
def _jinja2_filter_timedelta(timedelta):
    """Jinja filter: render a timedelta as H:MM:SS (None for non-timedeltas)."""
    if not isinstance(timedelta, datetime.timedelta):
        return None
    # BUG FIX: include the days component; .seconds alone silently truncates
    # deltas of a day or more.  (Result is unchanged for deltas < 24h.)
    total = timedelta.days * 86400 + timedelta.seconds
    hours, remainder = divmod(total, 3600)
    minutes, seconds = divmod(remainder, 60)
    return '%s:%02d:%02d' % (hours, minutes, seconds)
@app.template_filter('userbyid')
def _jinja2_filter_userbyid(id):
    """Jinja filter: dereference a user id to its full user record."""
    app.logger.debug("Filter dereference on id {0}".format(id))
    return get_db().getUserByID(id)
"""
Custom JSON encoder to handle datetime representations:
"""
class DatetimeJSONEncoder(json.JSONEncoder):
def default(self, obj):
#datetimes encoded in UNIX time
if isinstance(obj, datetime.datetime):
return int(mktime(obj.timetuple()))
elif isinstance(obj, datetime.date):
return int(mktime(obj.timetuple()))
return json.JSONEncoder.default(self, obj)
app.json_encoder = DatetimeJSONEncoder
if __name__ == "__main__":
app.run(host='0.0.0.0')
#app.run()
| rechner/Taxidi-volunteer2 | routes.py | Python | gpl-3.0 | 34,530 |
def test():
    """Microbenchmark: local-variable read/write traffic on object refs.

    The loop body is manually unrolled into 1000 assignments (10 groups of
    100, each line performing 10) so that loop/counter overhead is
    negligible relative to the assignment work being measured.  Runs the
    unrolled body one million times; produces no output -- only wall time
    matters.
    """
    a = {}
    b = {}
    c = {}
    d = {}
    i = 0
    while i < 1e6:
        # 100
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        # 100
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        # 100
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        # 100
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        # 100
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        # 100
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        # 100
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        # 100
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        # 100
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        # 100
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        a = a; a = b; a = c; a = d; b = a; b = b; b = c; b = d; c = a; c = b
        i += 1
test()
| kphillisjr/duktape | tests/perf/test-reg-readwrite-object.py | Python | mit | 8,045 |
from jsonrpc import ServiceProxy
# Connect to the local wallet's JSON-RPC service and unlock the wallet
# for 60 seconds with the passphrase typed by the operator.
access = ServiceProxy("http://127.0.0.1:40001")
# NOTE(review): Python 2 raw_input echoes the passphrase to the terminal;
# getpass.getpass would hide it -- confirm before changing behavior.
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
| KevinBiomech/GameCredits | contrib/wallettools/walletunlock.py | Python | mit | 159 |
from distutils.core import setup
# Packaging script for the RiboMethSeq analysis toolkit.
# NOTE(review): distutils' setup() silently ignores install_requires;
# switching to setuptools would make the numpy/pysam requirements
# effective -- confirm before changing build tooling.
setup(
    name='RiboMethSeq',
    version='0.1.1',
    author='Brett N. Olsen',
    author_email='brett.olsen@gmail.com',
    packages=['ribomethseq'],
    scripts=['bin/calculate-ribomethseq-from-bam', 'bin/bed-to-wig', 'bin/split-bed'],
    license='LICENSE',
    description='Implementation of the RiboMethSeq analysis algorithms',
    install_requires=[
        "numpy",
        "pysam",
    ],
)
| caethan/ribomethseq | setup.py | Python | mit | 436 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
class Cobbler(Plugin):
    # Shared base for the distro-specific Cobbler plugins below; carries
    # only the common plugin name.  (Comment, not a docstring: sos-style
    # plugins may surface class docstrings as user-visible descriptions.)
    plugin_name = "cobbler"
class RedHatCobbler(Cobbler, RedHatPlugin):
    """Cobbler installation server
    """
    # Triggered when the 'cobbler' package is installed.
    packages = ('cobbler',)
    profiles = ('cluster', 'sysmgmt')
    def setup(self):
        # Collect Cobbler configuration, logs, distro/profile state, and
        # any Spacewalk/RHN kickstart files.
        self.add_copy_spec([
            "/etc/cobbler",
            "/var/log/cobbler",
            "/var/lib/rhn/kickstarts",
            "/var/lib/cobbler"
        ])
class DebianCobbler(Cobbler, DebianPlugin, UbuntuPlugin):
    # Debian/Ubuntu variant: same collection minus the RHN kickstarts,
    # and with the (potentially huge) ISO store excluded.
    packages = ('cobbler',)
    def setup(self):
        self.add_copy_spec([
            "/etc/cobbler",
            "/var/log/cobbler",
            "/var/lib/cobbler"
        ])
        # Exclude installation media to keep report size manageable.
        self.add_forbidden_path("/var/lib/cobbler/isos")
# vim: et ts=4 sw=4
| codificat/sos | sos/plugins/cobbler.py | Python | gpl-2.0 | 1,477 |
import server, sys
from hardwarepixel import HardwarePixel
# Assume we are running on ESP hardware unless the 'network' module fails
# to import (e.g. when developing on a desktop interpreter).
__ESP__ = True
try:
    import network
except Exception as exc:
    print("Failed to import hardware packages, switching to no ESP mode")
    sys.print_exception(exc)  # MicroPython-specific traceback helper
    __ESP__ = False
class Handler:
    """LED-strip message handler: receives pixel data over the network
    and forwards it to the HardwarePixel driver."""
    def __init__(self):
        # On ESP hardware, bind to the station interface's own IP;
        # otherwise fall back to loopback for local development.
        if __ESP__:
            self.host = network.WLAN(network.STA_IF).ifconfig()[0]
        else:
            self.host = '127.0.0.1'
        self.port = 2525
        self.numberOfLeds = 256
        self.hardwarePixel = HardwarePixel(self.numberOfLeds)
    def start(self):
        """Start the blocking network server, dispatching to onMsg()."""
        print("Starting server")
        server.run_server(self.host, self.port, self.numberOfLeds, self.onMsg)
    def onMsg(self, data):
        """Raw-data path: dump the payload straight to the LED driver."""
        #print("onMsg", data)
        #server.handle_osc(data, self.dispatchMessage)
        self.hardwarePixel.writeDump(data)
        #self.hardwarePixel.flush()
    def dispatchMessage(self, timetag, data):
        """OSC path (currently unused; see onMsg): handle one /l message.

        NOTE(review): assumes args[0] packs (r, g, b, index) -- the first
        three values are the color, the fourth the pixel index; confirm
        against the sender's encoding.
        """
        #print("dispatchMessage", timetag, data)
        oscaddr, tags, args = data
        if oscaddr == '/l':
            #print("index", index, "color", color)
            self.hardwarePixel.write(args[0][3], args[0][0:3])
        else:
            print("unknown message address", oscaddr)
| mostley/lightnet | handlers/micropython-osc/src/handler.py | Python | mit | 1,213 |
import os

# Optional fast backend: when USE_SYMENGINE is set in the environment,
# re-export the symbolic core from symengine's sympy-compatibility layer;
# otherwise fall back to sympy proper.  Both branches expose identical names.
# FIX: removed the duplicate 'zeros' that appeared twice in each import list.
USE_SYMENGINE = os.getenv('USE_SYMENGINE')
if USE_SYMENGINE:
    from symengine.sympy_compat import (Symbol, Integer, sympify, S,
        SympifyError, exp, log, gamma, sqrt, I, E, pi, Matrix,
        sin, cos, tan, cot, csc, sec, asin, acos, atan, acot, acsc, asec,
        sinh, cosh, tanh, coth, asinh, acosh, atanh, acoth,
        lambdify, symarray, diff, zeros, eye, diag, ones,
        expand, Function, symbols, var, Add, Mul, Derivative)
    from symengine.sympy_compat import AppliedUndef
    #TODO: Fix this -- reaching into the private wrapper module directly.
    from symengine.lib.symengine_wrapper import (Matrix as ImmutableMatrix,
        MatrixBase)
else:
    from sympy import (Symbol, Integer, sympify, S,
        SympifyError, exp, log, gamma, sqrt, I, E, pi, Matrix,
        sin, cos, tan, cot, csc, sec, asin, acos, atan, acot, acsc, asec,
        sinh, cosh, tanh, coth, asinh, acosh, atanh, acoth,
        lambdify, symarray, diff, zeros, eye, diag, ones,
        expand, Function, symbols, var, Add, Mul, Derivative)
    from sympy.core.function import AppliedUndef
    from sympy import ImmutableMatrix, MatrixBase
| jaimahajan1997/sympy | sympy/core/backend.py | Python | bsd-3-clause | 1,114 |
import json
import numpy as np
import pandas as pd
from utils import serialize_dict, serialize_merge_dict, \
map_to_unrealized_pnl, map_to_holding_values
from statics import OrderType, PositionType, PositionStatus, CurrencyType, \
TradingExecuteFlag, BarColNames, ORD_POS_MAPPING
from errors import KernelOrderError, KernelPositionError, KernelAccountError, \
KernelBacktestError
__author__ = 'zed'
# ----------------------------------------------------------------------
# Order object.
class Order:
    """
    Json-like trading order profile object.  The 'target' slot (target
    position id) is only populated for partial sell/fill orders.
    <example>
    body = {
        'instrument': 'EUR_USD',
        'direction': 'ORD_SHORT',
        'time': datetime(2015,10,08,12,0,0),
        'price': 2.588,
        'volume': 100,
    }
    """
    # Keys of self.body; order must match the constructor's value list.
    __keys = ['instrument', 'direction', 'time', 'price', 'volume', 'target']
    def __init__(self, instrument, direction, time, price, volume,
                 target_id=None):
        """
        Constructor.
        :param instrument: String; name of instrument.
        :param direction: OrderType(Enum[5]) object; order direction.
        <Values>:
            - OrderType.buy = 'ORD_BUY'
            - OrderType.short = 'ORD_SHORT'
            - OrderType.fill = 'ORD_FILL'
            - OrderType.sell = 'ORD_SELL'
            - OrderType.none = 'ORD_NONE'
        :param time: datetime.datetime object; placement time.
        :param price: double.
        :param volume: int/double; Required only by buy/short order.
        :param target_id: <>; target position id.
        <Default>: None, Required only by partial sell/fill order.
        :raises KernelOrderError: when direction is not an OrderType.
        :return:
        """
        # Type check (raises on failure, so the body always runs when valid).
        if self.__type_check(direction):
            values = [instrument, direction, time, price, volume, target_id]
            self.body = dict(zip(self.__keys, values))
            self.instrument = values[0]
            self.direction = values[1]
    @staticmethod
    def __type_check(direction):
        """
        Type check for constructor.
        :raises KernelOrderError: when direction is not an OrderType.
        :return: True when the direction is a valid OrderType.
        """
        if not direction.__class__ == OrderType:
            msg = '[KERNEL::Order]: Unable to construct Order object. '
            raise KernelOrderError(msg)
        return True
    @classmethod
    def open(cls, instrument, direction, time, price, volume):
        """
        Reload constructor for order that opens a position.
        Accepts only buy/short directions.
        :return: Order object.
        """
        # Type check.
        if not (direction in [OrderType.buy, OrderType.short]):
            msg = '[KERNEL::Order]: Unable to construct Order object. '
            raise KernelOrderError(msg)
        return cls(instrument, direction, time, price, volume)
    @classmethod
    def close(cls, instrument, direction, time, price):
        """
        Reload constructor for order that closes a position.
        Does not require volume parameter.  Accepts only sell/fill.
        :return: Order object.
        """
        # Type check.
        if not (direction in [OrderType.sell, OrderType.fill]):
            msg = '[KERNEL::Order]: Unable to construct Order object. '
            raise KernelOrderError(msg)
        return cls(instrument, direction, time, price, None)
    @classmethod
    def close_partial(cls, instrument, direction, time, price,
                      volume, target_id):
        """
        Reload constructor for order that partially closes a position.
        Accepts only sell/fill; target_id names the position to reduce.
        :return: Order object.
        """
        # Type check.
        if not (direction in [OrderType.sell, OrderType.fill]):
            msg = '[KERNEL::Order]: Unable to construct Order object. '
            raise KernelOrderError(msg)
        return cls(instrument, direction, time, price, volume, target_id)
    def cash_flow(self):
        """
        :return: double; order cash flow (volume x price).
        """
        return self.body['volume'] * self.body['price']
    def export_price_dict(self):
        """
        :return: dict; {instrument: price} pair.
        """
        return {self.instrument: self.body['price']}
    def export_to_position(self):
        """
        Export data in order to construct corresponding Position.
        For OrderType in [buy, short].
        :return: list; [instrument, PositionType, vol, time, price]
        """
        return [self.body['instrument'], ORD_POS_MAPPING[self.direction],
                self.body['volume'], self.body['time'], self.body['price']]
    def export_time_price(self):
        """
        Export data in order to close Position.
        For OrderType in [sell, fill],
        :return: list; [time, price]
        """
        return [self.body['time'], self.body['price']]
    def view(self):
        """
        Print order as pretty-printed, serialized JSON.
        :return:
        """
        print json.dumps(serialize_dict(self.body), indent=4, sort_keys=True)
# ----------------------------------------------------------------------
# Position object.
class Position:
    """
    Json-like position log object.

    <privates>
    - status: PositionStatus flag.
        - POSITION_STATUS_OPEN = 'POS_OPENING'
        - POSITION_STATUS_CLOSED = 'POS_CLOSED'
    - direction: PositionType of this position (long/short).
    - body: dict; position content.
    <example>
        body = {
            'instrument': 'EUR_USD',
            'direction': 'POS_LONG',
            'volume': 10000,
            'openTime': datetime(2015,9,12,0,0,0),
            'openPrice': 1.8502,
            'closeTime': datetime(2015,9,15,0,0,0),
            'closePrice': 1.8507,
            'realizedPnL': 5,
        }
    """
    # Keys of self.body dict.
    __keys = ['instrument', 'direction', 'volume', 'open_time', 'open_price',
              'close_time', 'close_price', 'realized_pnl']
    __keys_open = ['instrument', 'direction', 'volume',
                   'open_time', 'open_price']
    __keys_close = ['close_time', 'close_price']

    def __init__(self, order):
        """
        Constructor.
        :param order: Order object; with order.direction either buy/short.
        :return:
        """
        if self.__type_check(order):
            values = order.export_to_position()
            self.body = dict(zip(self.__keys_open, values))
            self.status = PositionStatus.open
            self.direction = self.body['direction']

    @staticmethod
    def __type_check(order):
        """
        Type check for constructor.
        :return: True; raises KernelPositionError on bad order direction.
        """
        if order.direction not in [OrderType.buy, OrderType.short]:
            msg = '[KERNEL::Position]: Unable to construct Position object. '
            raise KernelPositionError(msg)
        return True

    def __is_open(self):
        """
        Check status.
        :return: boolean; True if status is open.
        """
        return self.status == PositionStatus.open

    def view(self, curr_price=None):
        """
        Print position with/without unrealized pnl.
        :param curr_price: double; current price.
            <Default>: None; do not print unrealized pnl.
        :return:
        """
        # print(x) with one argument is valid on both Python 2 and 3.
        if (curr_price is not None) and (self.status == PositionStatus.open):
            d = {'unrealized_pnl': self.unrealized_pnl(curr_price)}
            print(json.dumps(serialize_merge_dict(self.body, d),
                             indent=4, sort_keys=True))
        else:
            print(json.dumps(serialize_dict(self.body),
                             indent=4, sort_keys=True))

    def open_value(self):
        """
        :return: double; position's opening value (open_price * volume).
        """
        return self.body['open_price'] * self.body['volume']

    def close_value(self):
        """
        :return: double; position's closing value (close_price * volume).
        """
        return self.body['close_price'] * self.body['volume']

    def holding_value(self, curr_price):
        """
        Calculate an opening position's holding value.
        :param curr_price: double; current price.
        :return: double; calculated holding value (None if already closed).
        """
        # Only meaningful for an open position.
        if self.__is_open():
            return curr_price * self.body['volume']

    def unrealized_pnl(self, curr_price):
        """
        Calculate an opening position's unrealized pnl.
        :param curr_price: double; current price.
        :return: double; calculated unrealized pnl (None if already closed).
        """
        if not self.__is_open():
            return
        if self.direction == PositionType.long:
            return self.body['volume'] * (curr_price - self.body['open_price'])
        elif self.direction == PositionType.short:
            return self.body['volume'] * (self.body['open_price'] - curr_price)

    def __realized_pnl(self):
        """
        Calculate realized pnl.
        :return: double; unrealized_pnl(self.body['close_price'])
        """
        return self.unrealized_pnl(self.body['close_price'])

    def close(self, order):
        """
        Close this position, calculate realized pnl and record.
        :param order: Order object; sell closes a long, fill closes a short.
        :return: double; realized pnl (None if the position is already closed).
        :raises KernelPositionError: if the order direction cannot close
            this position.
        """
        if not self.__is_open():
            return
        # Explicit validation (was ``assert``): asserts are stripped under
        # ``python -O``, which would have silently skipped this check.
        matches = (
            (self.direction == PositionType.short and
             order.direction == OrderType.fill) or
            (self.direction == PositionType.long and
             order.direction == OrderType.sell)
        )
        if not matches:
            msg = '[KERNEL::Position]: Invalid order type ' \
                  'to close a position.'
            raise KernelPositionError(msg)
        # Update body with [close_time, close_price].
        values = order.export_time_price()
        self.body.update(dict(zip(self.__keys_close, values)))
        # Realize PnL and mark the position closed.
        self.body['realized_pnl'] = self.__realized_pnl()
        self.status = PositionStatus.closed
        return self.body['realized_pnl']
# ----------------------------------------------------------------------
# Account object.
class Account:
    """
    Trading account object.
    Suppose the account base is US dollar, then:
    - curr_balance has unit of USD.
    - positions [BASE/QUOTE: BID/ASK]: sell 1 base for (BID) quote currency.
      Buy 1 base at (ASK) quote currency.
      Sell 1 base then buy it back, pnl = -1 spread (quote) = BID-ASK < 0
    <notes>
    - [USD/JPY: 121.0000/121.0001]: volume-(USD), price*volume-(JPY)
      pnl-(JPY), margin_used(USD) <- volume.
    - [EUR/USD: 1.2204/1.2205]: volume-(EUR), price*volume-(USD)
      pnl-(USD), margin_used(USD) <- price*volume.
    - [EUR/HKD: 8.6107/8.6108]: volume-(EUR), price*volume-(HKD)
      pnl-(HKD), margin_used(USD) <- volume*(EUR/USD)
      (i.e. convert volume to USD)
    <invokes>
    - utils.map_to_holding_values(): return price*volume of Position,
      unit is quote currency.
    - utils.map_to_unrealized_pnl(): return realized pnl on a Position,
      unit is quote currency.
    <privates>
    * init_cash: double; initial balance.
    * leverage: int; account leverage setting.
    * base: string; base currency of the account.
    """
    def __init__(self, init_cash, leverage, base):
        """
        Constructor.
        :param init_cash: double/int; initial cash.
        :param leverage: int; leverage.
        :param base: CurrencyType(Enum); base currency of this account.
        :return:
        """
        if self.__type_check(init_cash, leverage, base):
            self.curr_balance = self.__init_cash = init_cash
            self.__leverage = leverage
            self.__margin_rate = 1.0/leverage
            self.__base = base
            # History Containers
            # longs/shorts hold currently-open Position objects (LIFO);
            # closed accumulates everything that has been closed.
            self.longs, self.shorts, self.closed = [], [], []
            # Per-bar time series written by the record_*_ts methods.
            self.record_longs, self.record_shorts, self.record_nav = [], [], []
            self.record_orders = []
    @staticmethod
    def __type_check(init_cash, leverage, base):
        """
        Type check for constructor.
        :return: True; raises KernelAccountError on invalid arguments.
        """
        if not (base.__class__ == CurrencyType and (
                init_cash > 0 and leverage >= 1)):
            msg = '[KERNEL::Account]: Unable to construct Account object. '
            raise KernelAccountError(msg)
        return True
    @classmethod
    def usd_std(cls):
        """
        Construct a USD account with 1 million, leverage = 20.
        :return: Account object.
        """
        return cls(1000000, 20, CurrencyType.USD)
    def view(self, curr_prices=None):
        """
        Print a JSON snapshot of the account.
        :param curr_prices: dict; {instrument: current price} pairs.
            When given, nav/margin figures are included as well.
        :return:
        """
        prompt = {
            'base': self.__base,
            'leverage': self.__leverage,
            'marginRate': self.__margin_rate,
            'balance': self.curr_balance,
            'longPositions': [p.body for p in self.longs],
            'shortPositions': [p.body for p in self.shorts],
            'closedPositions': [p.body for p in self.closed]
        }
        if curr_prices:
            additional = {
                'nav': self.nav(curr_prices),
                'marginUsed': self.margin_used(curr_prices),
                'marginAvailable': self.margin_available(curr_prices)
            }
            prompt.update(additional)
        print json.dumps(serialize_dict(prompt), indent=4, sort_keys=True)
    def clear_all(self):
        """
        Reset account to its initial state (balance and all records).
        :return:
        """
        self.curr_balance = self.__init_cash
        self.longs, self.shorts, self.closed = [], [], []
        # Historical log
        self.record_nav, self.record_orders = [], []
        self.record_longs, self.record_shorts = [], []
    def nav(self, curr_prices):
        """
        Calculate net asset value (balance + unrealized pnl of open positions).
        :param curr_prices: dict; {instrument: current price} pairs.
        :return: double; nav.
        """
        # list of unrealized pnl:
        unrealized_pnl = map_to_unrealized_pnl(
            positions=(self.longs+self.shorts),
            prices=curr_prices)
        return self.curr_balance + sum(unrealized_pnl)
    def margin_used(self, curr_prices):
        """
        Calculate margin used: total holding value times the margin rate.
        :param curr_prices: dict; {instrument: current price} pairs.
        :return: double; margin used.
        """
        holding_values = map_to_holding_values(
            self.longs+self.shorts, curr_prices)
        return sum(holding_values) * self.__margin_rate
    def margin_available(self, curr_prices):
        """
        Calculate margin available, floored at zero.
        :param curr_prices: dict; {instrument: current price} pairs.
        :return: double; margin available.
        """
        return max(0, (self.nav(curr_prices)-self.margin_used(curr_prices)))
    def __to_sell(self):
        """
        Pop a long position (most recently opened first).
        :return: Position object; or None, if self.longs is [].
        """
        return self.longs.pop() if self.longs else None
    def __to_fill(self):
        """
        Pop a short position (most recently opened first).
        :return: Position object; or None, if self.shorts is [].
        """
        return self.shorts.pop() if self.shorts else None
    def __check_margin(self, order, curr_prices):
        """
        Check margin for an open order.
        :param order: Order object (buy/short).
        :param curr_prices: dict; {instrument: current price} pairs.
        :return: boolean; enough margin or not.
        """
        # NOTE(review): this requires the full order cash flow as available
        # margin, not cash_flow * margin_rate as margin_used() would suggest;
        # presumably a deliberately conservative check -- confirm.
        return (self.margin_available(curr_prices) >=
                order.cash_flow())
    def handle_mkt_order(self, order, curr_prices=-1):
        """
        Handle the order.
        :param order: Order object; order to be handled.
        :param curr_prices: dict; {instrument: current price} pairs.
            <Default>: -1; only one instrument, export pair from order.
        :return: TradingExecuteFlag(Enum[2]) object.
            - TradingExecuteFlag.good: trade was executed.
            - TradingExecuteFlag.bad: trade was not executed.
        """
        if order.direction == OrderType.none: # If NONE order:
            return TradingExecuteFlag.bad
        # Account has single instrument.
        if curr_prices == -1:
            curr_prices = order.export_price_dict()
        # Buy/Short, prepare to open position.
        if order.direction in [OrderType.buy, OrderType.short]:
            # Check margin.
            if self.__check_margin(order, curr_prices):
                if order.direction == OrderType.short:
                    self.shorts.append(Position(order))
                elif order.direction == OrderType.buy:
                    self.longs.append(Position(order))
                return TradingExecuteFlag.good
            else:
                # Fail margin check.
                return TradingExecuteFlag.bad
        # Sell/Fill, prepare to close position.
        else:
            if order.direction == OrderType.fill:
                p = self.__to_fill()
            elif order.direction == OrderType.sell:
                p = self.__to_sell()
            else:
                p = None
            if p:
                this_realized_pnl = p.close(order)
                self.closed.append(p)
                # Update current balance.
                self.curr_balance += this_realized_pnl
                return TradingExecuteFlag.good
            else:
                # Nothing to close: reject the order.
                return TradingExecuteFlag.bad
    def record_nav_ts(self, curr_prices):
        """
        Write current nav to record_nav timeseries.
        :param curr_prices: dict; {instrument: current price} pairs.
        :return:
        """
        # there won't be empty record.
        self.record_nav.append(self.nav(curr_prices))
    def record_executed_order(self, order):
        """
        Append an executed order to the order log.
        :param order: Order object.
        :return:
        """
        self.record_orders.append(order)
    def record_position_ts(self, curr_prices):
        """
        Write current volumes, holding values
        to record_longs/shorts timeseries.
        :param curr_prices: dict; {instrument: current price} pairs.
        :return:
        """
        # Initialize as empty.
        long_record_one_row = {
            'instrument': None,
            'direction': PositionType.long,
            'volume': 0,
            'value': 0
        }
        short_record_one_row = {
            'instrument': None,
            'direction': PositionType.short,
            'volume': 0,
            'value': 0
        }
        # If opening positions list is non-empty:
        if self.longs:
            long_record_one_row['volume'] = sum(
                [p.body['volume'] for p in self.longs])
            long_record_one_row['value'] = sum(
                map_to_holding_values(self.longs, curr_prices))
        if self.shorts:
            short_record_one_row['volume'] = sum(
                [p.body['volume'] for p in self.shorts])
            short_record_one_row['value'] = sum(
                map_to_holding_values(self.shorts, curr_prices))
        # Write to main list.
        self.record_longs.append(long_record_one_row)
        self.record_shorts.append(short_record_one_row)
    def export_executed_orders(self):
        """
        Export a frame of all executed orders.
        :return: pd.DataFrame object.
        """
        all_executed_orders = [order.body for order in self.record_orders]
        return pd.DataFrame(all_executed_orders)
    def export_positions(self):
        """
        Export a frame of all Position objects.
        :return: pd.Dataframe object.
        """
        all_closed_positions = [p.body for p in self.closed]
        return pd.DataFrame(all_closed_positions)
    def export_long_position_ts(self):
        """
        Export time series of long position summaries.
        :return: pd.Dataframe object.
        """
        return pd.DataFrame(self.record_longs)
    def export_short_position_ts(self):
        """
        Export time series of short position summaries.
        :return: pd.Dataframe object.
        """
        return pd.DataFrame(self.record_shorts)
# ----------------------------------------------------------------------
# Backtest Kernel
class Kernel:
    """
    Backtest kernel: feeds bar data through a strategy against an Account.
    """
    def __init__(self, data, account):
        """
        :param data: pd.Dataframe object; bar data.
        :param account: Account object;
        :return:
        """
        if self.__type_check(account):
            self.data = data
            self.account = account
    @staticmethod
    def __type_check(account):
        """
        Type check for constructor.
        :return: True; raises KernelBacktestError otherwise.
        """
        if account.__class__ != Account:
            msg = '[KERNEL::Kernel]: Unable to construct backtest kernel. '
            raise KernelBacktestError(msg)
        return True
    @classmethod
    def naive(cls, data):
        """
        Naive one-instrument kernel using Account.usd_std()
        :return: Kernel object.
        """
        return cls(data, Account.usd_std())
    def __clear_all(self):
        """
        Clear all account records.
        :return:
        """
        self.account.clear_all()
    def log(self, curr_prices, order):
        """
        Record one snapshot: nav plus position summaries.
        :param curr_prices: dict; {instrument: current price} pairs.
        :param order: Order object; kept for interface compatibility,
            currently unused here.
        :return:
        """
        self.account.record_nav_ts(curr_prices)
        self.account.record_position_ts(curr_prices)
    def run_naive(self, strategy):
        """
        Run backtest on strategy for <single instrument>.
        :param strategy: Strategy object.
        :return: list; recorded nav time series.
        """
        # Clear all records before running.
        self.__clear_all()
        instrument = strategy.instrument
        close_col = BarColNames.close.value
        time_col = BarColNames.time.value
        # Distribute bars one by one.
        for _, bar in self.data.iterrows():
            curr_prices = {instrument: bar[close_col]}
            # Run strategy logic on this bar.
            direction, volume = strategy.on_bar(bar)
            order = Order(instrument=instrument,
                          direction=direction,
                          time=bar[time_col],
                          price=bar[close_col],
                          volume=volume)
            # Let the account attempt the trade, then log the snapshot.
            executed = self.account.handle_mkt_order(order, curr_prices)
            self.log(curr_prices, order)
            # Only record executed orders.
            if executed == TradingExecuteFlag.good:
                self.account.record_executed_order(order)
        return self.account.record_nav
    def export_positions(self):
        """
        :return: pd.DataFrame of all closed positions.
        """
        return self.account.export_positions()
    def export_executed_orders(self):
        """
        :return: pd.DataFrame of all executed orders.
        """
        return self.account.export_executed_orders()
# ----------------------------------------------------------------------
# Strategy Template.
class StrategyTemplate:
    """
    Strategy Template object: fast/slow moving-average crossover example.
    """
    def __init__(self, fast, slow, instrument='EUR_USD'):
        """
        :param fast: fast moving-average window length.
        :param slow: slow moving-average window length.
        :param instrument: instrument identifier.
        """
        self.instrument = instrument
        self.history = []
        self.slow = slow
        self.fast = fast
        self.has_long = 0
        self.open_price = 0
        self.take_profit = 0
    def on_bar(self, bar):
        """
        Receive one bar, return signal, volume.
        :param bar: dict;
        :return: (OrderType(Enum), volume) pair.
        """
        self.history.append(bar)
        close_key = BarColNames.close.value
        curr_price = bar[close_key]
        # ---------------------------- #
        slow_ma = np.mean([b[close_key] for b in self.history[-self.slow:]])
        fast_ma = np.mean([b[close_key] for b in self.history[-self.fast:]])
        if fast_ma > slow_ma and not self.has_long:
            # Golden cross while flat: open a long.
            self.has_long = 1
            self.open_price = curr_price
            return OrderType.buy, 10000
        if fast_ma > slow_ma and curr_price - self.open_price >= 0.01:
            # take profit
            self.has_long = 1
            self.open_price = 0
            return OrderType.sell, 10000
        if curr_price - self.open_price <= -0.005:
            # stop loss
            self.has_long = 1
            self.open_price = 0
            return OrderType.sell, 10000
        elif fast_ma < slow_ma:
            # Dead cross: flatten.
            self.has_long = 0
            return OrderType.sell, 10000
        # ---------------------------- #
        return OrderType.none, 0
| zedyang/oaForex | kernel.py | Python | mit | 24,595 |
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import functools
import json
try:
import falcon
except ImportError:
import fake_falcon as falcon
import jsonschema
import pecan
from poppy.transport.validators.stoplight import decorators
from poppy.transport.validators.stoplight import exceptions
def req_accepts_json_pecan(request, desired_content_type='application/json'):
    """Raise ValidationFailed unless the request accepts the given type.

    Assumes the pecan transport; for falcon the check would instead be
    request.accept('application/json').
    """
    if request.accept(desired_content_type):
        return
    raise exceptions.ValidationFailed('Invalid Accept Header')
def require_accepts_json_falcon(req, resp, params=None):
    """Raises an exception if the request does not accept JSON
    Meant to be used as a `before` hook.
    :param req: request sent
    :type req: falcon.request.Request
    :param resp: response object to return
    :type resp: falcon.response.Response
    :param params: additional parameters passed to responders
    :type params: dict
    :rtype: None
    :raises: falcon.HTTPNotAcceptable
    """
    # Early return on the happy path; only non-JSON clients are rejected.
    if req.client_accepts('application/json'):
        return
    raise falcon.HTTPNotAcceptable(
        u"""
Endpoint only serves `application/json`; specify client-side"""
        'media type support with the "Accept" header.',
        href=u'http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html',
        href_text=u'14.1 Accept, Hypertext Transfer Protocol -- HTTP/1.1')
class DummyResponse(object):
    """Bare response stub carrying ``code`` and ``message`` attributes."""
    pass


def custom_abort_falcon(error_info=None):
    """Error_handler for with_schema

    Meant to be used with falcon transport.

    :param error_info: a single error or an iterable of validation errors.
    :return: DummyResponse with ``code`` 400 and a JSON ``message`` body.
    """
    # collections.Iterable was removed in Python 3.10; import the ABC from
    # collections.abc with a py2 fallback.
    try:
        from collections.abc import Iterable
    except ImportError:  # Python 2
        from collections import Iterable
    ret = DummyResponse()
    ret.code = 400
    # Normalize a single error into a list. NOTE: strings are iterable and
    # therefore pass through unwrapped (and are iterated per character),
    # matching the original behavior.
    if not isinstance(error_info, Iterable):
        error_info = [error_info]
    details = dict(errors=[{'message': str(getattr(error, "message", error))}
                           for error in error_info])
    ret.message = json.dumps(details)
    return ret
def custom_abort_pecan(errors_info):
    """Error_handler for with_schema

    Meant to be used with pecan transport.

    :param errors_info: a list of validation exceptions
    """
    # TODO(tonytan4ever): gettext support
    messages = [{'message': str(getattr(error, "message", error))}
                for error in errors_info]
    pecan.abort(
        400,
        detail=dict(errors=messages),
        headers={
            'Content-Type': "application/json"})
def with_schema_falcon(request, schema=None):
    """Use to decorate a falcon style controller route

    :param request: A falcon request
    :param schema: a Json schema to validate against
    :raises exceptions.ValidationFailed: on malformed JSON or schema errors.
    """
    if schema is None:
        return
    try:
        data = json.loads(request.body)
    except ValueError:
        # Body is not valid JSON at all.
        raise exceptions.ValidationFailed(
            repr(["Invalid JSON body in request"]))
    errors_list = list(
        jsonschema.Draft3Validator(schema).iter_errors(data))
    if errors_list:
        raise exceptions.ValidationFailed(repr(errors_list))
def with_schema_pecan(request, schema=None, handler=custom_abort_pecan,
                      **kwargs):
    """Used to decorate a Pecan/Flask style controller form validation for
    anything else (e.g., POST | PUT | PATCH ).

    For an HTTP POST or PUT (RFC2616 unsafe methods) request, the schema is
    used to validate the request body.

    :param schema: A JSON schema.
    :param handler: A Function (Error_handler)
    """
    def decorator(f):
        # functools.wraps preserves the wrapped controller's __name__ /
        # __doc__ metadata (the original decorator discarded them).
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            # Only unsafe methods with a schema need body validation.
            if request.method not in ('POST', 'PUT', 'PATCH') or \
                    schema is None:
                return f(*args, **kwargs)
            try:
                data = json.loads(request.body.decode('utf-8'))
            except ValueError:
                return handler(["Invalid JSON body in request"])
            errors_list = list(
                jsonschema.Draft3Validator(schema).iter_errors(data))
            if errors_list:
                return handler(errors_list)
            return f(*args, **kwargs)
        return wrapped
    return decorator
def json_matches_schema_inner(request, schema=None):
    """Validate the request body as JSON, optionally against *schema*.

    :raises exceptions.ValidationFailed: for malformed JSON or schema errors.
    """
    try:
        data = json.loads(request.body.decode('utf-8'))
    except ValueError:
        raise exceptions.ValidationFailed('Invalid JSON string')
    if schema is None:
        return
    errors_list = list(
        jsonschema.Draft3Validator(schema).iter_errors(data))
    if errors_list:
        details = dict(errors=[{
            'message': str(getattr(error, "message", error))}
            for error in errors_list])
        raise exceptions.ValidationFailed(json.dumps(details))
def json_matches_schema(input_schema):
    """Build a validator callable bound to *input_schema*.

    :param input_schema: JSON schema forwarded to json_matches_schema_inner.
    :return: functools.partial; call it with a request to validate.
    """
    return functools.partial(
        json_matches_schema_inner,
        schema=input_schema)
@decorators.validation_function
def is_valid_service_name(service_name):
    # No-op validator: any service name is currently accepted. The
    # stoplight decorator wraps this into a validation function.
    pass
def abort_with_message(error_info):
    """Abort the current pecan request with a 400 and the error's message."""
    detail = getattr(error_info, "message", "")
    pecan.abort(400, detail=detail,
                headers={'Content-Type': "application/json"})
| amitgandhinz/cdn | poppy/transport/validators/helpers.py | Python | apache-2.0 | 6,176 |
"""Support for Freebox devices (Freebox v6 and Freebox mini 4K)."""
from __future__ import annotations
import logging
from typing import Any
from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
DATA_RATE_KILOBYTES_PER_SECOND,
DEVICE_CLASS_TEMPERATURE,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import DeviceInfo
import homeassistant.util.dt as dt_util
from .const import CALL_SENSORS, CONNECTION_SENSORS, DISK_PARTITION_SENSORS, DOMAIN
from .router import FreeboxRouter
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities
) -> None:
    """Set up the sensors."""
    router = hass.data[DOMAIN][entry.unique_id]
    _LOGGER.debug(
        "%s - %s - %s temperature sensors",
        router.name,
        router.mac,
        len(router.sensors_temperature),
    )
    # Temperature sensors are discovered dynamically from the router.
    # (The original dead `entities = []` assignment was removed: it was
    # immediately overwritten by this list.)
    entities = [
        FreeboxSensor(
            router,
            SensorEntityDescription(
                key=sensor_name,
                name=f"Freebox {sensor_name}",
                native_unit_of_measurement=TEMP_CELSIUS,
                device_class=DEVICE_CLASS_TEMPERATURE,
            ),
        )
        for sensor_name in router.sensors_temperature
    ]
    entities.extend(
        FreeboxSensor(router, description) for description in CONNECTION_SENSORS
    )
    entities.extend(
        FreeboxCallSensor(router, description) for description in CALL_SENSORS
    )
    _LOGGER.debug("%s - %s - %s disk(s)", router.name, router.mac, len(router.disks))
    # One sensor per (disk, partition, metric) triple.
    entities.extend(
        FreeboxDiskSensor(router, disk, partition, description)
        for disk in router.disks.values()
        for partition in disk["partitions"]
        for description in DISK_PARTITION_SENSORS
    )
    async_add_entities(entities, True)
class FreeboxSensor(SensorEntity):
    """Representation of a Freebox sensor."""

    # Updates are pushed via the dispatcher signal, never polled.
    _attr_should_poll = False

    def __init__(
        self, router: FreeboxRouter, description: SensorEntityDescription
    ) -> None:
        """Initialize a Freebox sensor."""
        self.entity_description = description
        self._router = router
        self._attr_unique_id = f"{router.mac} {description.name}"

    @callback
    def async_update_state(self) -> None:
        """Update the Freebox sensor."""
        raw_state = self._router.sensors[self.entity_description.key]
        if self.native_unit_of_measurement != DATA_RATE_KILOBYTES_PER_SECOND:
            self._attr_native_value = raw_state
        else:
            # Rate sensors are reported in bytes/s; convert to kB/s.
            self._attr_native_value = round(raw_state / 1000, 2)

    @property
    def device_info(self) -> DeviceInfo:
        """Return the device information."""
        return self._router.device_info

    @callback
    def async_on_demand_update(self):
        """Recompute state and push it to Home Assistant."""
        self.async_update_state()
        self.async_write_ha_state()

    async def async_added_to_hass(self):
        """Register state update callback."""
        self.async_update_state()
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                self._router.signal_sensor_update,
                self.async_on_demand_update,
            )
        )
class FreeboxCallSensor(FreeboxSensor):
    """Representation of a Freebox call sensor."""

    def __init__(
        self, router: FreeboxRouter, description: SensorEntityDescription
    ) -> None:
        """Initialize a Freebox call sensor."""
        super().__init__(router, description)
        self._call_list_for_type = []

    @callback
    def async_update_state(self) -> None:
        """Update the Freebox call sensor."""
        calls = self._router.call_list or []
        call_kind = self.entity_description.key
        # Keep only unseen ("new") calls matching this sensor's call type.
        self._call_list_for_type = [
            call for call in calls if call["new"] and call_kind == call["type"]
        ]
        self._attr_native_value = len(self._call_list_for_type)

    @property
    def extra_state_attributes(self) -> dict[str, Any]:
        """Return device specific state attributes."""
        return {
            dt_util.utc_from_timestamp(call["datetime"]).isoformat(): call["name"]
            for call in self._call_list_for_type
        }
class FreeboxDiskSensor(FreeboxSensor):
    """Representation of a Freebox disk sensor."""

    def __init__(
        self,
        router: FreeboxRouter,
        disk: dict[str, Any],
        partition: dict[str, Any],
        description: SensorEntityDescription,
    ) -> None:
        """Initialize a Freebox disk sensor."""
        super().__init__(router, description)
        self._disk = disk
        self._partition = partition
        self._attr_name = f"{partition['label']} {description.name}"
        # Bug fix: the unique id must be stored in _attr_unique_id (the
        # attribute Entity.unique_id reads). The previous assignment to
        # self._unique_id was dead, leaving every disk sensor with the
        # colliding id set by FreeboxSensor.__init__.
        self._attr_unique_id = f"{self._router.mac} {description.key} {self._disk['id']} {self._partition['id']}"

    @property
    def device_info(self) -> DeviceInfo:
        """Return the device information."""
        return DeviceInfo(
            identifiers={(DOMAIN, self._disk["id"])},
            model=self._disk["model"],
            name=f"Disk {self._disk['id']}",
            sw_version=self._disk["firmware"],
            via_device=(
                DOMAIN,
                self._router.mac,
            ),
        )

    @callback
    def async_update_state(self) -> None:
        """Update the Freebox disk sensor."""
        # Percentage of free space left on the partition.
        self._attr_native_value = round(
            self._partition["free_bytes"] * 100 / self._partition["total_bytes"], 2
        )
| aronsky/home-assistant | homeassistant/components/freebox/sensor.py | Python | apache-2.0 | 5,841 |
#!/usr/bin/python
import os
import sys
import csv
## DJANGO SETUP
##
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'flipside.settings')
import django
django.setup()
##
##
def update_poem(poem, uuid):
    """Compare the stored poem text against *poem* and report mismatches."""
    stored = Poem.objects.get(uuid=uuid)
    if stored.text != poem:
        print("Poem [{}], has bad characters".format(uuid))
    # Persisting the cleaned text is intentionally disabled:
    # stored.text = poem
    # stored.save()
# Scan every poem in the database and report those whose text contains
# non-ASCII characters.
poems = 0
from frbb.models import Poem
for poem in Poem.objects.all():
    # NOTE(review): under Python 3, encode() returns bytes, so the str/bytes
    # comparison inside update_poem would always differ -- this script
    # appears to assume Python 2 (see the /usr/bin/python shebang).
    ascii_only_text = poem.text.encode('ascii', 'ignore')
    update_poem(ascii_only_text, poem.uuid)
    poems += 1
print("Converted {} poems in database".format(poems))
| seriouscamp/frbb | web/tools/convert_text.py | Python | apache-2.0 | 613 |
import numpy as np
import math
import time
class SIFTDescriptor(object):
    """Class for computing SIFT descriptor of the square patch

    Attributes:
        patchSize: size of the patch in pixels
        maxBinValue: maximum descriptor element after L2 normalization. All above are clipped to this value
        numOrientationBins: number of orientation bins for histogram
        numSpatialBins: number of spatial bins. The final descriptor size is numSpatialBins x numSpatialBins x numOrientationBins
    """
    def precomputebins(self):
        """Precompute spatial-bin indices/weights and the Gaussian mask.

        :return: (precomp_bins, precomp_weights, mapping, mask) where
            mapping[by, bx, y, x] is the weight pixel (y, x) contributes
            to spatial bin (by, bx).
        """
        halfSize = int(self.patchSize/2)
        ps = self.patchSize
        sb = self.spatialBins
        step = float(self.spatialBins + 1) / (2 * halfSize)
        # Note: np.float was removed in NumPy 1.24; plain float is identical.
        precomp_bins = np.zeros(2*ps, dtype = np.int32)
        precomp_weights = np.zeros(2*ps, dtype = float)
        precomp_bin_weights_by_bx_py_px_mapping = np.zeros((sb,sb,ps,ps), dtype = float)
        for i in range(ps):
            i1 = i + ps
            x = step * i
            xi = int(x)
            # bin indices (left neighbour at [i], right neighbour at [i1])
            precomp_bins[i] = xi - 1
            precomp_bins[i1] = xi
            # bin weights: bilinear split between the two neighbouring bins
            precomp_weights[i1] = x - xi
            precomp_weights[i] = 1.0 - precomp_weights[i1]
            # truncate out-of-range bins and zero their weights
            if (precomp_bins[i] < 0):
                precomp_bins[i] = 0
                precomp_weights[i] = 0
            if (precomp_bins[i] >= self.spatialBins):
                precomp_bins[i] = self.spatialBins - 1
                precomp_weights[i] = 0
            if (precomp_bins[i1] < 0):
                precomp_bins[i1] = 0
                precomp_weights[i1] = 0
            if (precomp_bins[i1] >= self.spatialBins):
                precomp_bins[i1] = self.spatialBins - 1
                precomp_weights[i1] = 0
        # Accumulate the 2D (y, x) -> (by, bx) bilinear weights.
        for y in range(ps):
            for x in range(ps):
                precomp_bin_weights_by_bx_py_px_mapping[precomp_bins[y], precomp_bins[x], y, x ] += precomp_weights[y]*precomp_weights[x]
                precomp_bin_weights_by_bx_py_px_mapping[precomp_bins[y+ps], precomp_bins[x], y, x ] += precomp_weights[y+ps]*precomp_weights[x]
                precomp_bin_weights_by_bx_py_px_mapping[precomp_bins[y], precomp_bins[x+ps], y, x ] += precomp_weights[y]*precomp_weights[x+ps]
                precomp_bin_weights_by_bx_py_px_mapping[precomp_bins[y+ps], precomp_bins[x+ps], y, x ] += precomp_weights[y+ps]*precomp_weights[x+ps]
        # Downweight pixels far from the patch center with a Gaussian mask.
        mask = self.CircularGaussKernel(kernlen=self.patchSize)
        for y in range(sb):
            for x in range(sb):
                precomp_bin_weights_by_bx_py_px_mapping[y,x,:,:] *= mask
                precomp_bin_weights_by_bx_py_px_mapping[y,x,:,:] = np.maximum(0,precomp_bin_weights_by_bx_py_px_mapping[y,x,:,:])
        return precomp_bins.astype(np.int32),precomp_weights,precomp_bin_weights_by_bx_py_px_mapping,mask
    def __init__(self, patchSize = 41, maxBinValue = 0.2, numOrientationBins = 8, numSpatialBins = 4):
        self.patchSize = patchSize
        self.maxBinValue = maxBinValue
        self.orientationBins = numOrientationBins
        self.spatialBins = numSpatialBins
        self.precomp_bins,self.precomp_weights,self.mapping,self.mask = self.precomputebins()
        self.binaryMask = self.mask > 0
        # Reusable work buffers (gradients, orientation, magnitude, patch).
        self.gx = np.zeros((patchSize,patchSize), dtype=float)
        self.gy = np.zeros((patchSize,patchSize), dtype=float)
        self.ori = np.zeros((patchSize,patchSize), dtype=float)
        self.mag = np.zeros((patchSize,patchSize), dtype=float)
        self.norm_patch = np.zeros((patchSize,patchSize), dtype=float)
        ps = self.patchSize
        sb = self.spatialBins
        ob = self.orientationBins
        self.desc = np.zeros((ob, sb , sb ), dtype = float)
        return
    def CircularGaussKernel(self, kernlen=21):
        """Return a kernlen x kernlen Gaussian weight kernel, zero outside
        the inscribed circle."""
        # NOTE(review): under Python 3 this is float division (e.g. 20.5 for
        # kernlen=41), while precomputebins uses int(patchSize/2). Left
        # unchanged to preserve existing descriptor values -- confirm intent.
        halfSize = kernlen / 2
        r2 = halfSize*halfSize
        sigma2 = 0.9 * r2
        disq = 0
        kernel = np.zeros((kernlen,kernlen))
        for y in range(kernlen):
            for x in range(kernlen):
                disq = (y - halfSize)*(y - halfSize) + (x - halfSize)*(x - halfSize)
                if disq < r2:
                    kernel[y,x] = math.exp(-disq / sigma2)
                else:
                    kernel[y,x] = 0
        return kernel
    def photonorm(self, patch, binaryMask = None):
        """Photometrically normalize the patch to mean 128, std 50,
        clipped to [0, 255]; statistics are taken inside binaryMask if given."""
        if binaryMask is not None:
            std1_coef = 50. / np.std(patch[binaryMask])
            mean1 = np.mean(patch[binaryMask])
        else:
            std1_coef = 50. / np.std(patch)
            mean1 = np.mean(patch)
        # Guard against (near-)constant patches blowing up the scale.
        if std1_coef >= 50. / 0.000001:
            std1_coef = 50.0
        self.norm_patch = 128. + std1_coef * (patch - mean1)
        self.norm_patch = np.clip(self.norm_patch, 0.,255.)
        return
    def getDerivatives(self,image):
        """Fill self.gx / self.gy with centered finite differences."""
        # [-1 1] kernel for borders
        self.gx[:,0] = image[:,1] - image[:,0]
        self.gy[0,:] = image[1,:] - image[0,:]
        self.gx[:,-1] = image[:,-1] - image[:,-2]
        self.gy[-1,:] = image[-1,:] - image[-2,:]
        # [-1 0 1] kernel for the rest
        self.gy[1:-2,:] = image[2:-1,:] - image[0:-3,:]
        self.gx[:,1:-2] = image[:,2:-1] - image[:,0:-3]
        self.gx *= 0.5
        self.gy *= 0.5
        return
    def samplePatch(self,grad,ori):
        """Accumulate gradient magnitudes into self.desc, splitting each
        pixel between its two nearest orientation bins and (via the
        precomputed mapping) its neighbouring spatial bins."""
        ps = self.patchSize
        sb = self.spatialBins
        ob = self.orientationBins
        # Map orientation to a continuous bin coordinate in [0, ob).
        o_big = float(ob) * (ori + 2.0*math.pi) / (2.0 * math.pi)
        bo0_big = np.floor(o_big)
        wo1_big = o_big - bo0_big
        bo0_big = bo0_big % ob
        bo1_big = (bo0_big + 1.0) % ob
        wo0_big = 1.0 - wo1_big
        wo0_big *= grad
        wo0_big = np.maximum(0, wo0_big)
        wo1_big *= grad
        wo1_big = np.maximum(0, wo1_big)
        ori_weight_map = np.zeros((ob,ps,ps))
        for o in range(ob):
            relevant0 = np.where(bo0_big == o)
            ori_weight_map[o, relevant0[0], relevant0[1]] = wo0_big[relevant0[0], relevant0[1]]
            relevant1 = np.where(bo1_big == o)
            ori_weight_map[o, relevant1[0], relevant1[1]] += wo1_big[relevant1[0], relevant1[1]]
        for y in range(sb):
            for x in range(sb):
                self.desc[:,y,x] = np.tensordot( ori_weight_map, self.mapping[y,x,:,:])
        return
    def describe(self,patch, userootsift = False, flatten = True, show_timings = False):
        """Compute the SIFT descriptor of a (patchSize x patchSize) patch.

        :param patch: 2D array of pixel intensities.
        :param userootsift: apply the RootSIFT transform (L1-normalize + sqrt).
        :param flatten: return a flat vector instead of (ob, sb, sb).
        :param show_timings: print per-stage timings.
        :return: int32 descriptor scaled to [0, 255].
        """
        t = time.time()
        self.photonorm(patch, binaryMask = self.binaryMask)
        if show_timings:
            print('photonorm time = ', time.time() - t)
        t = time.time()
        self.getDerivatives(self.norm_patch)
        if show_timings:
            print('gradients time = ', time.time() - t)
        t = time.time()
        self.mag = np.sqrt(self.gx * self.gx + self.gy*self.gy)
        self.ori = np.arctan2(self.gy,self.gx)
        if show_timings:
            print('mag + ori time = ', time.time() - t)
        t = time.time()
        self.samplePatch(self.mag,self.ori)
        if show_timings:
            print('sample patch time = ', time.time() - t)
        t = time.time()
        self.desc /= np.linalg.norm(self.desc.flatten(),2)
        self.desc = np.clip(self.desc, 0,self.maxBinValue)
        self.desc /= np.linalg.norm(self.desc.flatten(),2)
        if userootsift:
            # Bug fix: the original referenced an undefined name
            # ``unnorm_desc`` here (NameError whenever userootsift=True);
            # self.desc is the descriptor being normalized.
            self.desc = np.sqrt(self.desc / np.linalg.norm(self.desc.flatten(), 1))
        if show_timings:
            print('clip and norm time = ', time.time() - t)
        t = time.time()
        if flatten:
            return np.clip(512. * self.desc.flatten() , 0, 255).astype(np.int32)
        else:
            return np.clip(512. * self.desc , 0, 255).astype(np.int32)
| spongezhang/vlb | python/features/numpy_sift.py | Python | bsd-2-clause | 7,728 |
# -*- coding: utf-8 -*-
# vim: set ts=4
from django.db import models
class Structure(models.Model):
    """Organizational structure node, identified by its number; structures
    may be nested via the optional ``parent`` link."""

    class Meta:
        app_label = 'DUlSine'
        ordering = ('numero', )

    numero = models.IntegerField(primary_key = True)
    nom = models.CharField(max_length = 50)
    adresse = models.CharField(max_length = 500)
    # Explicit on_delete: CASCADE matches the implicit pre-Django-2.0
    # default and is mandatory on Django >= 2.0.
    parent = models.ForeignKey('Structure', null = True, blank = True,
                               on_delete = models.CASCADE)

    def __unicode__(self):
        return u"%d : %s" % (self.numero, self.nom)
| DUlSine/DUlSine | DUlSine/models/structure.py | Python | agpl-3.0 | 478 |
import module
module.
| vheon/ycmd | ycmd/tests/python/testdata/project/__main__.py | Python | gpl-3.0 | 23 |
# -*- coding: utf-8 -*-
from .app import PersonalAccessTokenManager
| soasme/flask-personal-access-token | flask_personal_access_token/__init__.py | Python | mit | 69 |
"""Common profiles are defined here to be easily used within a project using --profile {name}"""
from typing import Any, Dict
# Settings that match the "black" code formatter's import style.
black = {
    "multi_line_output": 3,
    "include_trailing_comma": True,
    "force_grid_wrap": 0,
    "use_parentheses": True,
    "ensure_newline_before_comments": True,
    "line_length": 88,
}
# Settings matching the Django project's import conventions.
django = {
    "combine_as_imports": True,
    "include_trailing_comma": True,
    "multi_line_output": 5,
    "line_length": 79,
}
# Settings matching PyCharm's default import optimiser output.
pycharm = {
    "multi_line_output": 3,
    "force_grid_wrap": 2,
    "lines_after_imports": 2,
}
# Settings matching the Google Python style guide's import rules.
google = {
    "force_single_line": True,
    "force_sort_within_sections": True,
    "lexicographical": True,
    "single_line_exclusions": ("typing",),
    "order_by_type": False,
    "group_by_package": True,
}
# Settings matching OpenStack's import conventions.
open_stack = {
    "force_single_line": True,
    "force_sort_within_sections": True,
    "lexicographical": True,
}
# Settings matching the Plone project's import conventions.
plone = {
    "force_alphabetical_sort": True,
    "force_single_line": True,
    "lines_after_imports": 2,
    "line_length": 200,
}
# Settings matching the attrs project's import conventions.
attrs = {
    "atomic": True,
    "force_grid_wrap": 0,
    "include_trailing_comma": True,
    "lines_after_imports": 2,
    "lines_between_types": 1,
    "multi_line_output": 3,
    "use_parentheses": True,
}
# Settings matching the hug project's import conventions.
hug = {
    "multi_line_output": 3,
    "include_trailing_comma": True,
    "force_grid_wrap": 0,
    "use_parentheses": True,
    "line_length": 100,
}
# Registry mapping the --profile {name} argument to its settings dict.
profiles: Dict[str, Dict[str, Any]] = {
    "black": black,
    "django": django,
    "pycharm": pycharm,
    "google": google,
    "open_stack": open_stack,
    "plone": plone,
    "attrs": attrs,
    "hug": hug,
}
| TeamSPoon/logicmoo_workspace | packs_web/butterfly/lib/python3.7/site-packages/isort/profiles.py | Python | mit | 1,601 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
""" Tests for distribution util functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf # pylint: disable=g-bad-import-order
from official.utils.misc import distribution_utils
class GetDistributionStrategyTest(tf.test.TestCase):
  """Tests for get_distribution_strategy."""

  def test_one_device_strategy_cpu(self):
    """0 GPUs: a single-replica strategy pinned to the CPU."""
    ds = distribution_utils.get_distribution_strategy(0)
    # assertEquals is a deprecated alias of assertEqual (removed in
    # Python 3.12); use the canonical spelling.
    self.assertEqual(ds.num_replicas, 1)
    self.assertEqual(len(ds.worker_devices), 1)
    self.assertIn('CPU', ds.worker_devices[0])

  def test_one_device_strategy_gpu(self):
    """1 GPU: a single-replica strategy pinned to the GPU."""
    ds = distribution_utils.get_distribution_strategy(1)
    self.assertEqual(ds.num_replicas, 1)
    self.assertEqual(len(ds.worker_devices), 1)
    self.assertIn('GPU', ds.worker_devices[0])

  def test_mirrored_strategy(self):
    """Multiple GPUs: one mirrored replica per GPU device."""
    ds = distribution_utils.get_distribution_strategy(5)
    self.assertEqual(ds.num_replicas, 5)
    self.assertEqual(len(ds.worker_devices), 5)
    for device in ds.worker_devices:
      self.assertIn('GPU', device)
class PerDeviceBatchSizeTest(tf.test.TestCase):
  """Tests for per_device_batch_size."""

  def test_batch_size(self):
    """Global batch size divides evenly across 0, 1 and 7 GPUs."""
    # assertEquals is a deprecated alias of assertEqual (removed in
    # Python 3.12); use the canonical spelling.
    self.assertEqual(
        distribution_utils.per_device_batch_size(147, num_gpus=0), 147)
    self.assertEqual(
        distribution_utils.per_device_batch_size(147, num_gpus=1), 147)
    self.assertEqual(
        distribution_utils.per_device_batch_size(147, num_gpus=7), 21)

  def test_batch_size_with_remainder(self):
    """A batch size not divisible by the GPU count raises ValueError."""
    with self.assertRaises(ValueError):
      distribution_utils.per_device_batch_size(147, num_gpus=5)
if __name__ == "__main__":
  # Dispatch to TensorFlow's unittest-compatible test runner.
  tf.test.main()
| mlperf/training_results_v0.5 | v0.5.0/google/cloud_v3.8/resnet-tpuv3-8/code/resnet/model/models/official/utils/misc/distribution_utils_test.py | Python | apache-2.0 | 2,394 |
#
# FLOGGER
#
# This program reads records from the OGN network processing only
# those received from a specified site and registration marks, eg aircraft belonging to
# a specific club.
# It writes each record to a database and at the end of each day process
# them to determine the flight times of each flight for each machine.
# Phase 1 will just collect the data.
# Phase 2 will process the data into a new table
# Phase 3 will then format that information with the intention of
# it being used to be checked against the manual log books.
# Phase 4 will remove old flight and track file older than a certain date
# The intention is that it will collect the data between the hours of daylight,
# producing the summary at the end of the day.
# This program could be run on a Raspberry Pi as it is so low powered
#
# Altitude in metres.
# Land speed in km/h.
# Latitude, west is negative decimal degrees.
# Longitude, south is negative decimal degrees.
#
# This program is covered by the GNU GENERAL PUBLIC LICENSE.
# See the file 'LICENSE' for details
#
#
# 20150312: First working version
# Usage: Run flogger.py to collect the daily flight data then
# run process.py which processes the raw data into a table flights in the database flogger.sgl3
# This first version is very experimental, it is proof of concept and processes. The code needs to
# be 'improved'.
# To be done: 1) The program should be run each day between 0900 and sunset. This should be handled by cron
# to start the program at a time specified in settings which then calculates sunrise and suspends
# until then. Once running the program determines sunset and stopping itself at that time. It also needs
# to handle power outages (not sure how at the moment)
# 2) The Flarm code to registration code needs to addressed using OGNs new database.
# 20150505 Second working version
# Only need to run flogger.py, it now handles collection of data during daylight hours and processes
# after sunset (assumes gliders only fly during daylight hours)
# Now reads aircraft registration data from Flarmnet to build own internal table
# 20150515 Third working version
# 1) APRS user and APRS passcode have to be supplied on the command line and not in settings
# 2) Changes to flogger_process_log_old to correct errors - still in testing
#
# 20150520 Fourth working version (V0.1.0)
# 1) On aircraft stop set altitude to initial value else highest value for any flight of the day
# will be the one compared against as the maximum and not the max for a specific flight.
# Bug 20150520-1 Assigned
# 2) Flights table only contains flights for one day and not all previous days flights
# Bug 20150520-2 Assigned
#
# 20150527 Fifth working version (V0.1.1)
# Test version for:
# 1) Bug 20150520-1
# 2) Bug 20150520-2
#
# 20150529 First beta test version (V0.2.0)
# 1) Bug 20150520-1 Solved
# 2) Bug 20150520-2 Solved
# 3) Enhancement - dump days flights table as .csv file
#
# 20150530 Correction to first beta test version (V0.2.1)
# 1) Correction to dump flights to .csv - to make it work!
#
# 20150604 Added enhancements to version V0.2 (V0.2.2)
# 1) Allowance for short duration flight
# 2) Use of geocoding to determine airfield position data - proposed by D.Spreitz
#
# To be done: 1) Tidy up code, remove all redundant testing comments
# 2) A lot more testing - some features might still not work!
# 3) Consider how this may be run as a service with standard start, stop etc options
# 4) Consider adding full logging with levels
# 5) Review the algorithm to determine if aircraft is on the ground. At the moment it determines
# this by the GPS ground speed being zero (ie below a defined value); the ground speed could be zero
# if the wind speed and airspeed are the same but opposite, eg when ridge flying. The algorithm could use
# the altitude as well, eg if ground speed is zero but altitude is greater than home airfield altitude then
# 'we're flying'. Note this still has issues!
# 6) Need to consider sending 'keep alives' when in the sleep state. Solved, not needed
#				7) There's a problem concerning character codes when building the flarm database which needs solving; it only shows up in 1 record
#
# 20160208 1) Add modification to sequence tracks per flight by flarm record timestamp. Using multiple beacons can result in
# track points that are out of sequence when based on order received due to Internet time delays, hence
# use the GPS timestamp recorded in the data taken and sent by flarm (assumes timestamp is from Flarm!).
# 2) Also added graceful exit on Cntrl-C
#
# 20160323 1) Added optional output of track data in IGC format
# 2) Added optional deletion of old flight .csv and track .csv/.igc files
#
# 20160514 1) Use $ pipreqs --force /path/to/project to generate requirements.txt for pip install
#
# 20160518 1) Added attempt to load earlier version Linux libfap if current fails
#
# 20161026 1) Added flogger_find_tug code. This tries to determine which tug, if any, launched a particular glider.
# Note this doesn't always get the right result, but then nor does OGN Flight Log! This could be due to tugs
# sometimes powering down if a launch is not imminent. Gliders are likely to be always powered on and Flarm operating.
# Hence when it becomes time to launch the tug powers up, Flarm is now on but takes some time for the signal to be
# acquired and put onto and processed by the APRS system. It is therefore possible for the launch to take place
# with the take-off times for tug and glider to be too far displaced (from the APRS data) for flogger-find-tug
# to determine the launch has happened. The solution is possibly to increase the time delta used between glider and
# tug take-off but this could result in false positives, some fine tuning maybe needed. Interested to know if
# OGN Flight Log has similar reasoning.
#
# 20161108 1) Rewrote phase 2 flight log processing to be much simpler. Phase 2 puts flights into the flight_group
# table such that all flights by a single aircraft have the same group id. This enables each flight to
# be determined to be a distinct flight from its predecessor or not.
#
# 20170201: 1) Added simple function test_YorN to test for Y|y or N|n
# 2) Started developing using Eclipse Neon.2 (4.6.2)
#
import socket
#from libfap import *
#import flogger_settings
import string
import datetime
import time
import sqlite3
import pytz
from datetime import timedelta
import sys
from flarm_db import flarmdb
from pysqlite2 import dbapi2 as sqlite
from open_db import opendb
import ephem
#from flogger_process_log_old import process_log
#from flogger_process_log import process_log
import argparse
from flogger_dump_flights import dump_flights
from flogger_dump_tracks import dump_tracks2
from flogger_get_coords import get_coords
from flogger_signals import sig_handler
import signal
import os
import os.path
from flogger_dump_IGC import dump_IGC
from flogger_email_log import email_log2
from flogger_landout import landout_check
from geopy.distance import vincenty
from flogger_email_msg import email_msg
from flogger_find_tug import find_tug
from flogger_test_YorN import test_YorN
from flogger_gui import *
from flogger_settings import *
from threading import Thread
from flogger_aprs_parser import *
#
# This added to make it simpler after going to gui version
#
global settings
class flogger3(MyApp):
    def __init__(self, interval=1):
        # Initialise the flogger3 wrapper.
        # NOTE(review): 'interval' is accepted but never used, and the
        # MyApp superclass __init__ is not called -- confirm intentional.
        print "init flogger3"
        print "flogger3 initialized"
        return
    def flogger_run(self, settings):
        # Launch flogger_start(settings) on a background daemon thread so
        # the APRS collection loop runs without blocking the GUI.
        # settings: configuration object read by the worker thread.
        print "flogger_run called"
        print "settings.FLOGGER_SMTP_SERVER_URL: ", settings.FLOGGER_SMTP_SERVER_URL
#        print "settings.FLOGGER_SMTP_SERVER_PORT: ", settings.FLOGGER_SMTP_SERVER_PORT
#        print "settings.FLOGGER_DB_SCHEMA: ", settings.FLOGGER_DB_SCHEMA
        self.thread = Thread(target=self.flogger_start, name= "flogger", args=(settings,))
        print "Thread setup"
        self.thread.daemon = True                            # Daemonize thread: it dies with the main process
        self.thread.start()
        print "flogger thread running"
        return
    def floggerStop(self):
        # Stop hook: currently only logs the call. The worker thread is a
        # daemon and is not joined or signalled here.
        print "floggerStop called"
#        libfap_cleanup()
        return
# def flogger_start(self, settings):
# def flogger_start(self, settings):
def flogger_start(self, local_settings):
print "flogger_start called\n"
settings = local_settings
# print "settings.FLOGGER_SMTP_SERVER_URL: ", settings.FLOGGER_SMTP_SERVER_URL
# print "settings.FLOGGER_SMTP_SERVER_PORT: ", settings.FLOGGER_SMTP_SERVER_PORT
# print "settings.FLOGGER_DB_SCHEMA: ", settings.FLOGGER_DB_SCHEMA
prev_vals = {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0}
nprev_vals = {"G-CKLW": {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0, 'maxA': 0},
"G-CKFN": {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0, 'maxA': 0}
}
values = {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0}
nvalues = {"G-CKLW": {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0, 'maxA': 0},
"G-CKFN": {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0, 'maxA': 0}
}
L_SMALL = float(0.001) # Small latitude or longitude delta of a 0.001 degree
A_SMALL = float(0.01) # Small altitude delta of 0.01 a metre, ie 1cm
V_SMALL = float(settings.FLOGGER_V_SMALL) # Small velocity delta of 10.0 kph counts as zero ie not moving
V_TAKEOFF_MIN = float(settings.FLOGGER_V_TAKEOFF_MIN)
V_LANDING_MIN = float(settings.FLOGGER_V_LANDING_MIN)
frst_time = False
AIRFIELD = "SuttonBnk"
flight_no = {} # A dictionary {callsign: flight_no}
track_no = {} # A dictionary {callsign: track_no}
# Coded 001-099: Gliders,
# 101-199: Tugs,
# 201-299: Motor Gliders,
# 301-399: Other
aircraft = {"G-CKLW": 1, "G-CKLN": 2, "G-CJVZ": 3, "G-CHEF": 4, "G-CKFN": 5,
"G-CHVR": 6, "G-CKJH": 7, "G-CKRN": 8, "G-CGBK": 9, "G-CDKC": 10,
"G-BFRY": 101, "G-BJIV": 102, "G-MOYR": 103,
"G-OSUT": 201,
"FLRDDF9C4": 301, "FLRDDE5FC": 302, "FLRDDBF13": 303, "FLRDDA884": 304, "FLRDDA886": 305, "FLRDDACAE": 306, "FLRDDA7E9": 307,
"FLRDDABF7": 308, "FLRDDE671": 309}
def CheckPrev(callsignKey, dataKey, value):
print "CheckPrev if callsign in nprev_vals: ", callsignKey, " key: ", dataKey, " Value: ", value
if nprev_vals.has_key(callsignKey) == 1:
print "nprev_vals already has entry: ", callsignKey
else:
print "nprev_vals doesn't exist for callsignKey: ", callsignKey
nprev_vals[callsignKey] = {}
nprev_vals[callsignKey] = {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0, 'maxA': 0}
nprev_vals[callsignKey][dataKey] = value
print "nprev_vals for callsignKey: ", callsignKey, " is: ", nprev_vals[callsignKey]
# print "nprev_vals is now: ", nprev_vals
return
def CheckVals(callsignKey, dataKey, value):
print "CheckVals if callsign in nvalues: ", callsignKey, " key: ", dataKey, " Value: ", value
if nvalues.has_key(callsignKey) == 1:
print "nvalues already has entry: ", callsignKey
else:
print "nvalues doesn't exist for callsignKey: ", callsignKey
nvalues[callsignKey] = {}
nvalues[callsignKey] = {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0, 'maxA': 0}
nvalues[callsignKey][dataKey] = value
print "nvalues for callsignKey: ", callsignKey, " is: ", nvalues[callsignKey]
# print "nvalues is now: ", nvalues
return
def isDayLight ():
return True
def fleet_check(call_sign):
if aircraft.has_key(call_sign):
return True
else:
return False
def comp_vals(set1, set2):
# Works out if the difference in positions is small and both speeds are close to zero
# Return True is yes and False if no
# Set1 are new values, set2 old values
print "Set1 value for key latitude is: ", set1["latitude"], " value: ", float(set1["latitude"])
# lat1 = float(set1["latitude"])
# lat2 = float(set2["latitude"])
delta_latitude = float(set1["latitude"]) - float(set2["latitude"])
delta_longitude = float(set1["longitude"]) - float(set2["longitude"])
delta_altitude = float(set1["altitude"]) - float(set2["altitude"])
delta_speed = float(set1["speed"]) - float(set2["speed"])
print "Delta positions. Lat: ", delta_latitude, " Long: ", delta_longitude, " Alt: ", delta_altitude, " Speed: ", delta_speed
# if (delta_latitude < L_SMALL) and (delta_longitude < L_SMALL) and (delta_altitude < A_SMALL) and (delta_speed < V_SMALL):
if delta_speed <> 0.0:
print "Delta speed not zero, check others"
# if (delta_latitude == 0.0) and (delta_longitude == 0.0) and (delta_altitude == 0.0) and (delta_speed == 0.0):
if (delta_latitude == 0.0) and (delta_longitude == 0.0) and (delta_altitude == 0.0):
print "Positions same"
return True
else:
print "Positions different"
return False
else:
print "Delta speed zero, return same"
return True
def set_keepalive(sock, after_idle_sec=1, interval_sec=3, max_fails=5):
"""Set TCP keepalive on an open socket.
It activates after 1 second (after_idle_sec) of idleness,
then sends a keepalive ping once every 3 seconds (interval_sec),
and closes the connection after 5 failed ping (max_fails), or 15 seconds
"""
print "set_keepalive for idle after: ", after_idle_sec
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, after_idle_sec)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, interval_sec)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, max_fails)
return
def is_dst(zonename):
# Determine if in daylight
tz = pytz.timezone(zonename)
now = pytz.utc.localize(datetime.utcnow())
return now.astimezone(tz).dst() != timedelta(0)
def fleet_check_new(callsign):
#
# This has become a little confusing! If FLOGGER_FLEET_CHECK == n|N then FLOGGER_AIRFIELD_NAME is not used in
# the flarm_db search so a non-fleet aircraft can be found, but the later code checks whether the aircraft
# has taken off at FLOGGER_AIRFIELD_NAME; if it hasn't it won't be included in the flights, if it has it will.
#
# This logic and code needs to be re-thought!
# Note, there is a difference between aircraft registered to a location and in a designated 'fleet' for
# that location and whether the aircraft has taken off from a location.
# The fleet_check is intended to check whether an aircraft is a member of a designated fleet, not whether
# it has taken off from the designated location. The intention if the fleet check is to enable recording only
# flights undertaken by the club fleet.
#
print "In fleet check for: ", callsign
# cursor.execute('''SELECT ROWID FROM aircraft WHERE registration =? or flarm_id=? ''', (callsign,callsign,))
# row = cursor.fetchone()
# flarm_id = callsign[3:]
# print "search for flarm_id: ", flarm_id
# cursor.execute('''SELECT ROWID FROM flarm_db WHERE flarm_id =?''', (flarm_id,))
# if settings.FLOGGER_FLEET_CHECK == "N" or settings.FLOGGER_FLEET_CHECK == "n":
if not test_YorN(settings.FLOGGER_FLEET_CHECK):
print "Fleet Check: ", settings.FLOGGER_FLEET_CHECK
fleet_name = "Fleet Name: Not used"
cursor.execute('''SELECT ROWID, registration FROM flarm_db WHERE registration =? OR flarm_id =? ''', (callsign,callsign[3:],))
else:
print "Fleet Check for Airfield: ", settings.FLOGGER_AIRFIELD_NAME
fleet_name = settings.FLOGGER_AIRFIELD_NAME
cursor.execute('''SELECT ROWID FROM flarm_db WHERE registration =? OR flarm_id =? AND airport=?''', (callsign,callsign[3:],settings.FLOGGER_AIRFIELD_NAME,))
#cursor.execute('''SELECT ROWID FROM flarm_db WHERE registration =? OR flarm_id =? AND airport=?''', (callsign,callsign[3:],settings.FLOGGER_AIRFIELD_NAME,))
row1 = cursor.fetchone()
if row1 == None:
print "Registration not found in flarm_db: ", callsign, " for: ", fleet_name
return False
else:
print "Aircraft: ", callsign, " found in flarm db at: ", row1[0], " for: ", fleet_name
reg = callsign_trans(callsign)
# if settings.FLOGGER_FLEET_CHECK <> "N":
print "settings.FLOGGER_FLEET_CHECK: ", settings.FLOGGER_FLEET_CHECK
# if not test_YorN(settings.FLOGGER_FLEET_CHECK):
if test_YorN(settings.FLOGGER_FLEET_CHECK):
# if settings.FLOGGER_FLEET_LIST[reg] > 100 and settings.FLOGGER_FLEET_LIST[reg] < 200 and settings.FLOGGER_LOG_TUGS == "N":
if settings.FLOGGER_FLEET_LIST[reg] > 100 and settings.FLOGGER_FLEET_LIST[reg] < 200 and (not test_YorN(settings.FLOGGER_LOG_TUGS)):
print "Don't log tug: %s" % reg
return False
else:
print "Tug flight: ", reg
# At least 1 match for the callsign has been found
return True
def callsign_trans(callsign):
# Translates a callsign supplied as a flarm_id
# into the aircraft registration using a local db based on flarmnet or OGN
# Note if OGN db is being used then callsigns don't start with FLR or ICA, this is denoted by the 'Type' field
# cursor.execute('''SELECT registration, flarm_id FROM aircraft WHERE registration =? or flarm_id=? ''', (callsign,callsign,))
if callsign.startswith("FLR") or callsign.startswith("ICA") :
# Callsign starts with "FLR" or ICA so remove it
str = callsign[3:]
ncallsign = "%s" % str
print "Removing FLR or ICA string. Callsign is now: ", ncallsign
else:
ncallsign = "%s" % callsign
cursor.execute('''SELECT registration FROM flarm_db WHERE flarm_id=? ''', (ncallsign,))
row = cursor.fetchone()
if row <> None:
# Registration found for flarm_id so return registration
registration = "%s" % row
print "In flarm db return: ", registration
return registration
else:
# Registration not found for flarm_id so return flarm_id
print "Not in flarm db return: ", callsign
return ncallsign
def APRS_connect (settings):
#
#-----------------------------------------------------------------
# Connect to the APRS server to receive flarm data
#-----------------------------------------------------------------
#
# create socket & connect to server
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
set_keepalive(sock, after_idle_sec=60, interval_sec=3, max_fails=5)
sock.connect((settings.APRS_SERVER_HOST, settings.APRS_SERVER_PORT))
except Exception, e:
print "Socket failure on connect: ", e
print "Socket sock connected"
try:
# sock.send('user %s pass %s vers OGN_Flogger 0.0.2 filter r/+54.228833/-1.209639/25\n ' % (settings.APRS_USER, settings.APRS_PASSCODE))
APRSparm = ('user %s pass %s vers %s %s filter r/%s/%s/%s\n ' % (settings.APRS_USER,
settings.APRS_PASSCODE,
settings.FLOGGER_NAME,
settings.FLOGGER_VER,
settings.FLOGGER_LATITUDE,
settings.FLOGGER_LONGITUDE,
settings.FLOGGER_RAD))
# print "APRSparm is: ", APRSparm
# s = "user %s pass %s vers OGN_Flogger 0.2.2 filter r/%s/%s/25\n " % (settings.APRS_USER, settings.APRS_PASSCODE, settings.FLOGGER_LATITUDE, settings.FLOGGER_LONGITUDE)
# print "Socket connect string is: ", s
sock.send(APRSparm)
except Exception, e:
print "Socket send failure: ", e
exit()
print "Socket send ok"
# Make the connection to the server
# start_time = datetime.datetime.now()
# keepalive_time = time.time()
# sock_file = sock.makefile()
print "APRS connection made"
return sock
def addTrack(cursor,flight_no,track_no,longitude,latitude,altitude,course,speed,timeStamp):
#
#-----------------------------------------------------------------
# Add gps track data to track record if settings.FLOGGER_TRACK is "Y" ie yes
# and if flight_no != None which it will be if flight has not taken off at FLOGGER_AIRFIELD_NAME
#-----------------------------------------------------------------
#
# dt = str(datetime.datetime.now()) # Get the datetime this track point is created as string
# sdt = dt[0:10] + "T" + dt[11:19] + "Z" # Convert to string format for gpx, ie YYYY-MM-DDTHH:MM:SSZ
# sdt = "%sT%sZ" % (dt[0:10],dt[11:19]) # Convert to string format for gpx, ie YYYY-MM-DDTHH:MM:SSZ
if settings.FLOGGER_TRACKS == "Y" and flight_no != None:
print "Flight_no is: ", flight_no
print "Track point nos is: ", track_no
# dt = str(datetime.datetime.now()) # Get the datetime this track point is created as string
# sdt = dt[0:10] + "T" + dt[11:19] + "Z" # Convert to string format for gpx, ie YYYY-MM-DDTHH:MM:SSZ
# This print doesn't work as one of the values is of none-type, not sure why?
# print "Adding track data to: %i, %i, %f, %f, %f, %f %f " % (flight_no,track_no,latitude,longitude,altitude,course,speed)
try:
cursor.execute('''INSERT INTO track(flight_no,track_no,latitude,longitude,altitude,course,speed,timeStamp)
VALUES(:flight_no,:track_no,:latitude,:longitude,:altitude,:course,:speed,:timeStamp)''',
{'flight_no':flight_no,'track_no':track_no,'latitude':latitude,'longitude':longitude,'altitude':altitude,'course':course,'speed':speed,'timeStamp':timeStamp})
except:
print "Add trackpoint failed on insert: ignore trackpoint"
else:
print "Don't add track point"
return
def endTrack():
return
def CheckTrackData(cursor, flight_no, track_no, callsignKey):
# print "check flight_no if callsign in flight_no{}: ", flight_no, " Track_no is: ", track_no, " CallsignKey is: ", callsignKey
if flight_no.has_key(callsignKey) == 1:
print "flight_no already has entry: ", callsignKey
else:
try:
cursor.execute('''SELECT max(id) FROM flight_log2 WHERE src_callsign =?''', (callsignKey,))
except:
print "!!!ERROR - No record in flight_log2 for: ", callsignKey
# If this crashes need to think about adding record for flight_log2, but why?
exit()
row_id = cursor.fetchone()[0] # value of id for row just inserted use as flight_no for flight
print "Last row ID of flight_log2 for callsign: ", callsignKey, " inserted was: ", row_id
flight_no[src_callsign] = row_id
track_no[callsignKey] = 1
print "flight_no for callsignKey: ", callsignKey, " is: ", flight_no[callsignKey]
return
def check_position_packet (packet_str):
#
#-----------------------------------------------------------------
# This function determines if airfield is in the list of APRS
# base stations used for receiving position fixes.
#
# base_list should be set up as part of the main code initialisation
#-----------------------------------------------------------------
#
# for base in APRS_base_list:
for base in settings.FLOGGER_APRS_BASES:
if string.find(str(packet_str), base) <> -1:
print "Found in list of APRS base stations: ", base
return base
print "Not found base station in packet"
return -1
def delete_table (table):
#
#-----------------------------------------------------------------
# This function deletes the SQLite3 table
# with the name supplied by "table".
#-----------------------------------------------------------------
#
# print "delete_table. settings.FLOGGER_MODE: ", settings.FLOGGER_MODE
if settings.FLOGGER_MODE == "test":
print "Test only. Table %s not deleted" % (table)
return
parm = "DELETE FROM %s" % (table)
try:
cursor.execute(parm)
print "New Delete %s table ok" % (table)
except:
print "New Delete %s table failed or no records in tables" % (table)
return
def delete_flogger_file(folder, filename, days):
#
#-----------------------------------------------------------------
# This function deletes the files whose name contain filename in folder folder
# if they were created up to and including the number of days in the past
# specified by the days parameter.
# If days is zero then no deletions are performed
#-----------------------------------------------------------------
#
print "folder: ", folder
print "filename: ", filename
if days <= 0:
print "Don't delete old files, return"
return
now = time.time()
path = os.path.dirname(os.path.abspath(__file__))
if os.path.isdir(os.path.join(path, folder)):
# flist = os.listdir(folder)
flist = os.listdir(os.path.join(path, folder))
else:
print "Not found: ", folder
return
## print "delete flist: ", flist
for f in flist:
# print "Pathname is: ", os.path.join(folder, f), " st_mtime is: ", os.stat(os.path.join(folder, f)).st_mtime
full_file = os.path.join(folder, f)
file_find = string.find(full_file, filename) <> -1
file_time = os.stat(full_file).st_mtime
# print "File_find is: ", file_find, ". File_time is: ", file_time, "Now is: ", now - days * 86400
if (file_find == True) and (file_time <= (now - days * 86400)):
print "Delete file: ", full_file
os.remove(full_file)
# else:
# print "File not deleted: %s" % full_file
return
def connect_APRS(sock):
#
#-----------------------------------------------------------------
#
# This function tries to shutdown the specified sock and if it
# fails closes it and then creates a new one and reconnects to the APRS system
#
#-----------------------------------------------------------------
#
try:
sock.shutdown(0)
except socket.error, e:
if 'not connected' in e:
print '*** Transport endpoint is not connected ***'
print "socket no longer open so can't be closed, create new one"
else:
print "Socket still open so close it"
sock.close()
print "Create new socket"
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.connect((settings.APRS_SERVER_HOST, settings.APRS_SERVER_PORT))
except Exception, e:
print "Connection refused. Errno: ", e
exit()
APRSparm = ('user %s pass %s vers %s %s filter r/%s/%s/%s\n ' % (settings.APRS_USER,
settings.APRS_PASSCODE,
settings.FLOGGER_NAME,
settings.FLOGGER_VER,
settings.FLOGGER_LATITUDE,
settings.FLOGGER_LONGITUDE,
settings.FLOGGER_RAD))
# print "APRSparm is: ", APRSparm
# sock.send('user %s pass %s vers Python_Example 0.0.1 filter r/+54.228833/-1.209639/25\n ' % (settings.APRS_USER, settings.APRS_PASSCODE))
sock.send(APRSparm)
# Make the connection to the server
sock_file = sock.makefile()
return sock_file
#
#-----------------------------------------------------------------
# Start of main code
#-----------------------------------------------------------------
#
print "FLOGGER_AIRFIELD_NAME from class is: " + settings.FLOGGER_AIRFIELD_NAME
# path = os.path.dirname(os.path.abspath(__file__))
settings.FLOGGER_BS = os.path.dirname(os.path.abspath(__file__))
settings.FLOGGER_TRACKS_FOLDER = settings.FLOGGER_BS + "/tracks" # Setup 'tracks' folder name - not defined in settings class anymore
settings.FLOGGER_FLIGHTS_LOG = settings.FLOGGER_BS + "/flight_logs"
print "settings.FLOGGER_TRACKS_FOLDER: ", settings.FLOGGER_TRACKS_FOLDER
#
# User and passcode now mandatory positional parameters
# Mode is an optional positional parameter, default is "live"
#
# try:
# parser = argparse.ArgumentParser()
# parser.add_argument("--user", help="user and passcode must be supplied, see http://www.george-smart.co.uk/wiki/APRS_Callpass for how to obtain")
# parser.add_argument("--passcode", help="user and passcode must be supplied", type=int)
# parser.add_argument("--mode", help="mode is test or live, test modifies behaviour to add output for testing", default="test")
# parser.add_argument('-s', '--smtp', help="URL of smtp server")
# parser.add_argument('-t', '--tx', help="email address of sender")
# parser.add_argument('-r', '--rx', help="email address of receiver")
# except:
# print "Parsing cmd line args failed"
print "Cmd line args parsed"
try:
args = parser.parse_args()
#
# Check parameters. If an smtp server address is specified then the sender and receiver email
# addresses must also be supplied either in the call line or the config file
#
if (args.smtp == None and settings.FLOGGER_SMTP_SERVER_URL == ""):
print "SMTP url not specified, don't send email"
else:
print "Set to send email"
if (args.smtp <> None):
settings.FLOGGER_SMTP_SERVER_URL = args.smtp
if (args.tx <> None):
settings.FLOGGER_SMTP_TX = args.tx
if (args.rx <> None):
print "args.rx is: ", args.rx
settings.FLOGGER_SMTP_RX = args.rx
elif ((args.tx == None or args.rx == None) and (settings.FLOGGER_SMTP_TX == "" or settings.FLOGGER_SMTP_RX == "")):
print "Email option parameters or config not valid. smtp=%s, SERVER_URL=%s, tx=%s, rx=%s, SMTP_TX=%s, SMTP_RX=%s" % \
(args.smtp, settings.FLOGGER_SMTP_SERVER_URL, args.tx, args.rx, settings.FLOGGER_SMTP_TX, settings.FLOGGER_SMTP_RX)
print "Exit"
exit()
print "Email parameters are now: smtp=%s, SERVER_URL=%s, tx=%s, rx=%s, SMTP_TX=%s, SMTP_RX=%s" % \
(args.smtp, settings.FLOGGER_SMTP_SERVER_URL, args.tx, args.rx, settings.FLOGGER_SMTP_TX, settings.FLOGGER_SMTP_RX)
if (args.user <> None):
settings.APRS_USER = args.user
else:
print "Taken from APRS_USER: ", settings.APRS_USER
if args.passcode <> None:
settings.APRS_PASSCODE = args.passcode
else:
print "Taken from form APRS_PASSCODE: ", settings.APRS_PASSCODE
if args.mode <> None:
print "Taken from args.mode: ", settings.FLOGGER_MODE
settings.FLOGGER_MODE = args.mode
else:
print "Taken from FLOGGER_MODE: ", settings.FLOGGER_MODE
except :
print "Failed in command line arg parser"
# print "user=", args.user, " passcode=", args.passcode, "mode=", args.mode, "smtp=", args.smtp, "tx=", args.tx, "rx=", args.rx
# settings.APRS_USER = args.user
# settings.APRS_PASSCODE = args.passcode
# settings.FLOGGER_MODE = args.mode
# Creates or opens a file called flogger.sql3 as an SQLite3 DB
#
#-----------------------------------------------------------------
# Build flogger db using schema
# Delete SQLite3 database file if it already exists; stops it getting
# too large during testing
#-----------------------------------------------------------------
#
# if os.path.isfile(settings.FLOGGER_DB_NAME):
if os.path.isfile(settings.FLOGGER_DB_NAME) and settings.FLOGGER_MODE <> "test":
print "SQLite3 db file exists so delete it"
os.remove(settings.FLOGGER_DB_NAME)
else:
print "SQLite3 db file exists but in test mode so DON'T delete it!"
db = sqlite3.connect(settings.FLOGGER_DB_NAME)
cursor = db.cursor() # Get a cursor object
# f = open(settings.FLOGGER_DB_SCHEMA, 'rt') # Open the db schema file for reading
f = open(settings.FLOGGER_DB_SCHEMA, 'rt') # Open the db schema file for reading
schema = f.read()
cursor.executescript(schema)
## cursor.executescript(schema) ### # Build flogger db from schema
print "End of building db: ", settings.FLOGGER_DB_NAME, " using schema: ", settings.FLOGGER_DB_SCHEMA
#
#-----------------------------------------------------------------
# Build local database from flarmnet of aircraft
#-----------------------------------------------------------------
#
if flarmdb(settings.FLOGGER_FLARMNET_DB_URL, cursor, db, "flarm_data", settings) == True:
print "Flarmnet db built"
else:
print "Flarmnet db build failed, exit"
exit()
#
#-----------------------------------------------------------------
# Determine location details, latitude, longitude and elevation
#-----------------------------------------------------------------
#
if settings.FLOGGER_AIRFIELD_DETAILS <> "":
loc = get_coords(settings.FLOGGER_AIRFIELD_DETAILS)
i = 1
while loc == False and i<=100:
# while loc[2] == None:
# print "get_coords returned loc[2] as None, retry", " Retry count get_coords: ", i
print "get_coords returned False, retry", " Retry count get_coords: ", i
loc = get_coords(settings.FLOGGER_AIRFIELD_DETAILS)
i = i + 1
# time.sleep (1)
if loc == False:
if settings.FLOGGER_LATITUDE <> "" and settings.FLOGGER_LONGITUDE <> "" and settings.FLOGGER_QNH >=0 :
print "Geolocator failed use values from settings"
else:
print "Geoloactor failed and no value for lat, long, QNH. Run again, might work"
exit(2)
else:
settings.FLOGGER_LATITUDE = str(loc[0]) # Held as string
settings.FLOGGER_LONGITUDE = str(loc[1]) # Held as string
settings.FLOGGER_QNH = loc[2] # Held as number
if settings.FLOGGER_QNH == None:
print "Probable Geolocator error, set FLOGGER_QNH default 0, loc[2]: ", loc[2]
settings.FLOGGER_QNH = 0
exit(3)
print "Location is: ", settings.FLOGGER_AIRFIELD_DETAILS, " latitude: ", loc[0], " longitude: ", loc[1], " elevation: ", loc[2]
# print "Location is: ", settings.FLOGGER_AIRFIELD_DETAILS, " latitude: ", settings.FLOGGER_LATITUDE , \
# " longitude: ", settings.FLOGGER_LONGITUDE, " elevation: ", settings.FLOGGER_QNH
else:
print "Use location data from settings"
#
#-----------------------------------------------------------------
# Set up list of APRS base stations to be used
# (Note this code could be nicer but will do for now)
#-----------------------------------------------------------------
#
#APRS_base_list = [settings.FLOGGER_APRS_BASE_1,
# settings.FLOGGER_APRS_BASE_2,
# settings.FLOGGER_APRS_BASE_3,
# settings.FLOGGER_APRS_BASE_4,]
# APRS_base_list = settings.FLOGGER_APRS_BASES
#
#-----------------------------------------------------------------
# Initialise API for computing sunrise and sunset
#-----------------------------------------------------------------
#
# Initialise the PyEphem observer used for all sunrise/sunset computations.
# pressure=0 disables atmospheric refraction; the horizon angle comes from
# settings so twilight can be tuned per site.
location = ephem.Observer()
location.pressure = 0
#location.horizon = '-0:34'    # Adjustments for angle to horizon
location.horizon = settings.FLOGGER_LOCATION_HORIZON    # Adjustments for angle to horizon
location.lat = settings.FLOGGER_LATITUDE
location.lon = settings.FLOGGER_LONGITUDE
print "Location for ephem is: ", settings.FLOGGER_AIRFIELD_DETAILS, " latitude: ", location.lat, " longitude: ", location.lon, " elevation: ", settings.FLOGGER_QNH
date = datetime.datetime.now()
next_sunrise = location.next_rising(ephem.Sun(), date)
next_sunset = location.next_setting(ephem.Sun(), date)
print "Sunrise today: ", date, " is: ", next_sunrise
print "Sunset today: ", date, " is: ", next_sunset
#
#-----------------------------------------------------------------
# Make the connection to the APRS server
#-----------------------------------------------------------------
#
# Open the APRS connection and note the start/keepalive timestamps; the
# keepalive timer decides when to write a comment line to keep the socket up.
start_time = datetime.datetime.now()
keepalive_time = time.time()
#sock_file = sock.makefile()
print "Start time!"
sock = APRS_connect(settings)
sock_file = sock.makefile()
# print "libfap_init"
# rtn = libfap.fap_init()
# if rtn <> 0:
# print "Failed to connect to APRS, check parameters"
# exit()
# print "Libfap return: ", rtn
#
#-----------------------------------------------------------------
# Set up paths for data, logs and tracks
#-----------------------------------------------------------------
#
# Build timestamped data/log file names, sanitising spaces and colons so the
# names are valid on all filesystems.
SB_DATA = "SB_data" + str(start_time)
SB_Log = "SB_Log" + str(start_time)
SB_DATA = str(SB_DATA).replace(" ","_")
SB_Log = str(SB_Log).replace(" ","_")
SB_DATA = str(SB_DATA).replace(":","-")
SB_Log = str(SB_Log).replace(":","-")
print "Checking log paths: ", settings.FLOGGER_LOG_PATH
if settings.FLOGGER_LOG_PATH <> "":
    if not os.path.isdir(settings.FLOGGER_LOG_PATH):
        print "Log path is not directory",
    # Prefix both files with the absolute log directory, creating it on demand.
    SB_DATA = os.path.abspath(settings.FLOGGER_LOG_PATH) + "/" + SB_DATA
    SB_Log = os.path.abspath(settings.FLOGGER_LOG_PATH) + "/" + SB_Log
    try:
        #print "Creating log folder"
        os.makedirs(settings.FLOGGER_LOG_PATH)
        print "Created: ", settings.FLOGGER_LOG_PATH
    except:
        # NOTE(review): makedirs also raises when the directory already
        # exists -- confirm this bare except is meant to swallow that case.
        print "FLOGGER_LOG_PATH does not exist. Please check settings."
        exit()
print "SB data file is: ", SB_DATA
print "SB log file is: ", SB_Log
#sys.stdout = open(SB_Log, 'w')
#print "Datafile open"
# test=True replays packets from a previously captured data file instead of
# reading the live socket.
test = False
if test == True:
    datafile = open (SB_DATA, 'rw')
    print "In test mode"
else:
    datafile = open (SB_DATA, 'w')
    print "In live mode"
#
#-----------------------------------------------------------------
# Setup cntrl-c handler
#
#-----------------------------------------------------------------
#
# print "Setup cntrl-c handler"
# sig_handler(db, cursor)
#time.sleep(5) # Press Ctrl+c here # Just for testing
#
#-----------------------------------------------------------------
# Main loop reading data from APRS server and processing records
# This continues until sunset after which the data recorded is processed
#-----------------------------------------------------------------
#
# i counts APRS packets read today; reset at each new day.
i = 0
try:
#    while 1:
    while settings.FLOGGER_RUN:
        print "FLOGGER_RUN: ", settings.FLOGGER_RUN
#    for i in range(1000000):
        i = i + 1
        datetime_now = datetime.datetime.now()
        previous_sunrise = location.previous_rising(ephem.Sun(), date).datetime()
        next_sunrise = location.next_rising(ephem.Sun(), date).datetime()
        previous_sunset = location.previous_setting(ephem.Sun(), date).datetime()
        next_sunset = location.next_setting(ephem.Sun(), date).datetime()
# Set datetime to current time + FLOGGER_LOG_TIME_DELTA to start processing flight log
# that number of hours before sunset
        log_datetime = datetime.datetime.now() + datetime.timedelta(hours=settings.FLOGGER_LOG_TIME_DELTA)
#        print "Log datetime is: ", log_datetime
        location.date = ephem.Date(log_datetime)
        print "Ephem date is: ", location.date
        s = ephem.Sun()
        s.compute(location)
        twilight = -6 * ephem.degree # Defn of Twilight is: Centre of Sun is 6, 12, 18 degrees below horizon (civil, nautical, astronomical)
#        daylight = s.alt > twilight
#        print "Just for testing aprs_parse"
#        daylight = True
#        if daylight:
        # Daylight (sun above civil twilight): keep logging packets.
        # Otherwise run the end-of-day processing below and sleep to sunrise.
        if s.alt > twilight:
            print "Is it light at Location? Yes", location, " Ephem date is: ", ephem.Date(location.date), " Next sunset at: ", location.next_setting(ephem.Sun())
        else:
            print "Is it light at Location? No", location, " Ephem date is: ", ephem.Date(location.date), " Next sunrise at: ", location.next_rising(ephem.Sun())
            process_log(cursor,db, settings)
            #
            # Dump tracks from flights table as .gpx
            # This updates each flight in flights table with trackfile name
            #
            print "Dump tracks"
            dump_tracks2(cursor, db, settings)
            dump_IGC(cursor, db, settings)
            #
            # Experimental. Find tug used for each launch
            #
            find_tug(cursor, db, settings)
            print "Find tug phase end"
            #
            # Dump flights table as cvs file
            # If no flights then returns ""
            #
            print "Dump flights table"
            csv_file = dump_flights(settings)
            #
            # Email flights csv file if required
            # email_log2 sorts out if there are no flights on any one day
            # FLOGGER_SMTP_SERVER_TX is either set in config by user or value taken from cmd line --smtp parm.
            #
            if settings.FLOGGER_SMTP_SERVER_URL <> "":
                print "Email today's flight log. RX: " + settings.FLOGGER_SMTP_RX
                email_log2(settings.FLOGGER_SMTP_TX, settings.FLOGGER_SMTP_RX, csv_file, datetime.date.today(), settings)
            else:
                print "Don't email flight log, no flights"
            #
            # Delete entries from daily flight logging tables etc
            #
            delete_table("flight_log")
            delete_table("flight_log2")
            delete_table("flight_log_final")
            delete_table("flight_group")
##DEL            delete_table("flights")
            delete_table("track")
            delete_table("trackFinal")
            delete_table("flarm_db")            # flarm_db should be rebuilt at start of each day
            db.commit()
            # Wait for sunrise
#            wait_time = next_sunrise - datetime_now
            datetime_now = datetime.datetime.now()
            date = datetime.datetime.now()
            location.date = ephem.Date(datetime.datetime.now())
            next_sunrise = location.next_rising(ephem.Sun(), date).datetime()
            print "Location Date now: ", location.date, " Next sunrise is: ", next_sunrise
            wait_time = location.next_rising(ephem.Sun(), date).datetime() - datetime_now
            print "Next sunrise at: ", location.next_rising(ephem.Sun(), date).datetime(), " Datetime now is: ", datetime_now
            # Wait an additional 2 hours (in seconds) more before resuming.
            # Just a bit of security, not an issue as unlikely to start flying so early
            wait_time_secs = int(wait_time.total_seconds()) + (2 * 60 * 60)
            # close socket -- not needed. Create new one at sunrise
            try:
                sock.shutdown(0)
            except socket.error as msg:
                print "Socket failed to shutdown, ignore. Msg is: " , msg
            sock.close()
            #
            # Delete historic files as specified
            #
            print "+++++++Phase 4 Start Delete out of date files+++++++"
            delete_flogger_file(settings.FLOGGER_TRACKS_FOLDER, "track", settings.FLOGGER_DATA_RETENTION)
            delete_flogger_file(settings.FLOGGER_FLIGHTS_LOG, "flights.csv", settings.FLOGGER_DATA_RETENTION)
            print "-------Phase 4 End-------"
            #
            # Sleep till sunrise
            # Then open new socket, set ephem date to new day
            #
            print "Wait till after sunrise at: ", next_sunrise, " Elapsed time: ", wait_time, ". Wait seconds: ", wait_time_secs
#            self.RunningLabel.setText("Sleeping")
            time.sleep(wait_time_secs)
            # Sun has now risen so recommence logging flights
            location.date = ephem.Date(datetime.datetime.now())
            print "Woken up. Date time is now: ", datetime.datetime.now()
            print "Ephem datetime on wakeup is: ", ephem.Date(location.date)
            # Make new socket as old one will have timed out during the 'big' sleep, reset the timers
            start_time = datetime.datetime.now()
            keepalive_time = time.time()
            sock = APRS_connect(settings)
            sock_file = sock.makefile()                     # Note both sock & sock_file get used
            #
            #-----------------------------------------------------------------
            # Build local database from flarmnet of aircraft for today
            # Note source flarm_db may have changed during previous day
            #-----------------------------------------------------------------
            #
            if flarmdb(settings.FLOGGER_FLARMNET_DB_URL, cursor, db, "flarm_data", settings) == True:
                print "Flarmnet db built for today"
            else:
                print "Flarmnet db re-build failed, exit"
                exit()
            i = 0                       # Count of todays APRS reads reset
            flight_no = {}              # Re-initialise flight_no dictionary at start of day
            track_no = {}               # Re-initialise track_no dictionary at start of day
            # Restart the main loop for the new day.
            continue
        # Send a keepalive comment line if the socket has been quiet for
        # longer than FLOGGER_KEEPALIVE_TIME; reconnect if the write fails.
        current_time = time.time()
        elapsed_time = int(current_time - keepalive_time)
        print "Elapsed time is: ", elapsed_time
        if (current_time - keepalive_time) > settings.FLOGGER_KEEPALIVE_TIME:
            try:
                print "Socket open for: ", (current_time - keepalive_time), " seconds, send keepalive"
                rtn = sock_file.write("#Python Example App\n\n")
                sock_file.flush()               # Make sure it gets sent
                print "Send keepalive", elapsed_time, " rtn is: ", rtn
                keepalive_time = current_time
            except Exception, e:
                print ('something\'s wrong with socket write. Exception type is %s' % (`e`))
                sock_file = connect_APRS(sock)
                print "New connection to APRS made"
                continue
        else:
            print "No keepalive sent"
        print "In while loop. Count= ", i
        # Read one APRS record: from the live socket, or from a capture file
        # when running in test mode.
        try:
            if test == False:
                # In live mode so use socket read
                print "Read socket"
                packet_str = sock_file.readline()
                print "Raw APRS Packet: ", packet_str
#                datafile.write(packet_str)
            else:
                # In test mode so file read
                packet_str = datafile.readline()
        except socket.error:
            print "Socket error on readline"
        print "packet string length is: ", len(packet_str), " packet is: ", packet_str
        try:
            len_packet_str = len(packet_str)
        except TypeError:
            # Binary rubbish on the line: hex-dump it to measure the length.
            packet_str_hex = ":".join("{:02x}".format(ord(c)) for c in packet_str)
            len_packet_str = len(packet_str_hex) / 3
            print "TypeError on packet_str length. Now is: ", len_packet_str
        # Zero-length read means the server closed the socket: rebuild the
        # connection from scratch and re-send the login/filter parameters.
        if len_packet_str == 0:
            # create new socket & connect to server
            print "Read returns zero length string on iteration: ", i
            # Wait 20 seconds
            time.sleep(20)
#            continue
            try:
                sock.shutdown(0)
            except socket.error, e:
                if 'not connected' in e:
                    print '*** Transport endpoint is not connected ***'
                    print "socket no longer open so can't be closed, create new one"
                else:
                    print "Socket still open so close it"
                    sock.close()
            print "Create new socket"
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                sock.connect((settings.APRS_SERVER_HOST, settings.APRS_SERVER_PORT))
            except Exception, e:
                print "Connection refused. Errno: ", e
                exit()
            APRSparm = ('user %s pass %s vers %s %s filter r/%s/%s/%s\n ' % (settings.APRS_USER,
                                settings.APRS_PASSCODE,
                                settings.FLOGGER_NAME,
                                settings.FLOGGER_VER,
                                settings.FLOGGER_LATITUDE,
                                settings.FLOGGER_LONGITUDE,
                                settings.FLOGGER_RAD))
#            print "APRSparm is: ", APRSparm
#            sock.send('user %s pass %s vers Python_Example 0.0.1 filter r/+54.228833/-1.209639/25\n ' % (settings.APRS_USER, settings.APRS_PASSCODE))
            sock.send(APRSparm)
            # Make the connection to the server
            sock_file = sock.makefile()
            # Delete following line when not running in test mode
#            exit()
            continue
        #
        # Parse the returned packet into fields
        # Note this uses a modified version of libfap as the master on
        # github contains an error
        #
        packet = aprs_parse(packet_str, settings)
        if packet:
            print "aprs_parse rtnd: ", packet
        else:
            print "aprs_parse Failed. Not glider position packet"
            continue
        # Unpack the position report fields and record the latest values for
        # this callsign in the nvalues dictionary.
        src_callsign = packet["from"]
        latitude = packet["latitude"]
        longitude = packet["longitude"]
        altitude = packet["altitude"]
        speed = packet["speed"]
        course = packet["course"]
        timestamp = packet["timestamp"]
        CheckVals(src_callsign, "latitude", latitude)
        nvalues[src_callsign]["longitude"] = longitude
        nvalues[src_callsign]["altitude"] = altitude
        nvalues[src_callsign]["speed"] = speed
        #
        # Removed libfap parsing 20180424
        #
        # Check if callsign is in the fleet
        if fleet_check_new(str(src_callsign)) == False:
            print "Aircraft ", src_callsign, " not registered at ", settings.FLOGGER_AIRFIELD_NAME, " , ignore"
            print "-----------------End of Packet: ", i, " ------------------------------"
            continue
        else:
            print "Aircraft ", src_callsign, " is in ", settings.FLOGGER_AIRFIELD_NAME, " fleet, process"
        # Use registration if it is in aircraft table else just use Flarm_ID
#        src_callsign = callsign_trans(src_callsign)
#        print "Aircraft callsign is now: ", src_callsign
        registration = callsign_trans(src_callsign)
        print "Aircraft registration is: ", registration, " FLARM code is: ", src_callsign
        # Check with this aircraft callsign has been seen before
        CheckPrev(src_callsign, 'latitude', 0)
        CheckVals(src_callsign, 'latitude', 0)
        # Current and previous data values created
        local_time = datetime.datetime.now()
        fl_date_time = local_time.strftime("%D:%H:%M:%S")
        fl_date = local_time.strftime("%y/%m/%d")
#        fl_date = local_time.strftime("%D")
        fl_time = local_time.strftime("%H:%M:%S")
        print "src_callsign matched: ", src_callsign, " ", fl_date_time, " Latitude is: ", latitude
#        print "Line ", i, " ", packet[0].orig_packet
#        if nprev_vals[src_callsign]['speed'] == 0 and nvalues[src_callsign]['speed'] <> 0:
#        af_loc = (settings.FLOGGER_LATITUDE, settings.FLOGGER_LONGITUDE)
#        takeoff_loc = (latitude, longitude)
        # Distance from the airfield; used to require that a take-off is
        # detected only within FLOGGER_AIRFIELD_LIMIT of the field.
        takeoff_dist = vincenty((settings.FLOGGER_LATITUDE, settings.FLOGGER_LONGITUDE), (latitude, longitude)).meters
        print "Test for was stopped now moving. nprevs[speed] is: " + str(nprev_vals[src_callsign]['speed']) + " nvalues[speed] is: "+ str(nvalues[src_callsign]['speed'])
#        if nprev_vals[src_callsign]['speed'] <= V_SMALL and nvalues[src_callsign]['speed'] > V_SMALL:
#        if nprev_vals[src_callsign]['speed'] <= V_SMALL and nvalues[src_callsign]['speed'] > V_SMALL and takeoff_dist < settings.FLOGGER_AIRFIELD_LIMIT:
        if nprev_vals[src_callsign]['speed'] <= V_SMALL and nvalues[src_callsign]['speed'] > V_TAKEOFF_MIN and takeoff_dist < settings.FLOGGER_AIRFIELD_LIMIT:
            # The previous speed means it was probably stopped, the current speed means it is probably moving and the position is within the airfield
            # Following test for case when Flarm is switched on for first time when stationary and at an
            # altitude greater than settings.FLOGGER_QNH, ie a special case of initial location. nprev_vals get set to zero when aircraft
            # first detected by flarm. Doesn't work. Needs thought
#            if (nprev_vals[src_callsign]['speed'] <= V_SMALL and nvalues[src_callsign]['speed'] > V_SMALL) or (nprev_vals[src_callsign]['speed'] == nvalues[src_callsign]['speed'] and nvalues[src_callsign]['speed']> V_SMALL):
            print "New test true for switch-on"
            print "Takeoff point is: ", (latitude, longitude), "Distance is: ", takeoff_dist
            email_msg(settings.FLOGGER_SMTP_TX, settings.FLOGGER_SMTP_RX, registration, fl_time, settings)
            # aircraft was stopped, now isn't
            # Enhancement. At this point create new Track table record for the flight.
            # Set track_no to current value and increment for use by next new flight.
            # Flight_no (ie flight_log2 id field) has to copied to the Track table record
            # each time new track data record for the flight is added.
            print "Aircraft ", src_callsign, " was stopped, now moving. Create new record"
            cursor.execute('''INSERT INTO flight_log2(sdate, stime, edate, etime, duration, src_callsign, max_altitude, speed, registration)
                  VALUES(:sdate,:stime,:edate,:etime,:duration,:src_callsign,:max_altitude,:speed, :registration)''',
                  {'sdate':fl_date, 'stime':fl_time, 'edate': "", 'etime':"", 'duration': "", 'src_callsign':src_callsign, 'max_altitude':altitude, 'speed':0, 'registration': registration})
            nprev_vals[src_callsign]['speed'] = nvalues[src_callsign]['speed']
            print "Storing initial track data"
            cursor.execute('''SELECT max(id) FROM flight_log2''')
            lastrow_id = cursor.fetchone()[0]   # value of id for row just inserted use as flight_no for flight
            print "Last row ID of flight_log2 inserted was: ", lastrow_id
            flight_no[src_callsign] = lastrow_id
#            flight_no[src_callsign] = cursor.lastrowid # Unique value of row just created
            track_no[src_callsign] = 1          # Initialise trackpoint number for this flight
            addTrack(cursor, flight_no[src_callsign],track_no[src_callsign],longitude,latitude,altitude,course,speed,timestamp)
            track_no[src_callsign] += 1         # Increment trackpoint number for this flight
#        if nprev_vals[src_callsign]['speed'] <> 0 and nvalues[src_callsign]['speed'] == 0:
#            print "Test for was moving is now stopped"
        print "Test for was moving is now stopped. nprev=: ", nprev_vals[src_callsign]['speed'], " nval=: ", nvalues[src_callsign]['speed'], " V_LANDING_MIN=: ", V_LANDING_MIN
#        if nprev_vals[src_callsign]['speed'] > V_SMALL and nvalues[src_callsign]['speed'] <= V_SMALL:
        # Landing detection: was moving faster than V_LANDING_MIN, now slower.
        if nprev_vals[src_callsign]['speed'] > V_LANDING_MIN and nvalues[src_callsign]['speed'] <= V_LANDING_MIN:
            # aircraft was moving is now stopped
            print "Aircraft ", src_callsign, " was moving, now stopped. Update record for end date & time"
            # Add final track record
            try:
                addTrack(cursor, flight_no[src_callsign],track_no[src_callsign],longitude,latitude,altitude,course,speed,timestamp)
            # Find latest record for this callsign
            except KeyError, reason:
                # No take-off was seen for this callsign (e.g. program was
                # started mid-flight) so there is no flight/track number.
                print "addTrack failed. Trackpoint ignored. Reason: ", reason
            #
            # Bug 20150520-1 Test Start
            #
            try:
                cursor.execute('''SELECT max(id) FROM flight_log2 WHERE src_callsign =?''', (src_callsign,))
                r = cursor.fetchone()
                try:
                    rowid = r[0]
                    cursor.execute('''SELECT sdate, stime, max_altitude FROM flight_log2 WHERE ROWID =?''', (rowid,))
                    row = cursor.fetchone()
                    print "Test Bug 20150520-1 ok, row is: ", row
                except:
                    print "Select for sdate/stime failed for: ", rowid
            except:
                print "Select max(id) failed for: ", src_callsign
            #
            # Bug 20150520-1 Test End
            #
            cursor.execute('''SELECT sdate, stime, max_altitude, id FROM flight_log2 WHERE
                           ROWID IN (SELECT max(id) FROM flight_log2 WHERE src_callsign =? )''', (src_callsign,))
            row = cursor.fetchone()
            #
            # Bug 20150520-1 Start
            # Re-initialise altitude for stopped aircraft to zero. And above row is None
            #
            if row == None:
                print "Bug 20150520-1. We have a problem with: ", src_callsign
                continue
            #
            # Bug 20150520-1 End
            #
#            for r in row:
#                print "Returned row for callsign: ", src_callsign, " is: ", r
#            end_time = datetime.strptime(fl_time,'%H:%M:%S')
            # Compute the flight duration from the stored start time and now.
            end_time = datetime.datetime.now()      # In seconds since epoch
            start_date = row[0]                     # In %y/%m/%d format
            start_time = row[1]                     # In %H:%M:%S format
            max_altitude = row[2]
            flight = row[3]                         # id field of flight_log2
            fl_end_datetime = datetime.datetime.now()
            fl_end_date = fl_end_datetime.strftime("%y/%m/%d")
            fl_end_time_str = fl_end_datetime.strftime("%H:%M:%S")
#            fl_end_time = fl_end_time_str
            fl_end_time = datetime.datetime.strptime(fl_end_time_str, "%H:%M:%S")
            print "Flight End date and time are: ", fl_end_date, " , ", fl_end_time_str
            print "Flight Start date and time are: ", start_date, " , ", start_time
            fl_start_time = datetime.datetime.strptime(start_time, "%H:%M:%S") # Convert flight start time to type time
            fl_duration_datetime = fl_end_time - fl_start_time # fl_duration_time is a string format %H:%M:%S
#            fl_duration_time = datetime.datetime.strptime(fl_duration_datetime, "%H:%M:%S")
            c = fl_duration_datetime
#            fl_duration_time = "%.2dh: %.2dm: %.2ds" % (c.seconds//3600,(c.seconds//60)%60, c.seconds%60)
            fl_duration_time = "%.2d: %.2d: %.2d" % (c.seconds//3600,(c.seconds//60)%60, c.seconds%60)
            fl_duration_time_str = str(fl_duration_time)
            print "Start time: ", fl_start_time, "End time: ", fl_end_time_str, "Duration: ", fl_duration_time, " Max altitude: ", max_altitude
            # Add record to flight_log_final
            cursor.execute('''INSERT INTO flight_log_final(sdate, stime, edate, etime, duration, src_callsign, max_altitude, speed, registration, flight_no)
                  VALUES(:sdate,:stime,:edate,:etime,:duration,:src_callsign,:max_altitude,:speed, :registration,:flight_no)''',
                  {'sdate':start_date, 'stime':start_time, 'edate': fl_end_date, 'etime':fl_end_time_str,
                   'duration': fl_duration_time_str, 'src_callsign':src_callsign, 'max_altitude':max_altitude, 'speed':0, 'registration': registration, 'flight_no': flight})
            print "Updated flight_log_final", src_callsign
            # flogger_landout_check(flight_reg, af_centre, radius, landing_coords, mode)
            af_loc = (settings.FLOGGER_LATITUDE, settings.FLOGGER_LONGITUDE)
            cursor.execute('''SELECT land_out FROM flight_log_final WHERE flight_no=?''', (flight,))
            row = cursor.fetchone()
            if row[0] == None:
                # Check whether land_out already been logged
                # This is needed since using input from multiple base stations, landout can be logged more than once
                res =landout_check(registration, flight, af_loc, settings.FLOGGER_AIRFIELD_LIMIT, (latitude, longitude), settings.FLOGGER_LANDOUT_MODE, settings)
                print "Landout check is: ", res
                if res == True:
                    landout_status = "yes"
                else:
                    landout_status = "no"
                cursor.execute('''UPDATE flight_log_final SET land_out=? WHERE flight_no=?''', (landout_status,flight))
            else:
                print "Landout check. row[0]: ", row[0]
            # Update flight record in flight_log2
            cursor.execute(''' SELECT max(id) FROM flight_log2 WHERE src_callsign =?''', (src_callsign,))
            row = cursor.fetchone()
            rowid = row[0]
            print "Update row: ", rowid
            try:
                cursor.execute('''UPDATE flight_log2 SET edate=?, etime=?, duration=?, max_altitude=?, speed=? WHERE ROWID=?''',
                               (fl_end_date, fl_end_time_str, fl_duration_time_str, max_altitude, 0, rowid))
                print "Updated flight_log2", src_callsign, " Row: ", rowid
            except:
                print "Failed to update flight_log2: ", src_callsign, " Row: ", rowid
            nprev_vals[src_callsign]['speed'] = nvalues[src_callsign]['speed'] # ie set to '0'
            #
            # Bug 20150520-1
            # Re-initialise altitude for stopped aircraft to zero
            #
            print "Bug 20150520-1. Re-initialise altitude in nvalues & nprev_vals for: ", src_callsign
            nprev_vals[src_callsign]['altitude'] = 0
            nvalues[src_callsign]['altitude'] = 0
            # Check updated record
            print "Check fields in flight_log2: ", src_callsign, " Row: ", rowid
            cursor.execute('''SELECT ROWID, sdate, stime, edate, etime, duration, max_altitude FROM flight_log2 WHERE
                           ROWID IN (SELECT max(id) FROM flight_log2 WHERE src_callsign =? )''', (src_callsign,))
            row = cursor.fetchone()
            for r in row:
                print "Returned row for callsign: ", src_callsign, " is: ", r
            db.commit()
            print "-----------------End of Packet: ", i, " ------------------------------"
            continue
#        if nprev_vals[src_callsign]['speed'] == 0 and nvalues[src_callsign]['speed'] == 0:
        print "Is Aircraft %s moving? nprev.speed=%d, nvalues.speed=%d, nvalues.altitude=%d" % (src_callsign, nprev_vals[src_callsign]['speed'], nvalues[src_callsign]['speed'], nvalues[src_callsign]['altitude'])
#        if nprev_vals[src_callsign]['speed'] <= V_SMALL and nvalues[src_callsign]['speed'] <= V_SMALL and nvalues[src_callsign]['altitude'] <= settings.FLOGGER_QNH:
        # Still on the ground: below take-off speed and at/below field altitude.
        if nprev_vals[src_callsign]['speed'] <= V_TAKEOFF_MIN and nvalues[src_callsign]['speed'] <= V_TAKEOFF_MIN and nvalues[src_callsign]['altitude'] <= settings.FLOGGER_QNH:
            # Aircraft hasn't moved and is not at an altitude greater than Sutton Bank.
            print "Aircraft: ", src_callsign, " Not moving. Speed was: ", nprev_vals[src_callsign]['speed'], " Speed is: ", nvalues[src_callsign]['speed']
        else:
            # aircraft is moving. Check whether current altitude is greater than previous
            # Enhancement. Add new record to Tracks table for this flight here. Track_no for flight is initialised
            # when flight record is created, initial Track table record for flight is also created at that time
            print "Aircraft ", src_callsign, " is still moving"
            # Check whether a track list has been set up. May have to add flight_log2 record as well??
            CheckTrackData(cursor, flight_no, track_no, src_callsign)
            print "Flight details are: ", flight_no[src_callsign]
            # Add track record for moving aircraft
            addTrack(cursor, flight_no[src_callsign],track_no[src_callsign],longitude,latitude,altitude,course,speed,timestamp)
            track_no[src_callsign] += 1         # Next trackpoint number for this flight
            print "Old height was: ", nprev_vals[src_callsign]['altitude'], " New height is: ", nvalues[src_callsign]['altitude']
            # Track the maximum altitude reached during the flight.
            if nvalues[src_callsign]['altitude'] > nprev_vals[src_callsign]['altitude']:
                print "Aircraft ", src_callsign, " is now higher than max height, was: ", nprev_vals[src_callsign]['altitude'], " now: ", nvalues[src_callsign]['altitude']
                cursor.execute('''UPDATE flight_log2 SET max_altitude=? WHERE src_callsign=? ''', (altitude, src_callsign))
                nprev_vals[src_callsign]['altitude'] = nvalues[src_callsign]['altitude']    # Now higher
            else:
                print "Aircraft callsign: ", src_callsign, " is moving but is not higher than max height: ", nvalues[src_callsign]['altitude'], " Speed is: ", nvalues[src_callsign]['speed'], " Was: ", nprev_vals[src_callsign]['speed']
            # Set previous speed values to current
            nprev_vals[src_callsign]['speed'] = nvalues[src_callsign]['speed']
            continue
        print "Values for callsign Commit: ", src_callsign, " Values are: ", nvalues[src_callsign], " Prev_vals are: ", nprev_vals[src_callsign]
        db.commit()
        print "-----------------End of Packet: ", i, " ------------------------------"
#        libfap.fap_free(packet)
except KeyboardInterrupt:
    print "Keyboard input received, ignore"
#    db.commit()
    pass
# print "libfap_cleanup. If not called results in memory leak"
# libfap.fap_cleanup()
# close socket -- must be closed to avoid buffer overflow
sock.shutdown(0)
sock.close()
# Close the database. Note this should be on all forms of exit
db.close()
| tobiz/OGN-Flight-Logger_V3 | flogger3.py | Python | gpl-3.0 | 77,519 |
from thefuck import shells
from thefuck.utils import sudo_support
try:
import CommandNotFound
except ImportError:
enabled_by_default = False
@sudo_support
def match(command, settings):
    """Return True when the failed command is 'not found' but a known
    package provides it (according to command-not-found's database)."""
    if 'not found' not in command.stderr:
        return
    try:
        finder = CommandNotFound.CommandNotFound()
        packages = finder.getPackages(command.script.split(" ")[0])
        name, _ = packages[0]
        return True
    except IndexError:
        # IndexError is thrown when no matching package is found
        return False
@sudo_support
def get_new_command(command, settings):
    """Build a command that installs the providing package via apt-get and
    then re-runs the original command."""
    binary = command.script.split(" ")[0]
    name, _ = CommandNotFound.CommandNotFound().getPackages(binary)[0]
    template = shells.and_('sudo apt-get install {}', '{}')
    return template.format(name, command.script)
| bugaevc/thefuck | thefuck/rules/apt_get.py | Python | mit | 817 |
# python day 22
# author zach.wang
# -*- coding:utf-8 -*-
import smtplib, sys, os, re
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
# TODO ONLY TEST NETEASE AND QQ mail
# options
receiver = ['wzq1397@live.cn', ]
subject = 'attachments'
smtpserver = 'smtp.qq.com'
port = 465
username = '641651925'
sender = username + '<641651925@qq.com>'
password = 'vatggmloaqchbbhi'
filepath = 'E:\\xml\\github\\'
picpath = 'E:\\xml\\github\\'
content = '''
<html><h1>This is Zach.Wang</h1></html>
<b>Some <i>HTML</i> text</b> and an image.<br><img src="cid:image1"><br>good!
'''
def smtplib_login (msg):
    """Fill in the envelope headers on *msg* and send it through the
    configured SMTP server.

    The transport is chosen from the module-level ``port``: 465/994 use
    implicit SSL, 25 uses plaintext, and any other in-range port asks the
    user interactively.  Uses the module-level subject, sender, receiver,
    smtpserver, username and password.

    Bug fix: the original fell through after printing an error for an
    invalid port (or an unrecognised menu choice) and then crashed with
    ``NameError`` on the unbound ``smtp``; we now exit cleanly instead.
    """
    global port
    msg['Subject'] = subject
    msg['From'] = sender
    msg['To'] = ';'.join(receiver)
    smtp = None
    try:
        if 10 < port < 65535:
            if port == 465 or port == 994:
                # Implicit-SSL submission ports.
                smtp = smtplib.SMTP_SSL()
            elif port == 25:
                smtp = smtplib.SMTP()
            else:
                choice = input("which type do you want to choose!\n1) NO_SSL\t2) SSL\n:>")
                # input() never returns '\n' in Python 3, so treat a plain
                # Enter ('') as the NO_SSL default the original intended.
                if choice == '1' or choice == '':
                    smtp = smtplib.SMTP()
                elif choice == '2':
                    smtp = smtplib.SMTP_SSL()
                else:
                    sys.stderr.write("please choose right choice!\n")
        else:
            raise ValueError
    except ValueError:
        print("INVALID PORT!!!")
    if smtp is None:
        # Abort before touching the (unbound) connection object.
        sys.exit("no SMTP transport selected, nothing sent")
    smtp.connect(smtpserver, port)
    smtp.login(username, password)
    smtp.sendmail(sender, receiver, msg.as_string())
    smtp.quit()
class SmtplibSyntaxError(Exception):
    """Raised when the command line does not name a valid send mode."""

    def __str__ (self):
        message = "WRONG SYNTAX!"
        return "%s" % message
def smtplib_text ():
    """Send the module-level *content* as an HTML-only message."""
    body = MIMEText(content, 'html', 'utf-8')
    smtplib_login(body)
def smtplib_attachement ():
    """Send *filepath* as attachment(s).

    If the module-level ``filepath`` ends with a backslash it is treated as a
    Windows directory and every file inside is attached; otherwise the single
    file is attached.  Errors opening files are printed and the message is
    still sent with whatever was attached so far.
    """
    msg = MIMEMultipart('related')
    try:
        if filepath[-1] != '\\':
            att = MIMEText(open(filepath, 'rb').read(), 'base64', 'utf-8')
            att["Content-Type"] = 'application/octet-stream'
            # Using re over multiple files can hit unrecognised-encoding problems!!!
            att["Content-Disposition"] = 'attachment; filename=' + str(re.compile(r'\\').split(filepath)[-1])
            msg.attach(att)
        else:
            # Directory: attach every file it contains, named by basename.
            for lst in os.listdir(filepath):
                att = MIMEText(open(filepath + lst, 'rb').read(), 'base64', 'utf-8')
                att["Content-Type"] = 'application/octet-stream'
                att.add_header("Content-Disposition", "attachment", filename=os.path.basename(lst))
                msg.attach(att)
    except (FileNotFoundError, AttributeError, PermissionError) as e:
        print(e)
    smtplib_login(msg)
def smtplib_pic ():
    """Send *content* as HTML with inline image(s) taken from *picpath*.

    ``picpath`` is either a single image file or (when it ends with a
    backslash) a Windows directory whose supported images are all attached.

    Bug fixes vs the original:
    * the single-file extension check was ``bool(picdict.index(ext)) == 0``,
      which is only true for 'png' (index 0) and raises ValueError for any
      unknown extension -- replaced with a plain membership test;
    * the single-file Content-ID was '<image-1>', which never matched the
      ``cid:image1`` reference in the HTML template *content* -- now 'image1'.
    """
    msg = MIMEMultipart('related')
    msgText = MIMEText(content, 'html', 'utf-8')
    msg.attach(msgText)
    picdict = ['png', 'jpg', 'jpeg', 'gif']   # supported image extensions
    try:
        if picpath[-1] != '\\':
            # Single file: accept any supported extension.
            ext = os.path.basename(picpath).rsplit('.', 1)[-1].lower()
            if ext in picdict:
                with open(picpath, 'rb') as fp:
                    msgImage = MIMEImage(fp.read())
                # Matches <img src="cid:image1"> in the HTML body.
                msgImage.add_header('Content-ID', '<image1>')
                msg.attach(msgImage)
            else:
                print("unsupport type!")
        else:
            # Directory: attach every supported image, keyed by its basename.
            for lst in os.listdir(picpath):
                ext = os.path.basename(lst).rsplit('.', 1)[-1].lower()
                if ext in picdict:
                    with open(picpath + lst, 'rb') as fp:
                        msgImage = MIMEImage(fp.read())
                    msgImage.add_header('Content-ID', "<%s>" % os.path.basename(lst))
                    msg.attach(msgImage)
    except (FileNotFoundError, AttributeError, PermissionError, ValueError) as e:
        print(e)
    smtplib_login(msg)
# Command-line dispatch: exactly one mode argument is expected --
# "text", "attachement"/"attach" or "pic".  `flag` records whether a send
# routine actually ran, so the finally-block can report success/failure.
global flag
flag = 0
try:
    # Bug fix: the original accepted ``len(sys.argv) <= 2`` and then read
    # sys.argv[1], which raises an uncaught IndexError when no argument is
    # given; require exactly one argument instead.
    if len(sys.argv) == 2:
        mode = str(sys.argv[1]).lower()
        if mode == "text":
            smtplib_text()
            flag = 1
        if mode == "attachement" or mode == "attach":
            smtplib_attachement()
            flag = 1
            print(len(sys.argv))
        if mode == "pic":
            smtplib_pic()
            flag = 1
        if flag != 1:
            raise SmtplibSyntaxError()
    else:
        raise SmtplibSyntaxError()
except SmtplibSyntaxError as e:
    print(e)
finally:
    if flag == 1:
        print("send sucessfull!")
    else:
        print("send failed!")
| WZQ1397/automatic-repo | project/mailsystem/ZachMaillib/SmtpSystem.py | Python | lgpl-3.0 | 4,650 |
#! /usr/bin/env python
# Copyright 2014 Uri Laserson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import optparse
import vdj
import vdj.pipeline
parser = optparse.OptionParser()
(options, args) = parser.parse_args()

# Select input/output streams from the positional arguments:
#   two args -> read file, write file; one arg -> read file, write stdout;
#   no args  -> filter stdin to stdout.
# NOTE(review): three or more arguments leave inhandle/outhandle unbound and
# crash below with NameError -- confirm whether argument validation is wanted.
if len(args) == 2:
    inhandle = open(args[0],'r')
    outhandle = open(args[1],'w')
elif len(args) == 1:
    inhandle = open(args[0],'r')
    outhandle = sys.stdout
elif len(args) == 0:
    inhandle = sys.stdin
    outhandle = sys.stdout

# Translate every chain parsed from the IMGT-format input and write it out
# (Python 2 print-to-file syntax).
for chain in vdj.parse_imgt(inhandle):
    vdj.pipeline.translate_chain(chain)
    print >>outhandle, chain
| churchlab/vdj | bin/translate_chains.py | Python | apache-2.0 | 1,081 |
# Copyright 2016 Sebastian Spautz <sebastian@human-injection.de>
#
# This file is part of "RPI Display Backlight Control for Kodi".
#
# "RPI Display Backlight Control for Kodi" is free software: you can
# redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation, either
# version 3 of the License, or any later version.
#
# "RPI Display Backlight Control for Kodi" is distributed in the hope
# that it will be useful, but WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
import xbmcaddon
import xbmcgui
import xbmc
import os
addon = xbmcaddon.Addon()
addonName = addon.getAddonInfo('name')
addonPath = addon.getAddonInfo('path')
class Screensaver(xbmcgui.WindowXMLDialog):
    """Kodi screensaver window that turns the official Raspberry Pi touch
    display's backlight off while active and back on when dismissed."""

    class ExitMonitor(xbmc.Monitor):
        # Relays Kodi's screensaver-deactivated event to a callback.
        def __init__(self, exit_callback):
            self.exit_callback = exit_callback

        def onScreensaverDeactivated(self):
            self.exit_callback()

    def prepareShellCommand(self, command):
        # Wrap the command in "sudo bash -c '...'" when not running as root,
        # since the sysfs backlight file is normally root-writable only.
        if os.geteuid() != 0:
            self.log('Don\'t root, try sudo to toggle backlight.')
            return 'sudo bash -c \'' + command + '\''
        else:
            return command;

    def onInit(self):
        # Called by Kodi when the screensaver opens: switch the backlight
        # off (writing 1 to bl_power powers the display down).
        self.log('Start Screensaver')
        self.exit_monitor = self.ExitMonitor(self.exit)
        shellCommand = self.prepareShellCommand('echo 1 > /sys/class/backlight/rpi_backlight/bl_power')
        os.system(shellCommand)

    def exit(self):
        # Re-enable the backlight (0 == on) and close the dialog.
        self.exit_monitor = None
        shellCommand = self.prepareShellCommand('echo 0 > /sys/class/backlight/rpi_backlight/bl_power')
        os.system(shellCommand)
        self.close()
        self.log('Stopped Screensaver')

    def log(self, msg):
        # Prefix every message with the add-on name in the Kodi log.
        xbmc.log(u'%(name)s: %(message)s' % {'name': addonName, 'message': msg})
if __name__ == '__main__':
    # Bug fix: "sys" is used at the very end of this script
    # (sys.modules.clear()) but was never imported at module level, which
    # raised NameError at shutdown.  Import it here.
    import sys

    # Build the dialog from the addon's skin XML; the file name is derived
    # from the addon name with spaces stripped.
    screensaver = Screensaver(
        'screensaver-%s-Main.xml' % addonName.replace(' ', ''),
        addonPath,
        'default',
    )
    screensaver.doModal()
    del screensaver
sys.modules.clear() | sebastiansIT/RPI-Display-Backlight-Controller-for-Kodi | sources/screensaver.rpi-backlight-disabler/screensaver.py | Python | gpl-3.0 | 2,345 |
# This file is part of the Hotwire Shell user interface.
#
# Copyright (C) 2007 Colin Walters <walters@verbum.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os, sys, stat, signal, datetime
import gtk, gobject, pango
from hotwire.logutil import log_except
import hotwire_ui.widgets as hotwidgets
from hotwire_ui.render import ClassRendererMapping, TreeObjectsRenderer
class DictRenderer(TreeObjectsRenderer):
    """Render a single dict as a two-column (Key, Value) tree view.

    Python 2 code: uses ``unicode`` and ``dict.iteritems``.
    """
    def __init__(self, *args, **kwargs):
        super(DictRenderer, self).__init__(*args, **kwargs)
        # The single dict being displayed; set by the first append_obj() call.
        self.__obj = None
    def _setup_view_columns(self):
        # Both columns read the same (key, value) tuple stored in model column
        # 0; the trailing data-func argument selects which tuple slot to show.
        colidx = self._table.insert_column_with_data_func(-1, 'Key',
                                          hotwidgets.CellRendererText(),
                                          self.__render_tuple_slice, 0)
        colidx = self._table.insert_column_with_data_func(-1, 'Value',
                                          hotwidgets.CellRendererText(ellipsize=True),
                                          self.__render_tuple_slice, 1)
    @log_except()
    def __render_tuple_slice(self, col, cell, model, iter, idx):
        # Cell data func: show repr() of the idx-th element of the row tuple.
        tup = model.get_value(iter, 0)
        v = tup[idx]
        valrepr = unicode(repr(v))
        cell.set_property('text', valrepr)
    def get_objects(self):
        yield self.__obj
    def append_obj(self, o):
        # Only the first dict is accepted; later appends are silently ignored.
        if self.__obj is not None:
            return
        self.__obj = o
        superappend = super(DictRenderer, self).append_obj
        for k,v in o.iteritems():
            superappend((k, v))
# Make this renderer the default for plain dict objects.
ClassRendererMapping.getInstance().register(dict, DictRenderer)
| desirable-objects/hotwire-shell | hotwire_ui/renderers/dict.py | Python | gpl-2.0 | 2,355 |
"""This is to generate the awards table."""
import argparse
import random
import socket
import sys
import time
from copy import copy
from datetime import datetime
from json import dumps, load
from os.path import isdir, split
from textwrap import wrap
import requests
import tabulate
from praw import Reddit
# Predicates keyed by the CLI --type choices; each decides whether a fetched
# award entry is kept: a=all, g=global, s=subreddit, m=moderator.
AWARD_TYPES = {
    "a": lambda _: True,
    "g": lambda award: award["award"]["awardType"] == "GLOBAL",
    "s": lambda award: award["award"]["awardType"] == "SUBREDDIT",
    "m": lambda award: award["award"]["awardType"] == "MODERATOR",
}
def receive_connection():
    """Wait for and then return a connected socket.

    Opens a TCP listening socket on localhost port 65010 (the previous
    docstring incorrectly said 8080) and accepts exactly one client --
    the browser hitting the OAuth redirect uri.
    """
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind(("localhost", 65010))
    server.listen(1)
    client = server.accept()[0]
    server.close()
    return client
def send_message(client, message):
    """Echo *message* locally, reply to *client* with a bare HTTP 200
    response carrying it as the body, and close the connection."""
    print(message)
    response = f"HTTP/1.1 200 OK\r\n\r\n{message}"
    client.send(response.encode("utf-8"))
    client.close()
def get_request_params(client_id, redirect_uri, thing):
    """Run a browser OAuth flow and resolve *thing* via the Reddit API.

    :param client_id: First-party Reddit client id.
    :param redirect_uri: Redirect uri registered for the client; it must point
        at localhost:65010 so ``receive_connection`` can catch the redirect.
    :param thing: Fullname of a submission or comment.
    :returns: Tuple ``(access_token, thing_fullname, subreddit_fullname)``, or
        ``None`` when the state check fails or Reddit reports an error.
    """
    scopes = ["*"]
    reddit = Reddit(
        client_id=client_id,
        client_secret=None,
        redirect_uri=redirect_uri,
        user_agent="Award fetcher by u/Lil_SpazJoekp",
    )
    # Random state guards against a mismatched or forged redirect.
    state = str(random.randint(0, 65000))
    url = reddit.auth.url(scopes, state, "temporary")
    print(f"Open this url in your browser: {url}")
    sys.stdout.flush()
    client = receive_connection()
    data = client.recv(1024).decode("utf-8")
    # Parse the query string out of the raw HTTP request line by hand.
    param_tokens = data.split(" ", 2)[1].split("?", 1)[1].split("&")
    params = {
        key: value for (key, value) in [token.split("=") for token in param_tokens]
    }
    if state != params["state"]:
        send_message(
            client,
            f"State mismatch. Expected: {state} Received: {params['state']}",
        )
        return
    elif "error" in params:
        send_message(client, params["error"])
        return
    # NOTE(review): on the success path the browser connection is never
    # answered or closed -- confirm whether a send_message() is intended here.
    reddit.auth.authorize(params["code"])
    thing = list(reddit.info([thing]))[0]
    subreddit = thing.subreddit_id
    return reddit._authorized_core._authorizer.access_token, thing.fullname, subreddit
def fetch_awards(client_id, redirect_uri, thing_fullname):
    """Query Reddit's GraphQL endpoint for the awards usable on a thing.

    :returns: The decoded JSON response, or ``None`` when no access token was
        obtained.
    """
    # NOTE(review): get_request_params can return None (state mismatch/error),
    # which this tuple unpack would crash on before the "if" below -- confirm.
    access_code, thing, subreddit = get_request_params(
        client_id, redirect_uri, thing_fullname
    )
    if access_code:
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {access_code}",
            "User-Agent": "Award fetcher by u/Lil_SpazJoekp",
        }
        params = {"request_timestamp": str(int(time.time() * 1000))}
        # Persisted GraphQL query id + variables, built as a raw JSON string.
        data = f'{{"id":"4fb406bbd0cf","variables":{{"subredditId":"{subreddit}","thingId":"{thing}","includeGroup":true}}}}'
        response = requests.post(
            "https://gql.reddit.com/", headers=headers, params=params, data=data
        )
        return response.json()
def main():
    """Run the award-table generator CLI.

    usage: generate_award_table.py [-t] [-f] format [-c] client_id [-T] submission or
    comment fullname [-l] load file [-o] out file.

    Grabs the awards available to award a submission or comment.

    :param --type [-t]: One of ``a`` for all, ``g`` for global, ``s`` for subreddit, or
        ``m`` for moderator. Determines the types of awards to give (default: ``g``).
    :param --format [-f]: One of ``j`` for json or ``r`` for rst.
    :param --client_id [-c]: Used to fetch the awards. Must be a 1st party client id.
        Note: If this is passed [redirect_uri] and [thing] must be provided. If not
        [load_file] must be passed.
    :param --redirect_uri [-r]: Redirect uri for the auth flow as this requires an
        access token.
    :param --thing [-T]: A submission or comment fullname.
    :param --load_file [-l]: Load award json from file. This is useful if you grab the
        JSON response from a browser request. Can not be used with [client_id]. If not
        provided [client_id] and [thing] is required.
    :param --out_file [-o]: File to write the formatted output. If not provided output
        will be written to STDOUT.
    """
    parser = argparse.ArgumentParser(
        description="Parse awards and generate an rst formatted table"
    )
    parser.add_argument(
        "-t",
        "--type",
        action="store",
        choices=["a", "g", "s", "m"],
        default="g",
        help=(
            "One of ``a`` for all, ``g`` for global, ``s`` for subreddit, or ``m`` for"
            " moderator. Determines the types of awards to give (default: ``g``)."
        ),
    )
    parser.add_argument(
        "-f",
        "--format",
        action="store",
        choices=["j", "r"],
        default="r",
        help="One of ``j`` for json or ``r`` for rst (default: ``r``).",
    )
    parser.add_argument(
        "-c",
        "--client_id",
        action="store",
        default=None,
        help=(
            "Used to fetch the awards. Must be a 1st party client id. Note: If this is"
            " passed [thing] and [subreddit] must be provided."
        ),
    )
    parser.add_argument(
        "-r",
        "--redirect_uri",
        action="store",
        default=None,
        help="Redirect uri for the auth flow as this requires an access token",
    )
    parser.add_argument(
        "-T",
        "--thing",
        action="store",
        default=None,
        help=(
            "A submission or comment fullname. Must be used in conjunction with"
            " [client_id]."
        ),
    )
    parser.add_argument(
        "-l",
        "--load_file",
        action="store",
        default=None,
        help=(
            "Load award json from file. This is useful if you grab the JSON response"
            " from a browser request. Can not be used with [client_id]. If not provided"
            " [client_id] and [thing] is required."
        ),
    )
    parser.add_argument(
        "-o",
        "--out_file",
        action="store",
        default=None,
        help=(
            "File to write the formatted. If not provided output will be written to"
            " STDOUT."
        ),
    )
    args = parser.parse_args()
    award_type = args.type
    output_format = args.format
    client_id = args.client_id
    redirect_uri = args.redirect_uri
    thing = args.thing
    load_file = args.load_file
    out_file = args.out_file
    # 'client_id' and 'load_file' are mutually exclusive sources of award data.
    if client_id and load_file:
        print("Both 'client_id' and 'load_file' can not be provided")
        return
    if client_id:
        if not thing:
            # Bug fix: message previously read "requited".
            print("'thing' is required if 'client_id' is provided")
            return
        award_json = fetch_awards(client_id, redirect_uri, thing)
    else:
        with open(load_file) as f:
            award_json = load(f)
    # Global awards (ids starting with "gid") sort first, then by coin price
    # and name.
    awards = sorted(
        validate_award_json(award_json, award_type),
        key=lambda d: (
            0 if d["id"].startswith("gid") else 1,
            d["coinPrice"],
            d["name"],
        ),
    )
    if output_format == "j":
        if load_file:
            print(
                "Uh...there's nothing to do if you want output the loaded JSON from"
                " 'load_file' as JSON"
            )
            return
        # Bug fix: json.dumps takes "indent", not "ident" (was a TypeError).
        final_content = dumps(award_json, indent=4)
    else:
        rows = [
            [
                f"{award['name']}",
                f".. image:: {award['icon64']['url']}",
                award["id"],
                "\n".join(wrap(award["description"], width=50)),
                str(award["coinPrice"]),
            ]
            for award in awards
        ]
        table = tabulate.tabulate(
            rows,
            ["Name", "Icon", "Gild Type", "Description", "Cost"],
            tablefmt="rst",
            disable_numparse=True,
        )
        final_content = (
            "This is a list of known global awards (as of "
            f"{datetime.today().strftime('%m/%d/%Y')})\n\n{table}"
        )
    if out_file is None:
        print(final_content)
    else:
        if isdir(split(out_file)[0]):
            with open(out_file, "w") as f:
                f.write(final_content)
            print(f"Successfully written awards to {out_file!r}")
        else:
            # Bug fix: message previously read "THe directory".
            print(f"The directory, {split(out_file)[0]!r}, does not exist.")
def validate_award_json(award_json, award_type):
    """Drill into the GraphQL response and return the matching award dicts.

    :param award_json: Decoded JSON (dict) from the gql endpoint.
    :param award_type: One of the ``AWARD_TYPES`` keys (``a``/``g``/``s``/``m``).
    :returns: List of entries passing the *award_type* predicate, or an empty
        list when the JSON does not have the expected structure.
    """
    awards = copy(award_json)
    for key in ["data", "subredditInfoById", "sortedUsableAwards"]:
        try:
            awards = awards[key]
        except (KeyError, TypeError):
            # Bug fix: previously execution fell through after printing and
            # kept going with the wrong object, crashing later.  Also catch
            # TypeError for non-dict intermediates (e.g. None).
            print("Invalid award JSON")
            return []
    return [award["award"] for award in awards if AWARD_TYPES[award_type](award)]
# Entry point when executed as a script.
if __name__ == "__main__":
    main()
| praw-dev/praw | tools/generate_award_table.py | Python | bsd-2-clause | 8,880 |
# file doc/conf.py
#
# Copyright 2012 Emory University Library
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Finding Aids documentation build configuration file
# Django must be initialized before importing project code that touches models.
import django
django.setup()
import findingaids
# Sphinx extensions used when building the documentation.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.coverage'
]
#templates_path = ['templates']
exclude_trees = ['build']
source_suffix = '.rst'
master_doc = 'index'
project = 'Finding Aids'
copyright = '2010, Emory University Libraries'
# Short X.Y version derived from the package; release is the full string.
version = '%d.%d' % findingaids.__version_info__[:2]
release = findingaids.__version__
#modindex_common_prefix = ['findingaids.']
pygments_style = 'sphinx'
html_theme = 'alabaster'
#html_static_path = ['static']
htmlhelp_basename = 'fadoc'
# (source start file, target name, title, author, documentclass)
latex_documents = [
    ('index', 'FindingAids.tex', u'Finding Aids Documentation',
     u'EUL', 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
# Render todo:: directives in the built docs instead of hiding them.
todo_include_todos = True
| emory-libraries/findingaids | doc/conf.py | Python | apache-2.0 | 1,491 |
"""
This module will test the start --force (ex restart) command
"""
from controller.app import Configuration
from tests import (
Capture,
create_project,
exec_command,
get_container_start_date,
init_project,
pull_images,
start_project,
start_registry,
)
def test_all(capfd: Capture) -> None:
    """End-to-end test: 'restart' is gone; 'start --force' recreates services."""
    # The removed "restart" command must report that it no longer exists.
    exec_command(capfd, "restart", "This command is no longer available")
    create_project(
        capfd=capfd,
        name="first",
        auth="postgres",
        frontend="no",
    )
    init_project(capfd)
    start_registry(capfd)
    pull_images(capfd)
    start_project(capfd)
    start_date1 = get_container_start_date(capfd, "backend")
    exec_command(
        capfd,
        "start",
        "Stack started",
    )
    start_date2 = get_container_start_date(capfd, "backend")
    # The service is not restarted because its definition is unchanged
    assert start_date1 == start_date2
    if Configuration.swarm_mode:
        # In swarm mode the backend service is scaled down first -- presumably
        # so the forced start below actually recreates it; TODO confirm.
        exec_command(
            capfd,
            "remove backend",
            "first_backend scaled to 0",
            "verify: Service converged",
            "Services removed",
        )
    exec_command(
        capfd,
        "start --force",
        "Stack started",
    )
    start_date3 = get_container_start_date(capfd, "backend")
    # A forced start recreates the container, so the start timestamp changes.
    assert start_date2 != start_date3
| rapydo/do | tests/test_restart.py | Python | mit | 1,353 |
from ..workdays import *
from datetime import datetime, timedelta
from time import strptime
import math
import traceback
# Registry of test callables, populated by the @test decorator below.
tests=[]
def test( fn ):
    # Decorator: register *fn* for execution by runTests() and return it.
    tests.append(fn)
    return fn
def runTests():
    # Run every registered test (Python 2); print tracebacks instead of
    # aborting so all tests execute even when one fails.
    for t in tests:
        print t
        try: t()
        except Exception as e:
            print e
            traceback.print_exc()
        print
def _parse_date( datestr ):
return datetime(*strptime(datestr, "%Y-%m-%d")[0:5]).date()
def _parse_datetime( datestr ):
if type(datestr) == type(""):
return datetime(*strptime(datestr, "%Y-%m-%d %H:%M")[0:5])
elif type(datestr) == type((1,2)):
return datetime(*datestr)
elif type(datestr) == type(dt.datetime(1900,1,1)):
return datestr
return None
def _is_same_dt( d1, d2, numParts=5 ):
return d1.timetuple()[:numParts] == d2.timetuple()[:numParts]
@test
def shouldEstimateEnd():
    """Expect estimate_end to project the finish date in calendar days."""
    def test( d1, d2, total, remaining, dexp ):
        # d1 = start, d2 = "now", dexp = expected end (date precision).
        d1 = _parse_datetime(d1)
        d2 = _parse_datetime(d2)
        dexp = _parse_date(dexp)
        dres = estimate_end( d1, d2, total, remaining )
        print "expected: %s, actual %s, %s" % (dexp, dres, _is_same_dt( dres, dexp, 3 ) )
    # Monday 2017-03-06
    test( "2017-03-06 00:00", "2017-03-07 00:00", 2, 1, "2017-03-08" )
    test( "2017-03-06 00:00", "2017-03-08 00:00", 2, 1, "2017-03-10" )
    test( "2017-03-06 00:00", "2017-03-09 00:00", 2, 1, "2017-03-12" )
    test( "2017-03-06 00:00", "2017-03-10 00:00", 2, 1, "2017-03-14" )
    test( "2017-03-06 00:00", "2017-03-13 00:00", 2, 1, "2017-03-20" )
@test
def shouldAdjustStart():
    """Expect adjusted_start to move weekend starts to the following Monday."""
    def test( d1, dexp ):
        dexp = _parse_datetime(dexp)
        dres = adjusted_start( _parse_datetime( d1 ) )
        print "expected: %s, actual %s, %s" % (dexp, dres, _is_same_dt( dres, dexp, 5 ) )
    # Monday 2017-03-06
    test( "2017-03-06 08:00", "2017-03-06 08:00" )
    test( "2017-03-07 08:00", "2017-03-07 08:00" )
    test( "2017-03-08 08:00", "2017-03-08 08:00" )
    test( "2017-03-09 08:00", "2017-03-09 08:00" )
    test( "2017-03-10 08:00", "2017-03-10 08:00" )
    # Saturday/Sunday starts shift to Monday 00:00.
    test( "2017-03-11 08:00", "2017-03-13 00:00" )
    test( "2017-03-12 08:00", "2017-03-13 00:00" )
@test
def shouldAdjustEnd():
    """Expect adjusted_end to pull weekend ends back to Friday 23:59."""
    def test( d1, dexp ):
        dexp = _parse_datetime(dexp)
        dres = adjusted_end( _parse_datetime( d1 ) )
        print "expected: %s, actual %s, %s" % (dexp, dres, _is_same_dt( dres, dexp, 5 ) )
    # Monday 2017-03-06
    test( "2017-03-06 08:00", "2017-03-06 08:00" )
    test( "2017-03-07 08:00", "2017-03-07 08:00" )
    test( "2017-03-08 08:00", "2017-03-08 08:00" )
    test( "2017-03-09 08:00", "2017-03-09 08:00" )
    test( "2017-03-10 08:00", "2017-03-10 08:00" )
    # Saturday/Sunday ends shift back to Friday 23:59.
    test( "2017-03-11 08:00", "2017-03-10 23:59" )
    test( "2017-03-12 08:00", "2017-03-10 23:59" )
@test
def shouldEstimateEndWorkdays():
    """Expect estimate_end_workdays to project in workdays, skipping weekends."""
    def test( d1, d2, total, remaining, dexp ):
        d1 = _parse_datetime(d1)
        d2 = _parse_datetime(d2)
        dexp = _parse_datetime(dexp)
        dres = estimate_end_workdays( d1, d2, total, remaining )
        print "expected: %s, actual %s, %s" % (dexp, dres, _is_same_dt( dres, dexp, 3 ) )
    # Monday 2017-03-06
    # same week
    test( "2017-03-06 08:00", "2017-03-07 08:00", 2, 1, "2017-03-08 08:00" )
    test( "2017-03-06 08:00", "2017-03-08 08:00", 2, 1, "2017-03-10 08:00" )
    # projection spans weekends
    test( "2017-03-06 08:00", "2017-03-09 08:00", 2, 1, "2017-03-14 08:00" )
    test( "2017-03-06 08:00", "2017-03-10 08:00", 2, 1, "2017-03-16 08:00" )
    # a weekend is in the completed time, estimate falls on weekend
    # 06 07 08 09 10 w11 w12 13 14 15 16 17 w18 w19 20
    test( "2017-03-06 08:00", "2017-03-13 08:00", 2, 1, "2017-03-20 08:00" )
    # Start on weekend
    test( "2017-03-05 08:00", "2017-03-10 08:00", 2, 1, "2017-03-16 08:00" )
    test( "2017-03-04 08:00", "2017-03-10 08:00", 2, 1, "2017-03-16 08:00" )
    # Start and now on weekend
    test( "2017-03-05 08:00", "2017-03-11 08:00", 2, 1, "2017-03-17 23:59" )
    test( "2017-03-04 08:00", "2017-03-12 08:00", 2, 1, "2017-03-17 23:59" )
@test
def shouldEstimateEndWorkdays2():
    """Sweep several "done" counts; one week of work done projects one extra
    week per remaining unit."""
    def test( d1, d2, total, remaining, dexp ):
        d1 = _parse_datetime(d1)
        d2 = _parse_datetime(d2)
        dexp = _parse_datetime(dexp)
        dres = estimate_end_workdays( d1, d2, total, remaining )
        print "expected: %s, actual %s, %s" % (dexp, dres, _is_same_dt( dres, dexp, 3 ) )
        # NOTE(review): prints a 3-field comparison above but diffs on the
        # 5-field default below -- confirm which precision is intended.
        if not _is_same_dt( dres, dexp ):
            print "  diff:", dres - dexp
    # Monday 2017-03-06
    d1 = dt.datetime(2017, 03, 06, 8 )
    d2 = dt.datetime(2017, 03, 13, 8 )
    for done in xrange(1, 22, 5):
        dexp = d2 + dt.timedelta( weeks=done )
        print done, dt.timedelta( weeks=done ),
        test( d1, d2, done+1, done, dexp )
# Execute all registered tests when the module is imported or run.
runTests()
| mmahnic/trac-tickethistory | tickethistory/test/workdays_t.py | Python | mit | 4,805 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class StorageAccountRegenerateKeyParameters(Model):
    """Request body for the storage-account "regenerate key" operation.

    Autogenerated msrest model -- manual edits will be lost on regeneration.

    :param key_name: Which key to regenerate. Possible values include:
     'key1', 'key2'
    :type key_name: str or :class:`KeyName
     <Fixtures.AcceptanceTestsStorageManagementClient.models.KeyName>`
    """
    # msrest serialization map: Python attribute -> wire name / wire type.
    _attribute_map = {
        'key_name': {'key': 'keyName', 'type': 'KeyName'},
    }
    def __init__(self, key_name=None):
        self.key_name = key_name
| tbombach/autorest | src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/StorageManagementClient/storagemanagementclient/models/storage_account_regenerate_key_parameters.py | Python | mit | 960 |
from importlib import import_module
from django.apps import AppConfig as BaseAppConfig
class AppConfig(BaseAppConfig):
    """Django application configuration for the "laboite" project."""
    name = "laboite"
    def ready(self):
        # Imported for side effects only: registers the app's signal receivers.
        import_module("laboite.receivers")
| bgaultier/laboitepro | laboite/apps/__init__.py | Python | agpl-3.0 | 209 |
"""
Tests suite for the template tags of the notifications app.
"""
from django.test import TestCase
from django.template import (engines,
TemplateSyntaxError)
from django.contrib.auth import get_user_model
from ..models import Notification
class NotificationsTemplateTagsTestCase(TestCase):
    """
    Tests suite for the template tags of the notifications app.
    """
    def setUp(self):
        """
        Create two users ("johndoe", "johnsmith") and three notifications:
        one unread per user, plus one already-read notification for "johndoe".
        """
        self.user1 = get_user_model().objects.create_user(username='johndoe',
                                                          password='illpassword',
                                                          email='john.doe@example.com')
        self.user2 = get_user_model().objects.create_user(username='johnsmith',
                                                          password='illpassword',
                                                          email='john.smith@example.com')
        self.notif1 = Notification.objects.create(recipient=self.user1,
                                                  title='Test 1',
                                                  message='Test 1',
                                                  message_html='<p>Test 1</p>')
        self.notif2 = Notification.objects.create(recipient=self.user2,
                                                  title='Test 2',
                                                  message='Test 2',
                                                  message_html='<p>Test 2</p>')
        self.notif3 = Notification.objects.create(recipient=self.user1,
                                                  title='Test 3',
                                                  message='Test 3',
                                                  message_html='<p>Test 3</p>',
                                                  unread=False)
    def test_notifications_count_include_tag(self):
        """
        Test if the ``notifications_count`` template tag work when used as an include tag.
        """
        template_code = "{% load notifications %}{% notifications_count %}"
        template = engines['django'].from_string(template_code)
        html = template.render({'user': self.user1})
        # user1 has exactly one unread notification (notif3 is already read).
        self.assertEqual('1', html)
    def test_notifications_count_assignment_tag(self):
        """
        Test if the ``notifications_count`` template tag work when used as an assignment tag.
        """
        template_code = "{% load notifications %}{% notifications_count as foobar %}#{{ foobar }}"
        template = engines['django'].from_string(template_code)
        html = template.render({'user': self.user1})
        self.assertEqual('#1', html)
    def test_notifications_count_bad_argc(self):
        """
        Test if the ``notifications_count`` template tag raise error on bad arguments count.
        """
        with self.assertRaises(TemplateSyntaxError):
            template_code = "{% load notifications %}{% notifications_count 1 2 3 4 %}"
            template = engines['django'].from_string(template_code)
            template.render({'user': self.user1})
    def test_notifications_count_bad_argv(self):
        """
        Test if the ``notifications_count`` template tag raise error on bad arguments placement.
        """
        with self.assertRaises(TemplateSyntaxError):
            template_code = "{% load notifications %}{% notifications_count foo bar %}"
            template = engines['django'].from_string(template_code)
            template.render({'user': self.user1})
| TamiaLab/carnetdumaker | apps/notifications/tests/test_templatetags.py | Python | agpl-3.0 | 3,591 |
# Create your views here.
from django import forms
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseRedirect
from django.template.loader import get_template
from django.core.paginator import Paginator
from django.core.urlresolvers import reverse
import datetime
from django.shortcuts import render_to_response
from django.core import serializers
import json
import decimal
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
# app specific files
from .models import *
from .forms import *
def to_json(python_object):
    """json.dumps ``default`` hook: serialize a Cart into a tagged dict.

    Raises TypeError for any other type, as json.dumps expects.
    """
    if isinstance(python_object, Cart):
        return {
            '__class__': 'Cart',
            '__value__': {
                # Cart items are Django model instances; reuse the model
                # serializer so they round-trip through from_json().
                'items': serializers.serialize('json', python_object.items),
                # Decimal is not JSON-serializable; store as a string.
                'total_price': str(python_object.total_price)
            }
        }
    raise TypeError(repr(python_object) + ' is not JSON serializable')
def from_json(json_object):
    """json.loads ``object_hook``: rebuild a Cart from the dict written by
    to_json(); any other dict is returned unchanged."""
    if '__class__' in json_object:
        if json_object['__class__'] == 'Cart':
            return Cart(items=[deserialized_object.object for deserialized_object in serializers.deserialize('json', json_object['__value__']['items'])],
                        total=decimal.Decimal(json_object['__value__']['total_price']))
    return json_object
def create_product(request):
    """Show the product form; on a valid POST, save and present a fresh form."""
    form = ProductForm(request.POST or None)
    if form.is_valid():
        form.save()
        form = ProductForm()
    t = get_template('depot/create_product.html')
    # locals() is the template context, so local names here are part of the
    # template contract -- do not rename them.
    c = RequestContext(request, locals())
    return HttpResponse(t.render(c))
@login_required
def list_product(request):
    """Paginated product list (10 per page); requires authentication."""
    list_items = Product.objects.all()
    paginator = Paginator(list_items, 10)
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        # Non-numeric ?page= falls back to the first page.
        page = 1
    try:
        list_items = paginator.page(page)
    except:
        # Out-of-range page numbers fall back to the last page.
        list_items = paginator.page(paginator.num_pages)
    t = get_template('depot/p_list_product.html')
    c = RequestContext(request, locals())
    return HttpResponse(t.render(c))
def view_product(request, id):
    """Read-only detail page for a single product.

    NOTE: parameter "id" shadows the builtin; kept for URLconf compatibility.
    """
    product_instance = Product.objects.get(id=id)
    t = get_template('depot/view_product.html')
    c = RequestContext(request, locals())
    return HttpResponse(t.render(c))
def edit_product(request, id):
    """Display and process the edit form for an existing product."""
    product_instance = Product.objects.get(id=id)
    form = ProductForm(request.POST or None, instance=product_instance)
    if form.is_valid():
        form.save()
    t = get_template('depot/edit_product.html')
    c = RequestContext(request, locals())
    return HttpResponse(t.render(c))
def store_view(request):
    """Storefront: products whose availability date has passed, newest first."""
    products = Product.objects.filter(date_available__lt=datetime.datetime.now().date()).order_by('-date_available')
    number = len(products)
    try:
        cart = json.loads(request.session.get('cart', None), object_hook=from_json)
    except:
        # Missing or corrupt session cart: display an empty one.
        cart = Cart()
    return render_to_response(
        template_name='depot/store.html',
        dictionary=locals(),
        context_instance=RequestContext(request)
    )
def view_cart(request):
    """Render the cart page, creating and persisting an empty cart if needed."""
    try:
        cart = json.loads(request.session.get("cart", None), object_hook=from_json)
    except:
        cart = None
    if not cart:
        cart = Cart()
        request.session['cart'] = json.dumps(cart, default=to_json)
    return render_to_response('depot/view_cart.html', locals(), context_instance=RequestContext(request))
def add_to_cart(request, product_id):
    """Add the given product to the session cart, then show the cart page."""
    product = Product.objects.get(id=product_id)
    try:
        cart = json.loads(request.session['cart'], object_hook=from_json)
    except:
        cart = None
    if not cart:
        cart = Cart()
        request.session['cart'] = json.dumps(cart, default=to_json)
    cart.add_product(product)
    # Persist the updated cart back into the session.
    request.session['cart'] = json.dumps(cart, default=to_json)
    return view_cart(request)
def clear_cart(request):
    """Replace the session cart with a new empty Cart, then show the cart."""
    request.session['cart'] = json.dumps(Cart(), default=to_json)
    return view_cart(request)
def login_view(request):
    """Authenticate the user from POSTed credentials and log them in.

    On success, show the (login-required) product list; on failure fall back
    to the public storefront.
    """
    user = authenticate(username=request.POST.get('username', None),
                        password=request.POST.get('password', None))
    if user is not None:
        login(request, user)
        # Removed leftover debug statement: print(request.user)
        return list_product(request)
    else:
        # Authentication failed; not handled yet (translated from the
        # original Chinese comment) -- no error message is shown.
        return store_view(request)
def logout_view(request):
    """Log the current user out and return to the storefront."""
    logout(request)
    return store_view(request)
| HarrisonHDU/myerp | apps/depot/views.py | Python | mit | 4,631 |
from sqlobject import *
from ics_demo.dao.orm.base import IcsSQLObject
from ics_demo.dao.orm.host import Host
class BlockDevice(IcsSQLObject):
    """Block device attached to a host, persisted via the SQLObject ORM."""
    #id = StringCol()
    # Device name -- presumably the kernel device name; TODO confirm.
    name = StringCol()
    used = BoolCol()
    capacity = StringCol()
    absolute_path = StringCol()
    # One-to-one join to the OSD (if any) backed by this device.
    osd = SingleJoin('OSD')
host = ForeignKey('Host') | lielongxingkong/ics_demo | ics_demo/dao/orm/block_device.py | Python | mit | 364 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2003-2005 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Classes for relationships.
"""
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
import logging
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .lib import Person, ChildRefType, EventType, FamilyRelType
from .plug import PluginRegister, BasePluginManager
from .const import GRAMPS_LOCALE as glocale
_ = glocale.translation.sgettext
MALE = Person.MALE
FEMALE = Person.FEMALE
UNKNOWN = Person.UNKNOWN
LOG = logging.getLogger("gen.relationship")
LOG.addHandler(logging.StreamHandler())
#-------------------------------------------------------------------------
#
#
#
#-------------------------------------------------------------------------
_LEVEL_NAME = ["", "first", "second", "third", "fourth", "fifth", "sixth",
"seventh", "eighth", "ninth", "tenth", "eleventh", "twelfth",
"thirteenth", "fourteenth", "fifteenth", "sixteenth",
"seventeenth", "eighteenth", "nineteenth", "twentieth",
"twenty-first", "twenty-second", "twenty-third", "twenty-fourth",
"twenty-fifth", "twenty-sixth", "twenty-seventh", "twenty-eighth",
"twenty-ninth", "thirtieth", "thirty-first", "thirty-second",
"thirty-third", "thirty-fourth", "thirty-fifth", "thirty-sixth",
"thirty-seventh", "thirty-eighth", "thirty-ninth", "fortieth",
"forty-first", "forty-second", "forty-third", "forty-fourth",
"forty-fifth", "forty-sixth", "forty-seventh", "forty-eighth",
"forty-ninth", "fiftieth" ]
_REMOVED_LEVEL = ["", " once removed", " twice removed",
" three times removed",
" four times removed", " five times removed",
" six times removed",
" seven times removed", " eight times removed",
" nine times removed",
" ten times removed", " eleven times removed",
" twelve times removed",
" thirteen times removed", " fourteen times removed",
" fifteen times removed",
" sixteen times removed", " seventeen times removed",
" eighteen times removed",
" nineteen times removed", " twenty times removed",
" twenty-one times removed", " twenty-two times removed",
" twenty-three times removed", " twenty-four times removed",
" twenty-five times removed", " twenty-six times removed",
" twenty-seven times removed", " twenty-eight times removed",
" twenty-nine times removed", " thirty times removed",
" thirty-one times removed", " thirty-two times removed",
" thirty-three times removed", " thirty-four times removed",
" thirty-five times removed", " thirty-six times removed",
" thirty-seven times removed", " thirty-eight times removed",
" thirty-nine times removed", " forty times removed",
" forty-one times removed", " forty-two times removed",
" forty-three times removed", " forty-four times removed",
" forty-five times removed", " forty-six times removed",
" forty-seven times removed", " forty-eight times removed",
" forty-nine times removed", " fifty times removed", ]
_PARENTS_LEVEL = ["", "parents", "grandparents", "great grandparents",
"second great grandparents",
"third great grandparents",
"fourth great grandparents",
"fifth great grandparents",
"sixth great grandparents",
"seventh great grandparents",
"eighth great grandparents",
"ninth great grandparents",
"tenth great grandparents",
"eleventh great grandparents",
"twelfth great grandparents",
"thirteenth great grandparents",
"fourteenth great grandparents",
"fifteenth great grandparents",
"sixteenth great grandparents",
"seventeenth great grandparents",
"eighteenth great grandparents",
"nineteenth great grandparents",
"twentieth great grandparents",
"twenty-first great grandparents",
"twenty-second great grandparents",
"twenty-third great grandparents",
"twenty-fourth great grandparents",
"twenty-fifth great grandparents",
"twenty-sixth great grandparents",
"twenty-seventh great grandparents",
"twenty-eighth great grandparents",
"twenty-ninth great grandparents",
"thirtieth great grandparents",
"thirty-first great grandparents",
"thirty-second great grandparents",
"thirty-third great grandparents",
"thirty-fourth great grandparents",
"thirty-fifth great grandparents",
"thirty-sixth great grandparents",
"thirty-seventh great grandparents",
"thirty-eighth great grandparents",
"thirty-ninth great grandparents",
"fortieth great grandparents",
"forty-first great grandparents",
"forty-second great grandparents",
"forty-third great grandparents",
"forty-fourth great grandparents",
"forty-fifth great grandparents",
"forty-sixth great grandparents",
"forty-seventh great grandparents",
"forty-eighth great grandparents",
"forty-ninth great grandparents",
"fiftieth great grandparents", ]
_FATHER_LEVEL = ["", "%(step)sfather%(inlaw)s", "%(step)sgrandfather%(inlaw)s",
"great %(step)sgrandfather%(inlaw)s",
"second great %(step)sgrandfather%(inlaw)s",
"third great %(step)sgrandfather%(inlaw)s",
"fourth great %(step)sgrandfather%(inlaw)s",
"fifth great %(step)sgrandfather%(inlaw)s",
"sixth great %(step)sgrandfather%(inlaw)s",
"seventh great %(step)sgrandfather%(inlaw)s",
"eighth great %(step)sgrandfather%(inlaw)s",
"ninth great %(step)sgrandfather%(inlaw)s",
"tenth great %(step)sgrandfather%(inlaw)s",
"eleventh great %(step)sgrandfather%(inlaw)s",
"twelfth great %(step)sgrandfather%(inlaw)s",
"thirteenth great %(step)sgrandfather%(inlaw)s",
"fourteenth great %(step)sgrandfather%(inlaw)s",
"fifteenth great %(step)sgrandfather%(inlaw)s",
"sixteenth great %(step)sgrandfather%(inlaw)s",
"seventeenth great %(step)sgrandfather%(inlaw)s",
"eighteenth great %(step)sgrandfather%(inlaw)s",
"nineteenth great %(step)sgrandfather%(inlaw)s",
"twentieth great %(step)sgrandfather%(inlaw)s",
"twenty-first great %(step)sgrandfather%(inlaw)s",
"twenty-second great %(step)sgrandfather%(inlaw)s",
"twenty-third great %(step)sgrandfather%(inlaw)s",
"twenty-fourth great %(step)sgrandfather%(inlaw)s",
"twenty-fifth great %(step)sgrandfather%(inlaw)s",
"twenty-sixth great %(step)sgrandfather%(inlaw)s",
"twenty-seventh great %(step)sgrandfather%(inlaw)s",
"twenty-eighth great %(step)sgrandfather%(inlaw)s",
"twenty-ninth great %(step)sgrandfather%(inlaw)s",
"thirtieth great %(step)sgrandfather%(inlaw)s",
"thirty-first great %(step)sgrandfather%(inlaw)s",
"thirty-second great %(step)sgrandfather%(inlaw)s",
"thirty-third great %(step)sgrandfather%(inlaw)s",
"thirty-fourth great %(step)sgrandfather%(inlaw)s",
"thirty-fifth great %(step)sgrandfather%(inlaw)s",
"thirty-sixth great %(step)sgrandfather%(inlaw)s",
"thirty-seventh great %(step)sgrandfather%(inlaw)s",
"thirty-eighth great %(step)sgrandfather%(inlaw)s",
"thirty-ninth great %(step)sgrandfather%(inlaw)s",
"fourtieth great %(step)sgrandfather%(inlaw)s",
"forty-first great %(step)sgrandfather%(inlaw)s",
"forty-second great %(step)sgrandfather%(inlaw)s",
"forty-third great %(step)sgrandfather%(inlaw)s",
"forty-fourth great %(step)sgrandfather%(inlaw)s",
"forty-fifth great %(step)sgrandfather%(inlaw)s",
"forty-sixth great %(step)sgrandfather%(inlaw)s",
"forty-seventh great %(step)sgrandfather%(inlaw)s",
"forty-eighth great %(step)sgrandfather%(inlaw)s",
"forty-ninth great %(step)sgrandfather%(inlaw)s",
"fiftieth great %(step)sgrandfather%(inlaw)s", ]
_MOTHER_LEVEL = ["", "%(step)smother%(inlaw)s",
"%(step)sgrandmother%(inlaw)s",
"great %(step)sgrandmother%(inlaw)s",
"second great %(step)sgrandmother%(inlaw)s",
"third great %(step)sgrandmother%(inlaw)s",
"fourth great %(step)sgrandmother%(inlaw)s",
"fifth great %(step)sgrandmother%(inlaw)s",
"sixth great %(step)sgrandmother%(inlaw)s",
"seventh great %(step)sgrandmother%(inlaw)s",
"eighth great %(step)sgrandmother%(inlaw)s",
"ninth great %(step)sgrandmother%(inlaw)s",
"tenth great %(step)sgrandmother%(inlaw)s",
"eleventh great %(step)sgrandmother%(inlaw)s",
"twelfth great %(step)sgrandmother%(inlaw)s",
"thirteenth great %(step)sgrandmother%(inlaw)s",
"fourteenth great %(step)sgrandmother%(inlaw)s",
"fifteenth great %(step)sgrandmother%(inlaw)s",
"sixteenth great %(step)sgrandmother%(inlaw)s",
"seventeenth great %(step)sgrandmother%(inlaw)s",
"eighteenth great %(step)sgrandmother%(inlaw)s",
"nineteenth great %(step)sgrandmother%(inlaw)s",
"twentieth great %(step)sgrandmother%(inlaw)s",
"twenty-first great %(step)sgrandmother%(inlaw)s",
"twenty-second great %(step)sgrandmother%(inlaw)s",
"twenty-third great %(step)sgrandmother%(inlaw)s",
"twenty-fourth great %(step)sgrandmother%(inlaw)s",
"twenty-fifth great %(step)sgrandmother%(inlaw)s",
"twenty-sixth great %(step)sgrandmother%(inlaw)s",
"twenty-seventh great %(step)sgrandmother%(inlaw)s",
"twenty-eighth great %(step)sgrandmother%(inlaw)s",
"twenty-ninth great %(step)sgrandmother%(inlaw)s",
"thirtieth great %(step)sgrandmother%(inlaw)s",
"thirty-first great %(step)sgrandmother%(inlaw)s",
"thirty-second great %(step)sgrandmother%(inlaw)s",
"thirty-third great %(step)sgrandmother%(inlaw)s",
"thirty-forth great %(step)sgrandmother%(inlaw)s",
"thirty-fifth great %(step)sgrandmother%(inlaw)s",
"thirty-sixth great %(step)sgrandmother%(inlaw)s",
"thirty-seventh great %(step)sgrandmother%(inlaw)s",
"thirty-eighth great %(step)sgrandmother%(inlaw)s",
"thirty-ninth great %(step)sgrandmother%(inlaw)s",
"fourtieth great %(step)sgrandmother%(inlaw)s",
"forty-first great %(step)sgrandmother%(inlaw)s",
"forty-second great %(step)sgrandmother%(inlaw)s",
"forty-third great %(step)sgrandmother%(inlaw)s",
"forty-fourth great %(step)sgrandmother%(inlaw)s",
"forty-fifth great %(step)sgrandmother%(inlaw)s",
"forty-sixth great %(step)sgrandmother%(inlaw)s",
"forty-seventh great %(step)sgrandmother%(inlaw)s",
"forty-eighth great %(step)sgrandmother%(inlaw)s",
"forty-ninth great %(step)sgrandmother%(inlaw)s",
"fiftieth great %(step)sgrandmother%(inlaw)s", ]
_SON_LEVEL = ["", "%(step)sson%(inlaw)s", "%(step)sgrandson%(inlaw)s",
"great %(step)sgrandson%(inlaw)s",
"second great %(step)sgrandson%(inlaw)s",
"third great %(step)sgrandson%(inlaw)s",
"fourth great %(step)sgrandson%(inlaw)s",
"fifth great %(step)sgrandson%(inlaw)s",
"sixth great %(step)sgrandson%(inlaw)s",
"seventh great %(step)sgrandson%(inlaw)s",
"eighth great %(step)sgrandson%(inlaw)s",
"ninth great %(step)sgrandson%(inlaw)s",
"tenth great %(step)sgrandson%(inlaw)s",
"eleventh great %(step)sgrandson%(inlaw)s",
"twelfth great %(step)sgrandson%(inlaw)s",
"thirteenth great %(step)sgrandson%(inlaw)s",
"fourteenth great %(step)sgrandson%(inlaw)s",
"fifteenth great %(step)sgrandson%(inlaw)s",
"sixteenth great %(step)sgrandson%(inlaw)s",
"seventeenth great %(step)sgrandson%(inlaw)s",
"eighteenth great %(step)sgrandson%(inlaw)s",
"nineteenth great %(step)sgrandson%(inlaw)s",
"twentieth great %(step)sgrandson%(inlaw)s",
"twenty-first great %(step)sgrandson%(inlaw)s",
"twenty-second great %(step)sgrandson%(inlaw)s",
"twenty-third great %(step)sgrandson%(inlaw)s",
"twenty-fourth great %(step)sgrandson%(inlaw)s",
"twenty-fifth great %(step)sgrandson%(inlaw)s",
"twenty-sixth great %(step)sgrandson%(inlaw)s",
"twenty-seventh great %(step)sgrandson%(inlaw)s",
"twenty-eighth great %(step)sgrandson%(inlaw)s",
"twenty-ninth great %(step)sgrandson%(inlaw)s",
"thirtieth great %(step)sgrandson%(inlaw)s",
"thirty-first great %(step)sgrandson%(inlaw)s",
"thirty-second great %(step)sgrandson%(inlaw)s",
"thirty-third great %(step)sgrandson%(inlaw)s",
"thirty-forth great %(step)sgrandson%(inlaw)s",
"thirty-fifth great %(step)sgrandson%(inlaw)s",
"thirty-sixth great %(step)sgrandson%(inlaw)s",
"thirty-seventh great %(step)sgrandson%(inlaw)s",
"thirty-eighth great %(step)sgrandson%(inlaw)s",
"thirty-ninth great %(step)sgrandson%(inlaw)s",
"fourtieth great %(step)sgrandson%(inlaw)s",
"forty-first great %(step)sgrandson%(inlaw)s",
"forty-second great %(step)sgrandson%(inlaw)s",
"forty-third great %(step)sgrandson%(inlaw)s",
"forty-fourth great %(step)sgrandson%(inlaw)s",
"forty-fifth great %(step)sgrandson%(inlaw)s",
"forty-sixth great %(step)sgrandson%(inlaw)s",
"forty-seventh great %(step)sgrandson%(inlaw)s",
"forty-eighth great %(step)sgrandson%(inlaw)s",
"forty-ninth great %(step)sgrandson%(inlaw)s",
"fiftieth great %(step)sgrandson%(inlaw)s", ]
_DAUGHTER_LEVEL = ["", "%(step)sdaughter%(inlaw)s",
"%(step)sgranddaughter%(inlaw)s",
"great %(step)sgranddaughter%(inlaw)s",
"second great %(step)sgranddaughter%(inlaw)s",
"third great %(step)sgranddaughter%(inlaw)s",
"fourth great %(step)sgranddaughter%(inlaw)s",
"fifth great %(step)sgranddaughter%(inlaw)s",
"sixth great %(step)sgranddaughter%(inlaw)s",
"seventh great %(step)sgranddaughter%(inlaw)s",
"eighth great %(step)sgranddaughter%(inlaw)s",
"ninth great %(step)sgranddaughter%(inlaw)s",
"tenth great %(step)sgranddaughter%(inlaw)s",
"eleventh great %(step)sgranddaughter%(inlaw)s",
"twelfth great %(step)sgranddaughter%(inlaw)s",
"thirteenth great %(step)sgranddaughter%(inlaw)s",
"fourteenth great %(step)sgranddaughter%(inlaw)s",
"fifteenth great %(step)sgranddaughter%(inlaw)s",
"sixteenth great %(step)sgranddaughter%(inlaw)s",
"seventeenth great %(step)sgranddaughter%(inlaw)s",
"eighteenth great %(step)sgranddaughter%(inlaw)s",
"nineteenth great %(step)sgranddaughter%(inlaw)s",
"twentieth great %(step)sgranddaughter%(inlaw)s",
"twenty-first great %(step)sgranddaughter%(inlaw)s",
"twenty-second great %(step)sgranddaughter%(inlaw)s",
"twenty-third great %(step)sgranddaughter%(inlaw)s",
"twenty-fourth great %(step)sgranddaughter%(inlaw)s",
"twenty-fifth great %(step)sgranddaughter%(inlaw)s",
"twenty-sixth great %(step)sgranddaughter%(inlaw)s",
"twenty-seventh great %(step)sgranddaughter%(inlaw)s",
"twenty-eighth great %(step)sgranddaughter%(inlaw)s",
"twenty-ninth great %(step)sgranddaughter%(inlaw)s",
"thirtieth great %(step)sgranddaughter%(inlaw)s",
"thirty-first great %(step)sgranddaughter%(inlaw)s",
"thirty-second great %(step)sgranddaughter%(inlaw)s",
"thirty-third great %(step)sgranddaughter%(inlaw)s",
"thirty-forth great %(step)sgranddaughter%(inlaw)s",
"thirty-fifth great %(step)sgranddaughter%(inlaw)s",
"thirty-sixth great %(step)sgranddaughter%(inlaw)s",
"thirty-seventh great %(step)sgranddaughter%(inlaw)s",
"thirty-eighth great %(step)sgranddaughter%(inlaw)s",
"thirty-ninth great %(step)sgranddaughter%(inlaw)s",
"fourtieth great %(step)sgranddaughter%(inlaw)s",
"forty-first great %(step)sgranddaughter%(inlaw)s",
"forty-second great %(step)sgranddaughter%(inlaw)s",
"forty-third great %(step)sgranddaughter%(inlaw)s",
"forty-fourth great %(step)sgranddaughter%(inlaw)s",
"forty-fifth great %(step)sgranddaughter%(inlaw)s",
"forty-sixth great %(step)sgranddaughter%(inlaw)s",
"forty-seventh great %(step)sgranddaughter%(inlaw)s",
"forty-eighth great %(step)sgranddaughter%(inlaw)s",
"forty-ninth great %(step)sgranddaughter%(inlaw)s",
"fiftieth great %(step)sgranddaughter%(inlaw)s", ]
_SISTER_LEVEL = ["", "%(step)ssister%(inlaw)s", "%(step)saunt%(inlaw)s",
"%(step)sgrandaunt%(inlaw)s",
"great %(step)sgrandaunt%(inlaw)s",
"second great %(step)sgrandaunt%(inlaw)s",
"third great %(step)sgrandaunt%(inlaw)s",
"fourth great %(step)sgrandaunt%(inlaw)s",
"fifth great %(step)sgrandaunt%(inlaw)s",
"sixth great %(step)sgrandaunt%(inlaw)s",
"seventh great %(step)sgrandaunt%(inlaw)s",
"eighth great %(step)sgrandaunt%(inlaw)s",
"ninth great %(step)sgrandaunt%(inlaw)s",
"tenth great %(step)sgrandaunt%(inlaw)s",
"eleventh great %(step)sgrandaunt%(inlaw)s",
"twelfth great %(step)sgrandaunt%(inlaw)s",
"thirteenth great %(step)sgrandaunt%(inlaw)s",
"fourteenth great %(step)sgrandaunt%(inlaw)s",
"fifteenth great %(step)sgrandaunt%(inlaw)s",
"sixteenth great %(step)sgrandaunt%(inlaw)s",
"seventeenth great %(step)sgrandaunt%(inlaw)s",
"eighteenth great %(step)sgrandaunt%(inlaw)s",
"nineteenth great %(step)sgrandaunt%(inlaw)s",
"twentieth great %(step)sgrandaunt%(inlaw)s",
"twenty-first great %(step)sgrandaunt%(inlaw)s",
"twenty-second great %(step)sgrandaunt%(inlaw)s",
"twenty-third great %(step)sgrandaunt%(inlaw)s",
"twenty-fourth great %(step)sgrandaunt%(inlaw)s",
"twenty-fifth great %(step)sgrandaunt%(inlaw)s",
"twenty-sixth great %(step)sgrandaunt%(inlaw)s",
"twenty-seventh great %(step)sgrandaunt%(inlaw)s",
"twenty-eighth great %(step)sgrandaunt%(inlaw)s",
"twenty-ninth great %(step)sgrandaunt%(inlaw)s",
"thirtieth great %(step)sgrandaunt%(inlaw)s",
"thirty-first great %(step)sgrandaunt%(inlaw)s",
"thirty-second great %(step)sgrandaunt%(inlaw)s",
"thirty-third great %(step)sgrandaunt%(inlaw)s",
"thirty-forth great %(step)sgrandaunt%(inlaw)s",
"thirty-fifth great %(step)sgrandaunt%(inlaw)s",
"thirty-sixth great %(step)sgrandaunt%(inlaw)s",
"thirty-seventh great %(step)sgrandaunt%(inlaw)s",
"thirty-eighth great %(step)sgrandaunt%(inlaw)s",
"thirty-ninth great %(step)sgrandaunt%(inlaw)s",
"fourtieth great %(step)sgrandaunt%(inlaw)s",
"forty-first great %(step)sgrandaunt%(inlaw)s",
"forty-second great %(step)sgrandaunt%(inlaw)s",
"forty-third great %(step)sgrandaunt%(inlaw)s",
"forty-fourth great %(step)sgrandaunt%(inlaw)s",
"forty-fifth great %(step)sgrandaunt%(inlaw)s",
"forty-sixth great %(step)sgrandaunt%(inlaw)s",
"forty-seventh great %(step)sgrandaunt%(inlaw)s",
"forty-eighth great %(step)sgrandaunt%(inlaw)s",
"forty-ninth great %(step)sgrandaunt%(inlaw)s",
"fiftieth great %(step)sgrandaunt%(inlaw)s", ]
_BROTHER_LEVEL = ["", "%(step)sbrother%(inlaw)s", "%(step)suncle%(inlaw)s",
"%(step)sgranduncle%(inlaw)s",
"great %(step)sgranduncle%(inlaw)s",
"second great %(step)sgranduncle%(inlaw)s",
"third great %(step)sgranduncle%(inlaw)s",
"fourth great %(step)sgranduncle%(inlaw)s",
"fifth great %(step)sgranduncle%(inlaw)s",
"sixth great %(step)sgranduncle%(inlaw)s",
"seventh great %(step)sgranduncle%(inlaw)s",
"eighth great %(step)sgranduncle%(inlaw)s",
"ninth great %(step)sgranduncle%(inlaw)s",
"tenth great %(step)sgranduncle%(inlaw)s",
"eleventh great %(step)sgranduncle%(inlaw)s",
"twelfth great %(step)sgranduncle%(inlaw)s",
"thirteenth great %(step)sgranduncle%(inlaw)s",
"fourteenth great %(step)sgranduncle%(inlaw)s",
"fifteenth great %(step)sgranduncle%(inlaw)s",
"sixteenth great %(step)sgranduncle%(inlaw)s",
"seventeenth great %(step)sgranduncle%(inlaw)s",
"eighteenth great %(step)sgranduncle%(inlaw)s",
"nineteenth great %(step)sgranduncle%(inlaw)s",
"twentieth great %(step)sgranduncle%(inlaw)s",
"twenty-first great %(step)sgranduncle%(inlaw)s",
"twenty-second great %(step)sgranduncle%(inlaw)s",
"twenty-third great %(step)sgranduncle%(inlaw)s",
"twenty-fourth great %(step)sgranduncle%(inlaw)s",
"twenty-fifth great %(step)sgranduncle%(inlaw)s",
"twenty-sixth great %(step)sgranduncle%(inlaw)s",
"twenty-seventh great %(step)sgranduncle%(inlaw)s",
"twenty-eighth great %(step)sgranduncle%(inlaw)s",
"twenty-ninth great %(step)sgranduncle%(inlaw)s",
"thirtieth great %(step)sgranduncle%(inlaw)s",
"thirty-first great %(step)sgranduncle%(inlaw)s",
"thirty-second great %(step)sgranduncle%(inlaw)s",
"thirty-third great %(step)sgranduncle%(inlaw)s",
"thirty-fourth great %(step)sgranduncle%(inlaw)s",
"thirty-fifth great %(step)sgranduncle%(inlaw)s",
"thirty-sixth great %(step)sgranduncle%(inlaw)s",
"thirty-seventh great %(step)sgranduncle%(inlaw)s",
"thirty-eighth great %(step)sgranduncle%(inlaw)s",
"thirty-ninth great %(step)sgranduncle%(inlaw)s",
"fourtieth great %(step)sgranduncle%(inlaw)s",
"forty-first great %(step)sgranduncle%(inlaw)s",
"forty-second great %(step)sgranduncle%(inlaw)s",
"forty-third great %(step)sgranduncle%(inlaw)s",
"forty-fourth great %(step)sgranduncle%(inlaw)s",
"forty-fifth great %(step)sgranduncle%(inlaw)s",
"forty-sixth great %(step)sgranduncle%(inlaw)s",
"forty-seventh great %(step)sgranduncle%(inlaw)s",
"forty-eighth great %(step)sgranduncle%(inlaw)s",
"forty-ninth great %(step)sgranduncle%(inlaw)s",
"fiftieth great %(step)sgranduncle%(inlaw)s", ]
_NEPHEW_LEVEL = ["", "%(step)snephew%(inlaw)s", "%(step)sgrandnephew%(inlaw)s",
"great %(step)sgrandnephew%(inlaw)s",
"second great %(step)sgrandnephew%(inlaw)s",
"third great %(step)sgrandnephew%(inlaw)s",
"fourth great %(step)sgrandnephew%(inlaw)s",
"fifth great %(step)sgrandnephew%(inlaw)s",
"sixth great %(step)sgrandnephew%(inlaw)s",
"seventh great %(step)sgrandnephew%(inlaw)s",
"eighth great %(step)sgrandnephew%(inlaw)s",
"ninth great %(step)sgrandnephew%(inlaw)s",
"tenth great %(step)sgrandnephew%(inlaw)s",
"eleventh great %(step)sgrandnephew%(inlaw)s",
"twelfth great %(step)sgrandnephew%(inlaw)s",
"thirteenth great %(step)sgrandnephew%(inlaw)s",
"fourteenth great %(step)sgrandnephew%(inlaw)s",
"fifteenth great %(step)sgrandnephew%(inlaw)s",
"sixteenth great %(step)sgrandnephew%(inlaw)s",
"seventeenth great %(step)sgrandnephew%(inlaw)s",
"eighteenth great %(step)sgrandnephew%(inlaw)s",
"nineteenth great %(step)sgrandnephew%(inlaw)s",
"twentieth great %(step)sgrandnephew%(inlaw)s",
"twenty-first great %(step)sgrandnephew%(inlaw)s",
"twenty-second great %(step)sgrandnephew%(inlaw)s",
"twenty-third great %(step)sgrandnephew%(inlaw)s",
"twenty-fourth great %(step)sgrandnephew%(inlaw)s",
"twenty-fifth great %(step)sgrandnephew%(inlaw)s",
"twenty-sixth great %(step)sgrandnephew%(inlaw)s",
"twenty-seventh great %(step)sgrandnephew%(inlaw)s",
"twenty-eighth great %(step)sgrandnephew%(inlaw)s",
"twenty-ninth great %(step)sgrandnephew%(inlaw)s",
"thirtieth great %(step)sgrandnephew%(inlaw)s",
"thirty-first great %(step)sgrandnephew%(inlaw)s",
"thirty-second great %(step)sgrandnephew%(inlaw)s",
"thirty-third great %(step)sgrandnephew%(inlaw)s",
"thirty-fourth great %(step)sgrandnephew%(inlaw)s",
"thirty-fifth great %(step)sgrandnephew%(inlaw)s",
"thirty-sixth great %(step)sgrandnephew%(inlaw)s",
"thirty-seventh great %(step)sgrandnephew%(inlaw)s",
"thirty-eighth great %(step)sgrandnephew%(inlaw)s",
"thirty-ninth great %(step)sgrandnephew%(inlaw)s",
"fourtieth great %(step)sgrandnephew%(inlaw)s",
"forty-first great %(step)sgrandnephew%(inlaw)s",
"forty-second great %(step)sgrandnephew%(inlaw)s",
"forty-third great %(step)sgrandnephew%(inlaw)s",
"forty-fourth great %(step)sgrandnephew%(inlaw)s",
"forty-fifth great %(step)sgrandnephew%(inlaw)s",
"forty-sixth great %(step)sgrandnephew%(inlaw)s",
"forty-seventh great %(step)sgrandnephew%(inlaw)s",
"forty-eighth great %(step)sgrandnephew%(inlaw)s",
"forty-ninth great %(step)sgrandnephew%(inlaw)s",
"fiftieth great %(step)sgrandnephew%(inlaw)s", ]
_NIECE_LEVEL = ["", "%(step)sniece%(inlaw)s", "%(step)sgrandniece%(inlaw)s",
"great %(step)sgrandniece%(inlaw)s",
"second great %(step)sgrandniece%(inlaw)s",
"third great %(step)sgrandniece%(inlaw)s",
"fourth great %(step)sgrandniece%(inlaw)s",
"fifth great %(step)sgrandniece%(inlaw)s",
"sixth great %(step)sgrandniece%(inlaw)s",
"seventh great %(step)sgrandniece%(inlaw)s",
"eighth great %(step)sgrandniece%(inlaw)s",
"ninth great %(step)sgrandniece%(inlaw)s",
"tenth great %(step)sgrandniece%(inlaw)s",
"eleventh great %(step)sgrandniece%(inlaw)s",
"twelfth great %(step)sgrandniece%(inlaw)s",
"thirteenth great %(step)sgrandniece%(inlaw)s",
"fourteenth great %(step)sgrandniece%(inlaw)s",
"fifteenth great %(step)sgrandniece%(inlaw)s",
"sixteenth great %(step)sgrandniece%(inlaw)s",
"seventeenth great %(step)sgrandniece%(inlaw)s",
"eighteenth great %(step)sgrandniece%(inlaw)s",
"nineteenth great %(step)sgrandniece%(inlaw)s",
"twentieth great %(step)sgrandniece%(inlaw)s",
"twenty-first great %(step)sgrandniece%(inlaw)s",
"twenty-second great %(step)sgrandniece%(inlaw)s",
"twenty-third great %(step)sgrandniece%(inlaw)s",
"twenty-fourth great %(step)sgrandniece%(inlaw)s",
"twenty-fifth great %(step)sgrandniece%(inlaw)s",
"twenty-sixth great %(step)sgrandniece%(inlaw)s",
"twenty-seventh great %(step)sgrandniece%(inlaw)s",
"twenty-eighth great %(step)sgrandniece%(inlaw)s",
"twenty-ninth great %(step)sgrandniece%(inlaw)s",
"thirtieth great %(step)sgrandniece%(inlaw)s",
"thirty-first great %(step)sgrandniece%(inlaw)s",
"thirty-second great %(step)sgrandniece%(inlaw)s",
"thirty-third great %(step)sgrandniece%(inlaw)s",
"thirty-fourth great %(step)sgrandniece%(inlaw)s",
"thirty-fifth great %(step)sgrandniece%(inlaw)s",
"thirty-sixth great %(step)sgrandniece%(inlaw)s",
"thirty-seventh great %(step)sgrandniece%(inlaw)s",
"thirty-eighth great %(step)sgrandniece%(inlaw)s",
"thirty-ninth great %(step)sgrandniece%(inlaw)s",
"fourtieth great %(step)sgrandniece%(inlaw)s",
"forty-first great %(step)sgrandniece%(inlaw)s",
"forty-second great %(step)sgrandniece%(inlaw)s",
"forty-third great %(step)sgrandniece%(inlaw)s",
"forty-fourth great %(step)sgrandniece%(inlaw)s",
"forty-fifth great %(step)sgrandniece%(inlaw)s",
"forty-sixth great %(step)sgrandniece%(inlaw)s",
"forty-seventh great %(step)sgrandniece%(inlaw)s",
"forty-eighth great %(step)sgrandniece%(inlaw)s",
"forty-ninth great %(step)sgrandniece%(inlaw)s",
"fiftieth great %(step)sgrandniece%(inlaw)s", ]
_CHILDREN_LEVEL = ["",
"children",
"grandchildren",
"great grandchildren",
"second great grandchildren",
"third great grandchildren",
"fourth great grandchildren",
"fifth great grandchildren",
"sixth great grandchildren",
"seventh great grandchildren",
"eighth great grandchildren",
"ninth great grandchildren",
"tenth great grandchildren",
"eleventh great grandchildren",
"twelfth great grandchildren",
"thirteenth great grandchildren",
"fourteenth great grandchildren",
"fifteenth great grandchildren",
"sixteenth great grandchildren",
"seventeenth great grandchildren",
"eighteenth great grandchildren",
"nineteenth great grandchildren",
"twentieth great grandchildren",
"twenty-first great grandchildren",
"twenty-second great grandchildren",
"twenty-third great grandchildren",
"twenty-fourth great grandchildren",
"twenty-fifth great grandchildren",
"twenty-sixth great grandchildren",
"twenty-seventh great grandchildren",
"twenty-eighth great grandchildren",
"twenty-ninth great grandchildren",
"thirtieth great grandchildren",
"thirty-first great grandchildren",
"thirty-second great grandchildren",
"thirty-third great grandchildren",
"thirty-fourth great grandchildren",
"thirty-fifth great grandchildren",
"thirty-sixth great grandchildren",
"thirty-seventh great grandchildren",
"thirty-eighth great grandchildren",
"thirty-ninth great grandchildren",
"fourtieth great grandchildren",
"forty-first great grandchildren",
"forty-second great grandchildren",
"forty-third great grandchildren",
"forty-fourth great grandchildren",
"forty-fifth great grandchildren",
"forty-sixth great grandchildren",
"forty-seventh great grandchildren",
"forty-eighth great grandchildren",
"forty-ninth great grandchildren",
"fiftieth great grandchildren", ]
_SIBLINGS_LEVEL = ["",
"siblings",
"uncles/aunts",
"granduncles/aunts",
"great granduncles/aunts",
"second great granduncles/aunts",
"third great granduncles/aunts",
"fourth great granduncles/aunts",
"fifth great granduncles/aunts",
"sixth great granduncles/aunts",
"seventh great granduncles/aunts",
"eighth great granduncles/aunts",
"ninth great granduncles/aunts",
"tenth great granduncles/aunts",
"eleventh great granduncles/aunts",
"twelfth great granduncles/aunts",
"thirteenth great granduncles/aunts",
"fourteenth great granduncles/aunts",
"fifteenth great granduncles/aunts",
"sixteenth great granduncles/aunts",
"seventeenth great granduncles/aunts",
"eighteenth great granduncles/aunts",
"nineteenth great granduncles/aunts",
"twentieth great granduncles/aunts",
"twenty-first great granduncles/aunts",
"twenty-second great granduncles/aunts",
"twenty-third great granduncles/aunts",
"twenty-fourth great granduncles/aunts",
"twenty-fifth great granduncles/aunts",
"twenty-sixth great granduncles/aunts",
"twenty-seventh great granduncles/aunts",
"twenty-eighth great granduncles/aunts",
"twenty-ninth great granduncles/aunts",
"thirtieth great granduncles/aunts",
"thirty-first great granduncles/aunts",
"thirty-second great granduncles/aunts",
"thirty-third great granduncles/aunts",
"thirty-fourth great granduncles/aunts",
"thirty-fifth great granduncles/aunts",
"thirty-sixth great granduncles/aunts",
"thirty-seventh great granduncles/aunts",
"thirty-eighth great granduncles/aunts",
"thirty-ninth great granduncles/aunts",
"fortieth great granduncles/aunts",
"forty-first great granduncles/aunts",
"forty-second great granduncles/aunts",
"forty-third great granduncles/aunts",
"forty-fourth great granduncles/aunts",
"forty-fifth great granduncles/aunts",
"forty-sixth great granduncles/aunts",
"forty-seventh great granduncles/aunts",
"forty-eighth great granduncles/aunts",
"forty-ninth great granduncles/aunts",
"fiftienth great granduncles/aunts", ]
_SIBLING_LEVEL = ["",
"%(step)ssibling%(inlaw)s",
"%(step)suncle/aunt%(inlaw)s",
"%(step)sgranduncle/aunt%(inlaw)s",
"great %(step)sgranduncle/aunt%(inlaw)s",
"second great %(step)sgranduncle/aunt%(inlaw)s",
"third great %(step)sgranduncle/aunt%(inlaw)s",
"fourth great %(step)sgranduncle/aunt%(inlaw)s",
"fifth great %(step)sgranduncle/aunt%(inlaw)s",
"sixth great %(step)sgranduncle/aunt%(inlaw)s",
"seventh great %(step)sgranduncle/aunt%(inlaw)s",
"eighth great %(step)sgranduncle/aunt%(inlaw)s",
"ninth great %(step)sgranduncle/aunt%(inlaw)s",
"tenth great %(step)sgranduncle/aunt%(inlaw)s",
"eleventh great %(step)sgranduncle/aunt%(inlaw)s",
"twelfth great %(step)sgranduncle/aunt%(inlaw)s",
"thirteenth great %(step)sgranduncle/aunt%(inlaw)s",
"fourteenth great %(step)sgranduncle/aunt%(inlaw)s",
"fifteenth great %(step)sgranduncle/aunt%(inlaw)s",
"sixteenth great %(step)sgranduncle/aunt%(inlaw)s",
"seventeenth great %(step)sgranduncle/aunt%(inlaw)s",
"eighteenth great %(step)sgranduncle/aunt%(inlaw)s",
"nineteenth great %(step)sgranduncle/aunt%(inlaw)s",
"twentieth great %(step)sgranduncle/aunt%(inlaw)s",
"twenty-first great %(step)sgranduncle/aunt%(inlaw)s",
"twenty-second great %(step)sgranduncle/aunt%(inlaw)s",
"twenty-third great %(step)sgranduncle/aunt%(inlaw)s",
"twenty-fourth great %(step)sgranduncle/aunt%(inlaw)s",
"twenty-fifth great %(step)sgranduncle/aunt%(inlaw)s",
"twenty-sixth great %(step)sgranduncle/aunt%(inlaw)s",
"twenty-seventh great %(step)sgranduncle/aunt%(inlaw)s",
"twenty-eighth great %(step)sgranduncle/aunt%(inlaw)s",
"twenty-ninth great %(step)sgranduncle/aunt%(inlaw)s",
"thirtieth great %(step)sgranduncle/aunt%(inlaw)s",
"thirty-first great %(step)sgranduncle/aunt%(inlaw)s",
"thirty-second great %(step)sgranduncle/aunt%(inlaw)s",
"thirty-third great %(step)sgranduncle/aunt%(inlaw)s",
"thirty-fourth great %(step)sgranduncle/aunt%(inlaw)s",
"thirty-fifth great %(step)sgranduncle/aunt%(inlaw)s",
"thirty-sixth great %(step)sgranduncle/aunt%(inlaw)s",
"thirty-seventh great %(step)sgranduncle/aunt%(inlaw)s",
"thirty-eighth great %(step)sgranduncle/aunt%(inlaw)s",
"thirty-ninth great %(step)sgranduncle/aunt%(inlaw)s",
"fortieth great %(step)sgranduncle/aunt%(inlaw)s",
"forty-first great %(step)sgranduncle/aunt%(inlaw)s",
"forty-second great %(step)sgranduncle/aunt%(inlaw)s",
"forty-third great %(step)sgranduncle/aunt%(inlaw)s",
"forty-fourth great %(step)sgranduncle/aunt%(inlaw)s",
"forty-fifth great %(step)sgranduncle/aunt%(inlaw)s",
"forty-sixth great %(step)sgranduncle/aunt%(inlaw)s",
"forty-seventh great %(step)sgranduncle/aunt%(inlaw)s",
"forty-eighth great %(step)sgranduncle/aunt%(inlaw)s",
"forty-ninth great %(step)sgranduncle/aunt%(inlaw)s",
"fiftieth great %(step)sgranduncle/aunt%(inlaw)s", ]
_NEPHEWS_NIECES_LEVEL = ["",
"siblings",
"nephews/nieces",
"grandnephews/nieces",
"great grandnephews/nieces",
"second great grandnephews/nieces",
"third great grandnephews/nieces",
"fourth great grandnephews/nieces",
"fifth great grandnephews/nieces",
"sixth great grandnephews/nieces",
"seventh great grandnephews/nieces",
"eighth great grandnephews/nieces",
"ninth great grandnephews/nieces",
"tenth great grandnephews/nieces",
"eleventh great grandnephews/nieces",
"twelfth great grandnephews/nieces",
"thirteenth great grandnephews/nieces",
"fourteenth great grandnephews/nieces",
"fifteenth great grandnephews/nieces",
"sixteenth great grandnephews/nieces",
"seventeenth great grandnephews/nieces",
"eighteenth great grandnephews/nieces",
"nineteenth great grandnephews/nieces",
"twentieth great grandnephews/nieces",
"twenty-first great grandnephews/nieces",
"twenty-second great grandnephews/nieces",
"twenty-third great grandnephews/nieces",
"twenty-fourth great grandnephews/nieces",
"twenty-fifth great grandnephews/nieces",
"twenty-sixth great grandnephews/nieces",
"twenty-seventh great grandnephews/nieces",
"twenty-eighth great grandnephews/nieces",
"twenty-ninth great grandnephews/nieces",
"thirtieth great grandnephews/nieces",
"thirty-first great grandnephews/nieces",
"thirty-second great grandnephews/nieces",
"thirty-third great grandnephews/nieces",
"thirty-fourth great grandnephews/nieces",
"thirty-fifth great grandnephews/nieces",
"thirty-sixth great grandnephews/nieces",
"thirty-seventh great grandnephews/nieces",
"thirty-eighth great grandnephews/nieces",
"thirty-ninth great grandnephews/nieces",
"fortieth great grandnephews/nieces",
"forty-first great grandnephews/nieces",
"forty-second great grandnephews/nieces",
"forty-third great grandnephews/nieces",
"forty-fourth great grandnephews/nieces",
"forty-fifth great grandnephews/nieces",
"forty-sixth great grandnephews/nieces",
"forty-seventh great grandnephews/nieces",
"forty-eighth great grandnephews/nieces",
"forty-ninth great grandnephews/nieces",
"fiftieth great grandnephews/nieces", ]
#-------------------------------------------------------------------------
#
# RelationshipCalculator
#
#-------------------------------------------------------------------------
class RelationshipCalculator:
"""
The relationship calculator helps to determine the relationship between
two people.
"""
REL_MOTHER = 'm' # going up to mother
REL_FATHER = 'f' # going up to father
REL_MOTHER_NOTBIRTH = 'M' # going up to mother, not birth relation
REL_FATHER_NOTBIRTH = 'F' # going up to father, not birth relation
REL_SIBLING = 's' # going sideways to sibling (no parents)
REL_FAM_BIRTH = 'a' # going up to family (mother and father)
REL_FAM_NONBIRTH = 'A' # going up to family, not birth relation
REL_FAM_BIRTH_MOTH_ONLY = 'b' # going up to fam, only birth rel to mother
REL_FAM_BIRTH_FATH_ONLY = 'c' # going up to fam, only birth rel to father
REL_FAM_INLAW_PREFIX = 'L' # going to the partner.
#sibling types
NORM_SIB = 0 # same birth parents
HALF_SIB_MOTHER = 1 # same mother, father known to be different
HALF_SIB_FATHER = 2 # same father, mother known to be different
STEP_SIB = 3 # birth parents known to be different
UNKNOWN_SIB = 4 # insufficient data to draw conclusion
#sibling strings
STEP = 'step'
HALF = 'half-'
INLAW = '-in-law'
#partner types
PARTNER_MARRIED = 1
PARTNER_UNMARRIED = 2
PARTNER_CIVIL_UNION = 3
PARTNER_UNKNOWN_REL = 4
PARTNER_EX_MARRIED = 5
PARTNER_EX_UNMARRIED = 6
PARTNER_EX_CIVIL_UNION = 7
PARTNER_EX_UNKNOWN_REL = 8
def __init__(self):
self.signal_keys = []
self.state_signal_key = None
self.storemap = False
self.dirtymap = True
self.stored_map = None
self.map_handle = None
self.map_meta = None
self.__db_connected = False
self.depth = 15
try:
from .config import config
self.set_depth(config.get('behavior.generation-depth'))
except ImportError:
pass
#data storage to communicate with recursive functions
self.__max_depth_reached = False
self.__loop_detected = False
self.__max_depth = 0
self.__all_families = False
self.__all_dist = False
self.__only_birth = False
self.__crosslinks = False
self.__msg = []
def set_depth(self, depth):
"""
Set how deep relationships must be searched. Input must be an
integer > 0
"""
if depth != self.depth:
self.depth = depth
self.dirtymap = True
def get_depth(self):
"""
Obtain depth of relationship search
"""
return self.depth
DIST_FATHER = "distant %(step)sancestor%(inlaw)s (%(level)d generations)"
def _get_father(self, level, step='', inlaw=''):
"""
Internal english method to create relation string
"""
if level > len(_FATHER_LEVEL) - 1:
return self.DIST_FATHER % {'step': step, 'inlaw': inlaw,
'level': level}
else:
return _FATHER_LEVEL[level] % {'step': step, 'inlaw': inlaw}
DIST_SON = "distant %(step)sdescendant%(inlaw)s (%(level)d generations)"
def _get_son(self, level, step='', inlaw=''):
"""
Internal english method to create relation string
"""
if level > len(_SON_LEVEL) - 1:
return self.DIST_SON % {'step': step, 'inlaw': inlaw,
'level': level}
else:
return _SON_LEVEL[level] % {'step': step, 'inlaw': inlaw}
DIST_MOTHER = "distant %(step)sancestor%(inlaw)s (%(level)d generations)"
def _get_mother(self, level, step='', inlaw=''):
"""
Internal english method to create relation string
"""
if level > len(_MOTHER_LEVEL) - 1:
return self.DIST_MOTHER % {'step': step, 'inlaw': inlaw,
'level': level}
else:
return _MOTHER_LEVEL[level] % {'step': step, 'inlaw': inlaw}
DIST_DAUGHTER = "distant %(step)sdescendant%(inlaw)s (%(level)d generations)"
def _get_daughter(self, level, step='', inlaw=''):
"""
Internal english method to create relation string
"""
if level > len(_DAUGHTER_LEVEL) - 1:
return self.DIST_DAUGHTER % {'step': step, 'inlaw': inlaw,
'level': level}
else:
return _DAUGHTER_LEVEL[level] % {'step': step, 'inlaw': inlaw}
def _get_parent_unknown(self, level, step='', inlaw=''):
"""
Internal english method to create relation string
"""
if level < len(_LEVEL_NAME):
return _LEVEL_NAME[level] + ' ' + '%sancestor%s' % (step, inlaw)
else:
return "distant %sancestor%s (%d generations)" % (step, inlaw,
level)
DIST_CHILD = "distant %(step)sdescendant (%(level)d generations)"
def _get_child_unknown(self, level, step='', inlaw=''):
"""
Internal english method to create relation string
"""
if level < len(_LEVEL_NAME):
return _LEVEL_NAME[level] + ' ' + '%(step)sdescendant%(inlaw)s' % {
'step': step, 'inlaw': inlaw}
else:
return self.DIST_CHILD % {'step': step, 'level': level}
DIST_AUNT = "distant %(step)saunt%(inlaw)s"
def _get_aunt(self, level, step='', inlaw=''):
"""
Internal english method to create relation string
"""
if level > len(_SISTER_LEVEL) - 1:
return self.DIST_AUNT % {'step': step, 'inlaw': inlaw}
else:
return _SISTER_LEVEL[level] % {'step': step, 'inlaw': inlaw}
DIST_UNCLE = "distant %(step)suncle%(inlaw)s"
def _get_uncle(self, level, step='', inlaw=''):
"""
Internal english method to create relation string
"""
if level > len(_BROTHER_LEVEL) - 1:
return self.DIST_UNCLE % {'step': step, 'inlaw': inlaw}
else:
return _BROTHER_LEVEL[level] % {'step': step, 'inlaw': inlaw}
DIST_NEPHEW = "distant %(step)snephew%(inlaw)s"
def _get_nephew(self, level, step='', inlaw=''):
"""
Internal english method to create relation string
"""
if level > len(_NEPHEW_LEVEL) - 1:
return self.DIST_NEPHEW % {'step': step, 'inlaw': inlaw}
else:
return _NEPHEW_LEVEL[level] % {'step': step, 'inlaw': inlaw}
DIST_NIECE = "distant %(step)sniece%(inlaw)s"
def _get_niece(self, level, step='', inlaw=''):
"""
Internal english method to create relation string
"""
if level > len(_NIECE_LEVEL) - 1:
return self.DIST_NIECE % {'step': step, 'inlaw': inlaw}
else:
return _NIECE_LEVEL[level] % {'step': step, 'inlaw': inlaw}
def _get_cousin(self, level, removed, dir='', step='', inlaw=''):
"""
Internal english method to create relation string
"""
if removed == 0 and level < len(_LEVEL_NAME):
return "%s %scousin%s" % (_LEVEL_NAME[level], step, inlaw)
elif removed > len(_REMOVED_LEVEL)-1 or level > len(_LEVEL_NAME)-1:
return "distant %srelative%s" % (step, inlaw)
else:
return "%s %scousin%s%s%s" % (_LEVEL_NAME[level],
step, inlaw,
_REMOVED_LEVEL[removed], dir)
DIST_SIB = "distant %(step)suncle/aunt%(inlaw)s"
def _get_sibling(self, level, step='', inlaw=''):
"""
Internal english method to create relation string
"""
if level < len(_SIBLING_LEVEL):
return _SIBLING_LEVEL[level] % {'step': step, 'inlaw': inlaw}
else:
return self.DIST_SIB % {'step': step, 'inlaw': inlaw}
def get_sibling_type(self, db, orig, other):
"""
Translation free determination of type of orig and other as siblings
The procedure returns sibling types, these can be passed to
get_sibling_relationship_string.
Only call this method if known that orig and other are siblings
"""
fatherorig, motherorig = self.get_birth_parents(db, orig)
fatherother, motherother = self.get_birth_parents(db, other)
if fatherorig and motherorig and fatherother and motherother:
if fatherother == fatherorig and motherother == motherorig:
return self.NORM_SIB
elif fatherother == fatherorig:
#all birth parents are known, one
return self.HALF_SIB_FATHER
elif motherother == motherorig:
return self.HALF_SIB_MOTHER
else:
return self.STEP_SIB
else:
# some birth parents are not known, hence we or cannot know if
# half siblings. step siblings might be possible, otherwise give up
orig_nb_par = self._get_nonbirth_parent_list(db, orig)
if fatherother and fatherother in orig_nb_par:
#the birth parent of other is non-birth of orig
if motherother and motherother == motherorig:
return self.HALF_SIB_MOTHER
else:
return self.STEP_SIB
if motherother and motherother in orig_nb_par:
#the birth parent of other is non-birth of orig
if fatherother and fatherother == fatherorig:
return self.HALF_SIB_FATHER
else:
return self.STEP_SIB
other_nb_par = self._get_nonbirth_parent_list(db, other)
if fatherorig and fatherorig in other_nb_par:
#the one birth parent of other is non-birth of orig
if motherorig and motherother == motherorig:
return self.HALF_SIB_MOTHER
else:
return self.STEP_SIB
if motherorig and motherorig in other_nb_par:
#the one birth parent of other is non-birth of orig
if fatherother and fatherother == fatherorig:
return self.HALF_SIB_FATHER
else:
return self.STEP_SIB
#there is an unknown birth parent, it could be that this is the
# birth parent of the other person
return self.UNKNOWN_SIB
def get_birth_parents(self, db, person):
"""
Method that returns the birthparents of a person as tuple
(mother handle, father handle), if no known birthparent, the
handle is replaced by None
"""
birthfather = None
birthmother = None
for fam in person.get_parent_family_handle_list():
family = db.get_family_from_handle(fam)
if not family:
continue
childrel = [(ref.get_mother_relation(), ref.get_father_relation())
for ref in family.get_child_ref_list()
if ref.ref == person.handle]
if not birthmother and childrel[0][0] == ChildRefType.BIRTH:
birthmother = family.get_mother_handle()
if not birthfather and childrel[0][1] == ChildRefType.BIRTH:
birthfather = family.get_father_handle()
if birthmother and birthfather:
break
return (birthmother, birthfather)
def _get_nonbirth_parent_list(self, db, person):
"""
Returns a list of handles of parents of which it is known
they are not birth parents.
So all parents which do not have relation BIRTH or UNKNOWN
are returned.
"""
nb_parents = []
for fam in person.get_parent_family_handle_list():
family = db.get_family_from_handle(fam)
if not family:
continue
childrel = [(ref.get_mother_relation(), ref.get_father_relation())
for ref in family.get_child_ref_list()
if ref.ref == person.handle]
if childrel[0][0] != ChildRefType.BIRTH \
and childrel[0][0] != ChildRefType.UNKNOWN:
nb_parents.append(family.get_mother_handle())
if childrel[0][1] != ChildRefType.BIRTH \
and childrel[0][1] != ChildRefType.UNKNOWN:
nb_parents.append(family.get_father_handle())
#make every person appear only once:
return list(set(nb_parents))
    def _get_spouse_type(self, db, orig, other, all_rel=False):
        """
        Translation free determination if orig and other are partners.
        The procedure returns partner types, these can be passed to
        get_partner_relationship_string.
        If all_rel=False, returns None or a partner type.
        If all_rel=True, returns a list, empty if no partner

        :param db: database to work on
        :param orig: first person
        :param other: candidate partner of orig
        :param all_rel: if True collect all partner types instead of one
        """
        val = []
        for family_handle in orig.get_family_handle_list():
            family = db.get_family_from_handle(family_handle)
            # collect the spouse type of every family shared with other
            # (the last collected one is returned when all_rel is False)
            if family and other.get_handle() in [family.get_father_handle(),
                                                 family.get_mother_handle()]:
                family_rel = family.get_relationship()
                #check for divorce event:
                ex = False
                for eventref in family.get_event_ref_list():
                    event = db.get_event_from_handle(eventref.ref)
                    if event and (event.get_type() == EventType.DIVORCE
                                  or event.get_type() == EventType.ANNULMENT):
                        ex = True
                        break
                # map the family relation (plus ex status) to a partner type
                if family_rel == FamilyRelType.MARRIED:
                    if ex:
                        val.append(self.PARTNER_EX_MARRIED)
                    else:
                        val.append(self.PARTNER_MARRIED)
                elif family_rel == FamilyRelType.UNMARRIED:
                    if ex:
                        val.append(self.PARTNER_EX_UNMARRIED)
                    else:
                        val.append(self.PARTNER_UNMARRIED)
                elif family_rel == FamilyRelType.CIVIL_UNION:
                    if ex:
                        val.append(self.PARTNER_EX_CIVIL_UNION)
                    else:
                        val.append(self.PARTNER_CIVIL_UNION)
                else:
                    if ex:
                        val.append(self.PARTNER_EX_UNKNOWN_REL)
                    else:
                        val.append(self.PARTNER_UNKNOWN_REL)
        if all_rel:
            return val
        else:
            #last relation is normally the definitive relation
            if val:
                return val[-1]
            else:
                return None
def is_spouse(self, db, orig, other, all_rel=False):
"""
Determine the spouse relation
"""
spouse_type = self._get_spouse_type(db, orig, other, all_rel)
if spouse_type:
return self.get_partner_relationship_string(spouse_type,
orig.get_gender(),
other.get_gender())
else:
return None
    def get_relationship_distance_new(self, db, orig_person,
                                      other_person,
                                      all_families=False,
                                      all_dist=False,
                                      only_birth=True):
        """
        Return if all_dist == False a 'tuple, string':
            (rank, person handle, firstRel_str, firstRel_fam,
             secondRel_str, secondRel_fam), msg
        or if all_dist == True a 'list of tuple, string':
            [.....], msg

        .. note:: _new can be removed once all rel_xx modules no longer
                  overwrite get_relationship_distance

        The tuple or list of tuples consists of:

        ============== =====================================================
        Element        Description
        ============== =====================================================
        rank           Total number of generations from common ancestor to
                       the two persons, rank is -1 if no relations found
        person_handle  The common ancestor
        firstRel_str   String with the path to the common ancestor
                       from orig Person
        firstRel_fam   Family numbers along the path as a list, eg [0,0,1].
                       For parent in multiple families, eg [0, [0, 2], 1]
        secondRel_str  String with the path to the common ancestor
                       from otherPerson
        secondRel_fam  Family numbers along the path, eg [0,0,1].
                       For parent in multiple families, eg [0, [0, 2], 1]
        msg            List of messages indicating errors. Empty list if no
                       errors.
        ============== =====================================================

        Example: firstRel_str = 'ffm' and firstRel_fam = [2,0,1] means
        common ancestor is mother of the second family of the father of the
        first family of the father of the third family.
        Note that the same person might be present twice if the person is
        reached via a different branch too. Path (firstRel_str and
        secondRel_str) will of course be different.

        :param db: database to work on
        :param orig_person: first person
        :type orig_person: Person Obj
        :param other_person: second person, relation is sought between
                             first and second person
        :type other_person: Person Obj
        :param all_families: if False only Main family is searched, otherwise
                             all families are used
        :type all_families: bool
        :param all_dist: if False only the shortest distance is returned,
                         otherwise all relationships
        :type all_dist: bool
        :param only_birth: if True only parents with birth relation are
                           considered
        :type only_birth: bool
        """
        #data storage to communicate with recursive functions
        self.__max_depth_reached = False
        self.__loop_detected = False
        self.__max_depth = self.get_depth()
        self.__all_families = all_families
        self.__all_dist = all_dist
        self.__only_birth = only_birth
        self.__crosslinks = False # no crosslinks
        first_rel = -1
        second_rel = -1
        self.__msg = []
        common = []
        first_map = {}
        second_map = {}
        rank = 9999999
        try:
            # reuse the cached ancestor map of orig_person when it is valid
            if (self.storemap and self.stored_map is not None
                    and self.map_handle == orig_person.handle
                    and not self.dirtymap):
                first_map = self.stored_map
                self.__max_depth_reached, self.__loop_detected, \
                    self.__all_families,\
                    self.__all_dist, self.__only_birth,\
                    self.__crosslinks, self.__msg = self.map_meta
                self.__msg = list(self.__msg)
            else:
                # build the full ancestor map of orig_person
                self.__apply_filter(db, orig_person, '', [], first_map)
                self.map_meta = (self.__max_depth_reached,
                                 self.__loop_detected,
                                 self.__all_families,
                                 self.__all_dist, self.__only_birth,
                                 self.__crosslinks, list(self.__msg))
            # walk other_person's ancestors, stopping on common ancestors
            self.__apply_filter(db, other_person, '', [], second_map,
                                stoprecursemap=first_map)
        except RuntimeError:
            return (-1, None, -1, [], -1, []), \
                   [_("Relationship loop detected")] + self.__msg
        if self.storemap:
            self.stored_map = first_map
            self.dirtymap = False
            self.map_handle = orig_person.handle
        for person_handle in second_map:
            if person_handle in first_map:
                com = []
                #a common ancestor
                for rel1, fam1 in zip(first_map[person_handle][0],
                                      first_map[person_handle][1]):
                    len1 = len(rel1)
                    for rel2, fam2 in zip(second_map[person_handle][0],
                                          second_map[person_handle][1]):
                        len2 = len(rel2)
                        #collect paths to arrive at common ancestor
                        com.append((len1+len2, person_handle, rel1, fam1,
                                    rel2, fam2))
                #insert common ancestor in correct position,
                # if shorter links, check if not subset
                # if longer links, check if not superset
                pos = 0
                for (ranknew, handlenew, rel1new, fam1new, rel2new,
                     fam2new) in com:
                    insert = True
                    for rank, handle, rel1, fam1, rel2, fam2 in common:
                        if ranknew < rank:
                            break
                        elif ranknew >= rank:
                            #check subset
                            if rel1 == rel1new[:len(rel1)] and \
                                    rel2 == rel2new[:len(rel2)]:
                                #subset relation exists already
                                insert = False
                                break
                        pos += 1
                    if insert:
                        if common:
                            common.insert(pos, (ranknew, handlenew, rel1new,
                                                fam1new, rel2new, fam2new))
                        else:
                            common = [(ranknew, handlenew, rel1new, fam1new,
                                       rel2new, fam2new)]
                        #now check if superset must be deleted from common
                        deletelist = []
                        index = pos+1
                        for (rank, handle, rel1, fam1, rel2,
                             fam2) in common[pos+1:]:
                            if rel1new == rel1[:len(rel1new)] and \
                                    rel2new == rel2[:len(rel2new)]:
                                deletelist.append(index)
                            index += 1
                        deletelist.reverse()
                        for index in deletelist:
                            del common[index]
        #check for extra messages
        if self.__max_depth_reached:
            self.__msg += [_('Family Tree reaches back more than the maximum '
                             '%d generations searched.\nIt is possible that '
                             'relationships have been missed') %
                           (self.__max_depth)]
        if common and not self.__all_dist:
            # only the shortest (first) relationship is wanted
            rank = common[0][0]
            person_handle = common[0][1]
            first_rel = common[0][2]
            first_fam = common[0][3]
            second_rel = common[0][4]
            second_fam = common[0][5]
            return (rank, person_handle, first_rel, first_fam, second_rel,
                    second_fam), self.__msg
        if common:
            #list with tuples (rank, handle person,rel_str_orig,rel_fam_orig,
            # rel_str_other,rel_fam_str) and messages
            return common, self.__msg
        if not self.__all_dist:
            return (-1, None, '', [], '', []), self.__msg
        else:
            return [(-1, None, '', [], '', [])], self.__msg
    def __apply_filter(self, db, person, rel_str, rel_fam, pmap,
                       depth=1, stoprecursemap=None):
        """
        Recursively build a map of ancestors of person.

        Typically this method is called recursively in two ways:
        First method is stoprecursemap = None
            In this case a recurse map is built by storing all data.
        Second method is with a stoprecursemap given
            In this case parents are recursively looked up. If present in
            stoprecursemap, a common ancestor is found, and the method can
            stop looking further. If however self.__crosslinks == True, the
            data of first contains loops, and parents will be looked up
            anyway and stored if common. At the end the doubles are
            filtered out.

        :param rel_str: relation path walked so far (eg 'ff')
        :param rel_fam: family indices along the path, parallel to rel_str
        :param pmap: map being built:
            person handle -> [[rel_str, ...], [rel_fam, ...]]
        """
        if person is None or not person.handle:
            return
        if depth > self.__max_depth:
            # stop recursing past the configured generation depth
            self.__max_depth_reached = True
            return
        depth += 1
        commonancestor = False
        store = True #normally we store all parents
        if stoprecursemap:
            store = False #but not if a stop map given
            if person.handle in stoprecursemap:
                commonancestor = True
                store = True
        #add person to the map, take into account that person can be obtained
        #from different sides
        if person.handle in pmap:
            #person is already a grandparent in another branch, we already have
            # had lookup of all parents, we call that a crosslink
            if not stoprecursemap:
                self.__crosslinks = True
            pmap[person.handle][0] += [rel_str]
            pmap[person.handle][1] += [rel_fam]
            #check if there is no loop father son of his son, ...
            # loop means person is twice reached, same rel_str in begin
            for rel1 in pmap[person.handle][0]:
                for rel2 in pmap[person.handle][0]:
                    if len(rel1) < len(rel2) and \
                            rel1 == rel2[:len(rel1)]:
                        #loop, keep one message in storage!
                        self.__loop_detected = True
                        self.__msg += [_("Relationship loop detected:") + " " +
                                       _("Person %(person)s connects to himself via %(relation)s") %
                                       {'person' : person.get_primary_name().get_name(),
                                        'relation' : rel2[len(rel1):]}]
                        return
        elif store:
            pmap[person.handle] = [[rel_str], [rel_fam]]
        #having added person to the pmap, we only look up recursively to
        # parents if this person is not common relative
        # if however the first map has crosslinks, we need to continue reduced
        if commonancestor and not self.__crosslinks:
            #don't continue search, great speedup!
            return
        family_handles = []
        main = person.get_main_parents_family_handle()
        if main:
            family_handles = [main]
        if self.__all_families:
            family_handles = person.get_parent_family_handle_list()
        try:
            parentstodo = {}
            fam = 0
            for family_handle in family_handles:
                rel_fam_new = rel_fam + [fam]
                family = db.get_family_from_handle(family_handle)
                if not family:
                    continue
                #obtain childref for this person
                childrel = [(ref.get_mother_relation(),
                             ref.get_father_relation())
                            for ref in family.get_child_ref_list()
                            if ref.ref == person.handle]
                fhandle = family.father_handle
                mhandle = family.mother_handle
                # schedule father and mother for recursive lookup; the path
                # step is the birth or non-birth relation code
                for data in [(fhandle, self.REL_FATHER,
                              self.REL_FATHER_NOTBIRTH, childrel[0][1]),
                             (mhandle, self.REL_MOTHER,
                              self.REL_MOTHER_NOTBIRTH, childrel[0][0])]:
                    if data[0] and data[0] not in parentstodo:
                        persontodo = db.get_person_from_handle(data[0])
                        if data[3] == ChildRefType.BIRTH:
                            addstr = data[1]
                        elif not self.__only_birth:
                            addstr = data[2]
                        else:
                            addstr = ''
                        if addstr:
                            parentstodo[data[0]] = (persontodo,
                                                    rel_str + addstr,
                                                    rel_fam_new)
                    elif data[0] and data[0] in parentstodo:
                        #this person is already scheduled to research
                        #update family list
                        famlist = parentstodo[data[0]][2]
                        if not isinstance(famlist[-1], list) and \
                                fam != famlist[-1]:
                            famlist = famlist[:-1] + [[famlist[-1]]]
                        if isinstance(famlist[-1], list) and \
                                fam not in famlist[-1]:
                            famlist = famlist[:-1] + [famlist[-1] + [fam]]
                        parentstodo[data[0]] = (parentstodo[data[0]][0],
                                                parentstodo[data[0]][1],
                                                famlist)
                if not fhandle and not mhandle and stoprecursemap is None:
                    #family without parents, add brothers for orig person
                    #other person has recursemap, and will stop when seeing
                    #the brother.
                    child_list = [ref.ref for ref in family.get_child_ref_list()
                                  if ref.ref != person.handle]
                    addstr = self.REL_SIBLING
                    for chandle in child_list:
                        if chandle in pmap:
                            pmap[chandle][0] += [rel_str + addstr]
                            pmap[chandle][1] += [rel_fam_new]
                            #person is already a grandparent in another branch
                        else:
                            pmap[chandle] = [[rel_str+addstr], [rel_fam_new]]
                fam += 1
            for handle, data in parentstodo.items():
                self.__apply_filter(db, data[0],
                                    data[1], data[2],
                                    pmap, depth, stoprecursemap)
        except:
            # NOTE(review): bare except left as-is; it prints and swallows
            # any error raised while scanning families -- confirm this
            # best-effort behaviour is intended before narrowing it.
            import traceback
            traceback.print_exc()
            return
def collapse_relations(self, relations):
"""
Internal method to condense the relationships as returned by
get_relationship_distance_new.
Common ancestors in the same family are collapsed to one entry,
changing the person paths to family paths, eg 'mf' and 'mm' become 'ma'
relations : list of relations as returned by
get_relationship_distance_new with all_dist = True
returns : the same data as relations, but collapsed, hence the
handle entry is now a list of handles, and the
path to common ancestors can now contain family
identifiers (eg 'a', ...)
In the case of sibling, this is replaced by family
with common ancestor handles empty list []!
"""
if relations[0][0] == -1:
return relations
commonnew = []
existing_path = []
for relation in relations:
relstrfirst = None
commonhandle = [relation[1]]
if relation[2]:
relstrfirst = relation[2][:-1]
relstrsec = None
if relation[4]:
relstrsec = relation[4][:-1]
relfamfirst = relation[3][:]
relfamsec = relation[5][:]
#handle pure sibling:
rela2 = relation[2]
rela4 = relation[4]
if relation[2] and relation[2][-1] == self.REL_SIBLING:
#sibling will be the unique common ancestor,
#change to a family with unknown handle for common ancestor
rela2 = relation[2][:-1] + self.REL_FAM_BIRTH
rela4 = relation[4] + self.REL_FAM_BIRTH
relfamsec = relfamsec + [relfamfirst[-1]]
relstrsec = relation[4][:-1]
commonhandle = []
# a unique path to family of common person:
familypaths = []
if relfamfirst and isinstance(relfamfirst[-1], list):
if relfamsec and isinstance(relfamsec[-1], list):
for val1 in relfamfirst[-1]:
for val2 in relfamsec[-1]:
familypaths.append((relstrfirst, relstrsec,
relfamfirst[:-1] + [val1],
relfamsec[:-1] + [val2]))
else:
for val1 in relfamfirst[-1]:
familypaths.append((relstrfirst, relstrsec,
relfamfirst[:-1] + [val1],
relfamsec))
elif relfamsec and isinstance(relfamsec[-1], list):
for val2 in relfamsec[-1]:
familypaths.append((relstrfirst, relstrsec,
relfamfirst,
relfamsec[:-1] + [val2]))
else:
familypaths.append((relstrfirst, relstrsec,
relfamfirst, relfamsec))
for familypath in familypaths:
#familypath = (relstrfirst, relstrsec, relfamfirst, relfamsec)
try:
posfam = existing_path.index(familypath)
except ValueError:
posfam = None
#if relstr is '', the ancestor is unique, if posfam None,
# first time we see this family path
if (posfam is not None and relstrfirst is not None and
relstrsec is not None):
# We already have a common ancestor of this family, just
# add the other, setting correct family relation.
tmp = commonnew[posfam]
frstcomstr = rela2[-1]
scndcomstr = tmp[2][-1]
newcomstra = self._famrel_from_persrel(frstcomstr,
scndcomstr)
frstcomstr = rela4[-1]
scndcomstr = tmp[4][-1]
newcomstrb = self._famrel_from_persrel(frstcomstr,
scndcomstr)
commonnew[posfam] = (tmp[0], tmp[1]+commonhandle,
rela2[:-1]+newcomstra,
tmp[3], rela4[:-1]+newcomstrb,
tmp[5])
else:
existing_path.append(familypath)
commonnew.append((relation[0], commonhandle, rela2,
familypath[2], rela4, familypath[3]))
#we now have multiple person handles, single families, now collapse
# families again if all else equal
collapsed = commonnew[:1]
for rel in commonnew[1:]:
found = False
for newrel in collapsed:
if newrel[0:3] == rel[0:3] and newrel[4] == rel[4]:
#another familypath to arrive at same result, merge
path1 = []
path2 = []
for a, b in zip(newrel[3], rel[3]):
if a == b:
path1.append(a)
elif isinstance(a, list):
path1.append(a.append(b))
else:
path1.append([a, b])
for a, b in zip(newrel[5], rel[5]):
if a == b:
path2.append(a)
elif isinstance(a, list):
path2.append(a.append(b))
else:
path2.append([a, b])
newrel[3][:] = path1[:]
newrel[5][:] = path2[:]
found = True
break
if not found:
collapsed.append(rel)
return collapsed
def _famrel_from_persrel(self, persrela, persrelb):
"""
Conversion from eg 'f' and 'm' to 'a', so relation to the two
persons of a common family is converted to a family relation
"""
if persrela == persrelb:
#should not happen, procedure called in error, just return value
return persrela
if ((persrela == self.REL_MOTHER and persrelb == self.REL_FATHER) or
(persrelb == self.REL_MOTHER and persrela == self.REL_FATHER)):
return self.REL_FAM_BIRTH
if ((persrela == self.REL_MOTHER and
persrelb == self.REL_FATHER_NOTBIRTH) or
(persrelb == self.REL_MOTHER and
persrela == self.REL_FATHER_NOTBIRTH)):
return self.REL_FAM_BIRTH_MOTH_ONLY
if ((persrela == self.REL_FATHER and
persrelb == self.REL_MOTHER_NOTBIRTH) or
(persrelb == self.REL_FATHER and
persrela == self.REL_MOTHER_NOTBIRTH)):
return self.REL_FAM_BIRTH_FATH_ONLY
#catch calling with family relations already, return val
if (persrela == self.REL_FAM_BIRTH or
persrela == self.REL_FAM_BIRTH_FATH_ONLY or
persrela == self.REL_FAM_BIRTH_MOTH_ONLY or
persrela == self.REL_FAM_NONBIRTH):
return persrela
if (persrelb == self.REL_FAM_BIRTH or
persrelb == self.REL_FAM_BIRTH_FATH_ONLY or
persrelb == self.REL_FAM_BIRTH_MOTH_ONLY or
persrelb == self.REL_FAM_NONBIRTH):
return persrelb
return self.REL_FAM_NONBIRTH
def only_birth(self, path):
"""
Given a path to common ancestor. Return True if only birth
relations, False otherwise
"""
for value in path:
if value in [self.REL_FAM_NONBIRTH, self.REL_FATHER_NOTBIRTH,
self.REL_MOTHER_NOTBIRTH]:
return False
return True
def get_one_relationship(self, db, orig_person, other_person,
extra_info=False, olocale=glocale):
"""
Returns a string representing the most relevant relationship between
the two people. If extra_info = True, extra information is returned:
(relation_string, distance_common_orig, distance_common_other)
If olocale is passed in (a GrampsLocale) that language will be used.
:param olocale: allow selection of the relationship language
:type olocale: a GrampsLocale instance
"""
self._locale = olocale
stop = False
if orig_person is None:
rel_str = _("undefined")
stop = True
if not stop and orig_person.get_handle() == other_person.get_handle():
rel_str = ''
stop = True
if not stop:
is_spouse = self.is_spouse(db, orig_person, other_person)
if is_spouse:
rel_str = is_spouse
stop = True
if stop:
if extra_info:
return (rel_str, -1, -1)
else:
return rel_str
data, msg = self.get_relationship_distance_new(
db, orig_person, other_person, all_dist=True, all_families=True,
only_birth=False)
if data[0][0] == -1:
if extra_info:
return ('', -1, -1)
else:
return ''
data = self.collapse_relations(data)
#most relevant relationship is a birth family relation of lowest rank
databest = [data[0]]
rankbest = data[0][0]
for rel in data:
#data is sorted on rank
if rel[0] == rankbest:
databest.append(rel)
rel = databest[0]
dist_orig = len(rel[2])
dist_other = len(rel[4])
if len(databest) == 1:
birth = self.only_birth(rel[2]) and self.only_birth(rel[4])
if dist_orig == dist_other == 1:
rel_str = self.get_sibling_relationship_string(
self.get_sibling_type(db, orig_person, other_person),
orig_person.get_gender(),
other_person.get_gender())
else:
rel_str = self.get_single_relationship_string(
dist_orig, dist_other,
orig_person.get_gender(), other_person.get_gender(),
rel[2], rel[4], only_birth=birth,
in_law_a=False, in_law_b=False)
else:
order = [self.REL_FAM_BIRTH, self.REL_FAM_BIRTH_MOTH_ONLY,
self.REL_FAM_BIRTH_FATH_ONLY, self.REL_MOTHER,
self.REL_FATHER, self.REL_SIBLING, self.REL_FAM_NONBIRTH,
self.REL_MOTHER_NOTBIRTH, self.REL_FATHER_NOTBIRTH]
orderbest = order.index(self.REL_MOTHER)
for relother in databest:
relbirth = self.only_birth(rel[2]) and self.only_birth(rel[4])
if relother[2] == '' or relother[4] == '':
#direct relation, take that
rel = relother
break
if not relbirth and self.only_birth(relother[2]) \
and self.only_birth(relother[4]):
#birth takes precedence
rel = relother
continue
if order.index(relother[2][-1]) < order.index(rel[2][-1]) and\
order.index(relother[2][-1]) < orderbest:
rel = relother
continue
if order.index(relother[4][-1]) < order.index(rel[4][-1]) and\
order.index(relother[4][-1]) < orderbest:
rel = relother
continue
if order.index(rel[2][-1]) < orderbest or \
order.index(rel[4][-1]) < orderbest:
#keep the good one
continue
if order.index(relother[2][-1]) < order.index(rel[2][-1]):
rel = relother
continue
if order.index(relother[2][-1]) == order.index(rel[2][-1]) and\
order.index(relother[4][-1]) < order.index(rel[4][-1]):
rel = relother
continue
dist_orig = len(rel[2])
dist_other = len(rel[4])
birth = self.only_birth(rel[2]) and self.only_birth(rel[4])
if dist_orig == dist_other == 1:
rel_str = self.get_sibling_relationship_string(
self.get_sibling_type(db, orig_person, other_person),
orig_person.get_gender(),
other_person.get_gender())
else:
rel_str = self.get_single_relationship_string(
dist_orig, dist_other,
orig_person.get_gender(), other_person.get_gender(),
rel[2], rel[4], only_birth=birth,
in_law_a=False, in_law_b=False)
if extra_info:
return (rel_str, dist_orig, dist_other)
else:
return rel_str
    def get_all_relationships(self, db, orig_person, other_person):
        """
        Return a tuple, of which the first entry is a list with all
        relationships in text, and the second a list of lists of all common
        ancestors that have that text as relationship.
        """
        relstrings = []
        commons = {}
        if orig_person is None:
            return ([], [])
        if orig_person.get_handle() == other_person.get_handle():
            return ([], [])
        is_spouse = self.is_spouse(db, orig_person, other_person)
        if is_spouse:
            # the partner relation has no common ancestors
            relstrings.append(is_spouse)
            commons[is_spouse] = []
        data, msg = self.get_relationship_distance_new(
            db, orig_person, other_person, all_dist=True, all_families=True,
            only_birth=False)
        if data[0][0] != -1:
            data = self.collapse_relations(data)
            for rel in data:
                rel2 = rel[2]
                rel4 = rel[4]
                rel1 = rel[1]
                dist_orig = len(rel[2])
                dist_other = len(rel[4])
                if rel[2] and rel[2][-1] == self.REL_SIBLING:
                    # pure sibling: convert to a family relation one
                    # generation up (no single common ancestor handle)
                    rel2 = rel2[:-1] + self.REL_FAM_BIRTH
                    dist_other += 1
                    rel4 = rel4 + self.REL_FAM_BIRTH
                    rel1 = None
                birth = self.only_birth(rel2) and self.only_birth(rel4)
                if dist_orig == dist_other == 1:
                    rel_str = self.get_sibling_relationship_string(
                        self.get_sibling_type(db, orig_person, other_person),
                        orig_person.get_gender(), other_person.get_gender())
                else:
                    rel_str = self.get_single_relationship_string(
                        dist_orig, dist_other,
                        orig_person.get_gender(), other_person.get_gender(),
                        rel2, rel4, only_birth=birth,
                        in_law_a=False, in_law_b=False)
                if rel_str not in relstrings:
                    relstrings.append(rel_str)
                    if rel1:
                        commons[rel_str] = rel1
                    else:
                        #unknown parent eg
                        commons[rel_str] = []
                else:
                    if rel1:
                        commons[rel_str].extend(rel1)
        #construct the return tuple, relstrings is ordered on rank automatically
        common_list = []
        for rel_str in relstrings:
            common_list.append(commons[rel_str])
        return (relstrings, common_list)
def get_plural_relationship_string(self, Ga, Gb,
                                   reltocommon_a='', reltocommon_b='',
                                   only_birth=True,
                                   in_law_a=False, in_law_b=False):
    """
    Provide a string that describes the relationsip between a person, and
    a group of people with the same relationship. E.g. "grandparents" or
    "children".
    Ga and Gb can be used to mathematically calculate the relationship.

    .. seealso::
        http://en.wikipedia.org/wiki/Cousin#Mathematical_definitions

    :param Ga: The number of generations between the main person and the
               common ancestor.
    :type Ga: int
    :param Gb: The number of generations between the group of people and the
               common ancestor
    :type Gb: int
    :param reltocommon_a: relation path to common ancestor or common
                          Family for person a.
                          Note that length = Ga
    :type reltocommon_a: str
    :param reltocommon_b: relation path to common ancestor or common
                          Family for person b.
                          Note that length = Gb
    :type reltocommon_b: str
    :param only_birth: True if relation between a and b is by birth only
                       False otherwise
    :type only_birth: bool
    :param in_law_a: True if path to common ancestors is via the partner
                     of person a
    :type in_law_a: bool
    :param in_law_b: True if path to common ancestors is via the partner
                     of person b
    :type in_law_b: bool
    :returns: A string describing the relationship between the person and
              the group.
    :rtype: str
    """
    # fallback wording when the distances exceed all the lookup tables
    rel_str = "distant relatives"
    if Ga == 0:
        # These are descendants
        if Gb < len(_CHILDREN_LEVEL):
            rel_str = _CHILDREN_LEVEL[Gb]
        else:
            rel_str = "distant descendants"
    elif Gb == 0:
        # These are parents/grand parents
        if Ga < len(_PARENTS_LEVEL):
            rel_str = _PARENTS_LEVEL[Ga]
        else:
            rel_str = "distant ancestors"
    elif Gb == 1:
        # These are siblings/aunts/uncles
        if Ga < len(_SIBLINGS_LEVEL):
            rel_str = _SIBLINGS_LEVEL[Ga]
        else:
            rel_str = "distant uncles/aunts"
    elif Ga == 1:
        # These are nieces/nephews
        if Gb < len(_NEPHEWS_NIECES_LEVEL):
            rel_str = _NEPHEWS_NIECES_LEVEL[Gb]
        else:
            rel_str = "distant nephews/nieces"
    elif Ga > 1 and Ga == Gb:
        # These are cousins in the same generation
        if Ga <= len(_LEVEL_NAME):
            rel_str = "%s cousins" % _LEVEL_NAME[Ga-1]
        else:
            rel_str = "distant cousins"
    elif Ga > 1 and Ga > Gb:
        # These are cousins in different generations with the second person
        # being in a higher generation from the common ancestor than the
        # first person.
        if Gb <= len(_LEVEL_NAME) and (Ga-Gb) < len(_REMOVED_LEVEL):
            rel_str = "%s cousins%s (up)" % (_LEVEL_NAME[Gb-1],
                                             _REMOVED_LEVEL[Ga-Gb])
        else:
            rel_str = "distant cousins"
    elif Gb > 1 and Gb > Ga:
        # These are cousins in different generations with the second person
        # being in a lower generation from the common ancestor than the
        # first person.
        if Ga <= len(_LEVEL_NAME) and (Gb-Ga) < len(_REMOVED_LEVEL):
            rel_str = "%s cousins%s (down)" % (_LEVEL_NAME[Ga-1],
                                               _REMOVED_LEVEL[Gb-Ga])
        else:
            rel_str = "distant cousins"
    if in_law_b is True:
        # NOTE(review): only in_law_b changes the plural wording here;
        # in_law_a is accepted but unused in this (English) calculator.
        rel_str = "spouses of %s" % rel_str
    return rel_str
def get_single_relationship_string(self, Ga, Gb, gender_a, gender_b,
                                   reltocommon_a, reltocommon_b,
                                   only_birth=True,
                                   in_law_a=False, in_law_b=False):
    """
    Provide a string that describes the relationsip between a person, and
    another person. E.g. "grandparent" or "child".
    To be used as: 'person b is the grandparent of a', this will be in
    translation string: 'person b is the %(relation)s of a'
    Note that languages with gender should add 'the' inside the
    translation, so eg in french: 'person b est %(relation)s de a'
    where relation will be here: le grandparent

    Ga and Gb can be used to mathematically calculate the relationship.

    .. seealso::
        http://en.wikipedia.org/wiki/Cousin#Mathematical_definitions

    Some languages need to know the specific path to the common ancestor.
    Those languages should use reltocommon_a and reltocommon_b which is
    a string like 'mfmf'.
    The possible string codes are:

    ======================= ===========================================
    Code                    Description
    ======================= ===========================================
    REL_MOTHER              # going up to mother
    REL_FATHER              # going up to father
    REL_MOTHER_NOTBIRTH     # going up to mother, not birth relation
    REL_FATHER_NOTBIRTH     # going up to father, not birth relation
    REL_FAM_BIRTH           # going up to family (mother and father)
    REL_FAM_NONBIRTH        # going up to family, not birth relation
    REL_FAM_BIRTH_MOTH_ONLY # going up to fam, only birth rel to mother
    REL_FAM_BIRTH_FATH_ONLY # going up to fam, only birth rel to father
    ======================= ===========================================

    Prefix codes are stripped, so REL_FAM_INLAW_PREFIX is not present.
    If the relation starts with the inlaw of the person a, then 'in_law_a'
    is True, if it starts with the inlaw of person b, then 'in_law_b' is
    True.
    Also REL_SIBLING (# going sideways to sibling (no parents)) is not
    passed to this routine. The collapse_relations changes this to a
    family relation.
    Hence, calling routines should always strip REL_SIBLING and
    REL_FAM_INLAW_PREFIX before calling get_single_relationship_string()
    Note that only_birth=False, means that in the reltocommon one of the
    NOTBIRTH specifiers is present.
    The REL_FAM identifiers mean that the relation is not via a common
    ancestor, but via a common family (note that that is not possible for
    direct descendants or direct ancestors!). If the relation to one of the
    parents in that common family is by birth, then 'only_birth' is not
    set to False. The only_birth() method is normally used for this.

    :param Ga: The number of generations between the main person and the
               common ancestor.
    :type Ga: int
    :param Gb: The number of generations between the other person and the
               common ancestor.
    :type Gb: int
    :param gender_a: gender of person a
    :type gender_a: int gender
    :param gender_b: gender of person b
    :type gender_b: int gender
    :param reltocommon_a: relation path to common ancestor or common
                          Family for person a.
                          Note that length = Ga
    :type reltocommon_a: str
    :param reltocommon_b: relation path to common ancestor or common
                          Family for person b.
                          Note that length = Gb
    :type reltocommon_b: str
    :param in_law_a: True if path to common ancestors is via the partner
                     of person a
    :type in_law_a: bool
    :param in_law_b: True if path to common ancestors is via the partner
                     of person b
    :type in_law_b: bool
    :param only_birth: True if relation between a and b is by birth only
                       False otherwise
    :type only_birth: bool
    :returns: A string describing the relationship between the two people
    :rtype: str

    .. note:: 1. the self.REL_SIBLING should not be passed to this routine,
                 so we should not check on it. All other self.
              2. for better determination of siblings, use if Ga=1=Gb
                 get_sibling_relationship_string
    """
    # 'step' and 'inlaw' are qualifiers glued into the relative strings
    if only_birth:
        step = ''
    else:
        step = self.STEP
    if in_law_a or in_law_b:
        inlaw = self.INLAW
    else:
        inlaw = ''
    # fallback wording when no specific branch below applies
    rel_str = "distant %srelative%s" % (step, inlaw)
    if Ga == 0:
        # b is descendant of a
        if Gb == 0:
            rel_str = 'same person'
        elif gender_b == MALE:
            rel_str = self._get_son(Gb, step, inlaw)
        elif gender_b == FEMALE:
            rel_str = self._get_daughter(Gb, step, inlaw)
        else:
            rel_str = self._get_child_unknown(Gb, step, inlaw)
    elif Gb == 0:
        # b is parents/grand parent of a
        if gender_b == MALE:
            rel_str = self._get_father(Ga, step, inlaw)
        elif gender_b == FEMALE:
            rel_str = self._get_mother(Ga, step, inlaw)
        else:
            rel_str = self._get_parent_unknown(Ga, step, inlaw)
    elif Gb == 1:
        # b is sibling/aunt/uncle of a
        if gender_b == MALE:
            rel_str = self._get_uncle(Ga, step, inlaw)
        elif gender_b == FEMALE:
            rel_str = self._get_aunt(Ga, step, inlaw)
        else:
            rel_str = self._get_sibling(Ga, step, inlaw)
    elif Ga == 1:
        # b is niece/nephew of a
        if gender_b == MALE:
            rel_str = self._get_nephew(Gb-1, step, inlaw)
        elif gender_b == FEMALE:
            rel_str = self._get_niece(Gb-1, step, inlaw)
        elif Gb < len(_NIECE_LEVEL) and Gb < len(_NEPHEW_LEVEL):
            # unknown gender: offer both wordings
            # NOTE(review): the bound uses Gb while the getters take Gb-1;
            # confirm against _get_niece/_get_nephew that this is intended.
            rel_str = "%s or %s" % (self._get_nephew(Gb-1, step, inlaw),
                                    self._get_niece(Gb-1, step, inlaw))
        else:
            rel_str = "distant %snephews/nieces%s" % (step, inlaw)
    elif Ga == Gb:
        # a and b cousins in the same generation
        rel_str = self._get_cousin(Ga-1, 0, dir='', step=step, inlaw=inlaw)
    elif Ga > Gb:
        # These are cousins in different generations with the second person
        # being in a higher generation from the common ancestor than the
        # first person.
        rel_str = self._get_cousin(Gb-1, Ga-Gb, dir=' (up)',
                                   step=step, inlaw=inlaw)
    elif Gb > Ga:
        # These are cousins in different generations with the second person
        # being in a lower generation from the common ancestor than the
        # first person.
        rel_str = self._get_cousin(Ga-1, Gb-Ga, dir=' (down)',
                                   step=step, inlaw=inlaw)
    return rel_str
def get_sibling_relationship_string(self, sib_type, gender_a, gender_b,
                                    in_law_a=False, in_law_b=False):
    """
    Determine the string giving the relation between two siblings of
    type sib_type.
    Eg: b is the brother of a
    Here 'brother' is the string we need to determine
    This method gives more details about siblings than
    get_single_relationship_string can do.

    :param sib_type: one of NORM_SIB, HALF_SIB_MOTHER, HALF_SIB_FATHER,
                     STEP_SIB or UNKNOWN_SIB
    :param gender_a: gender of person a (unused in English)
    :param gender_b: gender of person b, selects brother/sister wording
    :param in_law_a: True if the relation goes via the partner of a
    :param in_law_b: True if the relation goes via the partner of b
    :returns: the sibling relationship string

    .. warning:: DON'T TRANSLATE THIS PROCEDURE IF LOGIC IS EQUAL IN YOUR
                 LANGUAGE, AND SAME METHODS EXIST (get_uncle, get_aunt,
                 get_sibling)
    """
    if sib_type == self.NORM_SIB or sib_type == self.UNKNOWN_SIB:
        typestr = ''
    elif sib_type == self.HALF_SIB_MOTHER \
            or sib_type == self.HALF_SIB_FATHER:
        typestr = self.HALF
    elif sib_type == self.STEP_SIB:
        typestr = self.STEP
    else:
        # Defensive fallback: an unrecognized sib_type previously left
        # ``typestr`` unbound and raised NameError below.
        typestr = ''
    if in_law_a or in_law_b:
        inlaw = self.INLAW
    else:
        inlaw = ''
    # Ga=1, Gb=1 siblings reuse the uncle/aunt level-1 strings
    if gender_b == MALE:
        rel_str = self._get_uncle(1, typestr, inlaw)
    elif gender_b == FEMALE:
        rel_str = self._get_aunt(1, typestr, inlaw)
    else:
        rel_str = self._get_sibling(1, typestr, inlaw)
    return rel_str
def get_partner_relationship_string(self, spouse_type, gender_a, gender_b):
    """
    Determine the string giving the relation between two partners of
    type spouse_type.
    Eg: b is the spouse of a
    Here 'spouse' is the string we need to determine

    .. warning:: DON'T TRANSLATE THIS PROCEDURE IF LOGIC IS EQUAL IN YOUR
                 LANGUAGE, AS GETTEXT IS ALREADY USED !
    """
    #english only needs gender of b, we don't guess if unknown like in old
    # procedure as that is stupid in present day cases!
    gender = gender_b
    if not spouse_type:
        # empty/zero spouse_type: no relation to report
        return ''
    trans_text = _
    # trans_text is a defined keyword (see po/update_po.py, po/genpot.sh)
    # keep all msgids below as literals inside trans_text(...) so the
    # extraction scripts can find them
    if hasattr(self, '_locale') and self._locale != glocale:
        trans_text = self._locale.translation.sgettext
    # the 'context|message' msgids use gettext message contexts
    if spouse_type == self.PARTNER_MARRIED:
        if gender == MALE:
            return trans_text("husband")
        elif gender == FEMALE:
            return trans_text("wife")
        else:
            return trans_text("gender unknown|spouse")
    elif spouse_type == self.PARTNER_EX_MARRIED:
        if gender == MALE:
            return trans_text("ex-husband")
        elif gender == FEMALE:
            return trans_text("ex-wife")
        else:
            return trans_text("gender unknown|ex-spouse")
    elif spouse_type == self.PARTNER_UNMARRIED:
        if gender == MALE:
            return trans_text("unmarried|husband")
        elif gender == FEMALE:
            return trans_text("unmarried|wife")
        else:
            return trans_text("gender unknown,unmarried|spouse")
    elif spouse_type == self.PARTNER_EX_UNMARRIED:
        if gender == MALE:
            return trans_text("unmarried|ex-husband")
        elif gender == FEMALE:
            return trans_text("unmarried|ex-wife")
        else:
            return trans_text("gender unknown,unmarried|ex-spouse")
    elif spouse_type == self.PARTNER_CIVIL_UNION:
        if gender == MALE:
            return trans_text("male,civil union|partner")
        elif gender == FEMALE:
            return trans_text("female,civil union|partner")
        else:
            return trans_text("gender unknown,civil union|partner")
    elif spouse_type == self.PARTNER_EX_CIVIL_UNION:
        if gender == MALE:
            return trans_text("male,civil union|former partner")
        elif gender == FEMALE:
            return trans_text("female,civil union|former partner")
        else:
            return trans_text("gender unknown,civil union|former partner")
    elif spouse_type == self.PARTNER_UNKNOWN_REL:
        if gender == MALE:
            return trans_text("male,unknown relation|partner")
        elif gender == FEMALE:
            return trans_text("female,unknown relation|partner")
        else:
            return trans_text("gender unknown,unknown relation|partner")
    else:
        # here we have spouse_type == self.PARTNER_EX_UNKNOWN_REL
        # or other not catched types
        if gender == MALE:
            return trans_text("male,unknown relation|former partner")
        elif gender == FEMALE:
            return trans_text("female,unknown relation|former partner")
        else:
            return trans_text("gender unknown,unknown relation|former partner")
def connect_db_signals(self, dbstate):
    """
    We can save work by storing a map, however, if database changes
    this map must be regenerated.
    Before close, the calling app must call disconnect_db_signals
    """
    if self.__db_connected:
        # already subscribed; do not connect twice
        return
    # connecting twice would leak signal keys
    assert len(self.signal_keys) == 0
    self.state_signal_key = dbstate.connect('database-changed',
                                            self._dbchange_callback)
    self.__connect_db_signals(dbstate.db)
def __connect_db_signals(self, db):
    """Subscribe to every person/family change signal of ``db`` and mark
    the stored map as usable."""
    monitored_signals = ('person-add', 'person-update', 'person-delete',
                         'person-rebuild', 'family-add', 'family-update',
                         'family-delete', 'family-rebuild',
                         'database-changed')
    self.signal_keys.extend(db.connect(signal, self._datachange_callback)
                            for signal in monitored_signals)
    self.storemap = True
    self.__db_connected = True
def disconnect_db_signals(self, dbstate):
    """
    Method to disconnect to all signals the relationship calculator is
    subscribed
    """
    # drop the 'database-changed' subscription first, then every
    # per-signal subscription made by __connect_db_signals
    dbstate.disconnect(self.state_signal_key)
    for signal_key in self.signal_keys:
        dbstate.db.disconnect(signal_key)
    # the stored map may no longer be trusted once we stop listening
    self.storemap = False
    self.stored_map = None
def _dbchange_callback(self, db):
    """
    When database changes, the map can no longer be used.
    Connects must be remade
    """
    # invalidate the cached map; it is rebuilt lazily on next use
    self.dirtymap = True
    #signals are disconnected on close of old database, connect to new
    self.__connect_db_signals(db)
def _datachange_callback(self, handle_list=None):
    """
    When data in database changes, the map can no longer be used.
    As the map might be in use or might be generated at the moment,
    this method sets a dirty flag. Before reusing the map, this flag
    will be checked
    """
    # only flag here; regeneration happens wherever the map is consumed
    self.dirtymap = True
#-------------------------------------------------------------------------
#
# define the default relationshipcalculator
#
#-------------------------------------------------------------------------
# Cached relationship-calculator class for the current language
# (module-level singleton, set by get_relationship_calculator).
__RELCALC_CLASS = None

def get_relationship_calculator(reinit=False, clocale=glocale):
    """
    Return the relationship calculator for the current language.

    If clocale is passed in (a GrampsLocale) then that language will be used.

    :param reinit: force re-resolution of the calculator class even if one
                   is already cached
    :type reinit: bool
    :param clocale: allow selection of the relationship language
    :type clocale: a GrampsLocale instance
    """
    global __RELCALC_CLASS
    if __RELCALC_CLASS is None or reinit:
        lang = clocale.language[0]
        __RELCALC_CLASS = RelationshipCalculator
        # If lang not set default to English relationship calulator
        # See if lang begins with en_, English_ or english_
        # If so return standard relationship calculator.
        if lang.startswith("en") or lang == "C":
            return __RELCALC_CLASS()
        # set correct non English relationship calculator based on lang
        relation_translation_found = False
        for plugin in PluginRegister.get_instance().relcalc_plugins():
            if lang in plugin.lang_list:
                pmgr = BasePluginManager.get_instance()
                # the loaded module is put in variable mod
                mod = pmgr.load_plugin(plugin)
                if mod:
                    # look the class up by name instead of eval()'ing a
                    # string built from plugin data (safer, same result)
                    __RELCALC_CLASS = getattr(mod, plugin.relcalcclass)
                    relation_translation_found = True
                    break
        if not relation_translation_found and \
                len(PluginRegister.get_instance().relcalc_plugins()):
            LOG.warning(_("Family relationship translator not available for "
                          "language '%s'. Using 'english' instead."), lang)
    return __RELCALC_CLASS()
#-------------------------------------------------------------------------
#
# Tests
#
#-------------------------------------------------------------------------
# Number of generations exercised by each singular-relationship test loop.
MAX = 30
# Fixed-width format used to align the printed relationship strings.
FMT = '%+50s'
def _test(rcalc, onlybirth, inlawa, inlawb, printrelstr, test_num=None):
    """
    This is a generic test suite for the singular relationship
    TRANSLATORS: do NOT translate, use __main__ !
    """
    import sys
    import random
    random.seed()

    def _rand_f_m():
        # one random ancestry step: 'f' (father) or 'm' (mother)
        if random.randint(0, 1) == 0:
            return 'f'
        else:
            return 'm'

    def _rand_relstr(length, endstr):
        # random relation path of the given length, ending in endstr
        if length == 0:
            return ''
        relstr = ''
        for _unused in range(length - 1):
            relstr += _rand_f_m()
        return relstr + endstr

    def _show(rel, *info):
        # print one formatted relation, with the generating path(s) if asked
        if printrelstr:
            print(rel + ' |info:', *info)
        else:
            print(rel)

    def _run(title, start, case):
        # run one family of checks; case(i) returns the positional arguments
        # for get_single_relationship_string plus the info strings to show
        print(title)
        for i in range(start, MAX):
            (ga, gb, gen_a, gen_b, rel_a, rel_b), info = case(i)
            rel = FMT % rcalc.get_single_relationship_string(
                ga, gb, gen_a, gen_b, rel_a, rel_b, only_birth=onlybirth,
                in_law_a=inlawa, in_law_b=inlawb)
            _show(rel, *info)

    def _child(gender, endstr):
        # descendants: Ga=0, Gb=i, random path for person b
        def case(i):
            relstr = _rand_relstr(i, endstr)
            return (0, i, MALE, gender, '', relstr), (relstr,)
        return case

    def _parent(gender, endstr):
        # ancestors: Ga=i, Gb=0, random path for person a
        def case(i):
            relstr = _rand_relstr(i, endstr)
            return (i, 0, FEMALE, gender, relstr, ''), (relstr,)
        return case

    def _nephew(gender, fixed, endstr):
        # nieces/nephews: Ga=1 with a fixed one-step path for person a
        def case(i):
            relstr = _rand_relstr(i, endstr)
            return (1, i, FEMALE, gender, fixed, relstr), (relstr,)
        return case

    def _uncle(gen_a, gen_b, endstr, fixed):
        # uncles/aunts: Gb=1 with a fixed one-step path for person b
        def case(i):
            relstr = _rand_relstr(i, endstr)
            return (i, 1, gen_a, gen_b, relstr, fixed), (relstr,)
        return case

    def _cousin(gender, endstr):
        # cousins in the same generation: Ga = Gb = i, two random paths
        def case(i):
            relstra = _rand_relstr(i, endstr)
            relstrb = _rand_relstr(i, endstr)
            return (i, i, MALE, gender, relstra, relstrb), (relstra, relstrb)
        return case

    def _run_updown(title, up, reseed):
        # sparse random sample of cousins removed up or down a generation
        print(title)
        if reseed:
            random.seed()
        for i in range(1, MAX):
            for j in range(i, MAX):
                rnd = random.randint(0, 100)
                if rnd >= 10:
                    continue
                ga, gb = (j, i) if up else (i, j)
                relstra = _rand_relstr(ga, 'f')
                relstrb = _rand_relstr(gb, 'f')
                if rnd < 5:
                    gender, tag = FEMALE, 'female'
                else:
                    gender, tag = MALE, 'male'
                rel = (FMT + ' |info: ' + tag + ', Ga=%2d, Gb=%2d') % (
                    rcalc.get_single_relationship_string(
                        ga, gb, MALE, gender, relstra, relstrb,
                        only_birth=onlybirth,
                        in_law_a=inlawa, in_law_b=inlawb), ga, gb)
                _show(rel, relstra, relstrb)

    if test_num is None:
        print("""
Select a test:
0 - all tests
1 - testing sons
2 - testing daughters
3 - testing unknown children
4 - testing grandfathers
5 - testing grandmothers
6 - testing unknown parents
7 - testing nieces
8 - testing nephews
9 - testing unknown nephews/nieces
10 - testing uncles
11 - testing aunts
12 - testing unknown uncles/aunts
13 - testing male cousins same generation
14 - testing female cousins same generation
15 - testing unknown cousins same generation
16 - testing some cousins up
17 - testing some cousins down
Please enter a test number and press Enter for continue:
""")
        test_num = sys.stdin.readline().strip()
        test_num = int(test_num)
    if test_num in (0, 1):
        _run('\ntesting sons', 0, _child(MALE, 'f'))
    if test_num in (0, 2):
        _run('\ntesting daughters\n', 0, _child(FEMALE, 'm'))
    if test_num in (0, 3):
        _run('\ntesting unknown children\n', 0, _child(UNKNOWN, 'f'))
    if test_num in (0, 4):
        _run('\ntesting grandfathers\n', 0, _parent(MALE, 'f'))
    if test_num in (0, 5):
        _run('\ntesting grandmothers\n', 0, _parent(FEMALE, 'm'))
    if test_num in (0, 6):
        _run('\ntesting unknown parents\n', 0, _parent(UNKNOWN, 'f'))
    if test_num in (0, 7):
        _run('\ntesting nieces\n', 1, _nephew(FEMALE, 'm', 'm'))
    if test_num in (0, 8):
        _run('\ntesting nephews\n', 1, _nephew(MALE, 'f', 'f'))
    if test_num in (0, 9):
        _run('\ntesting unknown nephews/nieces\n', 1,
             _nephew(UNKNOWN, 'f', 'f'))
    if test_num in (0, 10):
        _run('\ntesting uncles\n', 1, _uncle(FEMALE, MALE, 'f', 'f'))
    if test_num in (0, 11):
        _run('\ntesting aunts\n', 1, _uncle(MALE, FEMALE, 'f', 'f'))
    if test_num in (0, 12):
        _run('\ntesting unknown uncles/aunts\n', 1,
             _uncle(MALE, UNKNOWN, 'm', 'm'))
    if test_num in (0, 13):
        _run('\ntesting male cousins same generation\n', 1,
             _cousin(MALE, 'f'))
    if test_num in (0, 14):
        _run('\ntesting female cousins same generation\n', 1,
             _cousin(FEMALE, 'm'))
    if test_num in (0, 15):
        _run('\ntesting unknown cousins same generation\n', 1,
             _cousin(UNKNOWN, 'm'))
    if test_num in (0, 16):
        _run_updown('\ntesting some cousins up\n', True, True)
    if test_num in (0, 17):
        _run_updown('\ntesting some cousins down\n', False, False)
def _testsibling(rcalc):
    """Print every sibling-type string for each gender and in-law flag."""
    sibling_types = (
        (rcalc.NORM_SIB, 'sibling'),
        (rcalc.HALF_SIB_MOTHER, 'half sib mother side'),
        (rcalc.HALF_SIB_FATHER, 'half sib father side'),
        (rcalc.STEP_SIB, 'step sib'),
        (rcalc.UNKNOWN_SIB, 'undetermined sib'),
    )
    for gender, gender_name in ((MALE, 'male'),
                                (FEMALE, 'female'),
                                (UNKNOWN, 'unknown')):
        for in_law in (False, True):
            for sib_type, type_name in sibling_types:
                formatted = FMT % rcalc.get_sibling_relationship_string(
                    sib_type, MALE, gender, in_law_a=in_law)
                print(formatted + ' |info:', type_name, gender_name)
def _test_spouse(rcalc):
    """Print every partner-type string for each gender of the partner."""
    partner_types = (
        (rcalc.PARTNER_MARRIED, 'married'),
        (rcalc.PARTNER_UNMARRIED, 'unmarried'),
        (rcalc.PARTNER_CIVIL_UNION, 'civil union'),
        (rcalc.PARTNER_UNKNOWN_REL, 'unknown rel'),
        (rcalc.PARTNER_EX_MARRIED, 'ex-married'),
        (rcalc.PARTNER_EX_UNMARRIED, 'ex-unmarried'),
        (rcalc.PARTNER_EX_CIVIL_UNION, 'ex civil union'),
        (rcalc.PARTNER_EX_UNKNOWN_REL, 'ex unknown rel'),
    )
    for gender, gender_name in ((MALE, 'male'),
                                (FEMALE, 'female'),
                                (UNKNOWN, 'unknown')):
        for partner_type, type_name in partner_types:
            formatted = FMT % rcalc.get_partner_relationship_string(
                partner_type, MALE, gender)
            print(formatted + ' |info: gender=' + gender_name +
                  ', rel=' + type_name)
def test(rcalc, printrelstr):
    """
    This is a generic test suite for the singular relationship
    TRANSLATORS: do NOT translate, call this from
    __main__ in the rel_xx.py module.
    """
    import sys
    import argparse
    parser = argparse.ArgumentParser(description='Test the Relationship Calculator')
    # -r selects which group below runs; -s is forwarded to _test to pick
    # one singular-relationship sub-test
    parser.add_argument('-r', type=int, help='type of the relations test')
    parser.add_argument('-s', type=int, help='type of the singular relationship test')
    args = parser.parse_args()
    test_num = args.r
    if test_num is None:
        # no -r given: ask interactively on stdin
        print("""
Select a test:
0 - all tests
1 - Test normal relations
2 - Test step relations
3 - Test in-law relations (first pers)
4 - Test step and in-law relations
5 - Test sibling types
6 - Test partner types
Letter 'f' means Father, 'm' means Mother
Please enter a test number and press Enter for continue:
""")
        test_num = sys.stdin.readline().strip()
        test_num = int(test_num)
    if test_num == 0 or test_num == 1:
        print('\n\n=== Test normal relations ===')
        _test(rcalc, True, False, False, printrelstr, args.s)
    if test_num == 0 or test_num == 2:
        print('\n\n=== Test step relations ===')
        _test(rcalc, False, False, False, printrelstr, args.s)
    if test_num == 0 or test_num == 3:
        print('\n\n=== Test in-law relations (first pers) ===')
        _test(rcalc, True, True, False, printrelstr, args.s)
    if test_num == 0 or test_num == 4:
        print('\n\n=== Test step and in-law relations ===')
        _test(rcalc, False, True, False, printrelstr, args.s)
    if test_num == 0 or test_num == 5:
        print('\n\n=== Test sibling types ===')
        _testsibling(rcalc)
    if test_num == 0 or test_num == 6:
        print('\n\n=== Test partner types ===')
        _test_spouse(rcalc)
if __name__ == "__main__":
    """
    TRANSLATORS, copy this if statement at the bottom of your
    rel_xx.py module, after adding: 'from Relationship import test'
    and test your work with:
    export PYTHONPATH=/path/to/gramps/src
    python src/plugins/rel_xx.py

    See eg rel_fr.py at the bottom
    """
    # run the interactive/CLI test harness against the English calculator
    REL_CALC = RelationshipCalculator()
    test(REL_CALC, True)
| beernarrd/gramps | gramps/gen/relationship.py | Python | gpl-2.0 | 129,799 |
import reversion
from django.contrib import admin
from .models import Protocol, Topic, Institution
class ProtocolAdminIndex(reversion.VersionAdmin, admin.ModelAdmin):
    """Admin for Protocol records with django-reversion history.

    NOTE(review): newer django-reversion releases expose VersionAdmin as
    ``reversion.admin.VersionAdmin`` — confirm against the pinned version.
    """
    # columns shown in the protocol changelist
    list_display = [
        'number',
        'start_time',
        'get_topics',
        'information',
        'majority',
        'current_majority',
        'institution']
    list_display_links = ['number']
    list_filter = ['institution__name', 'topics']
    # two-pane selector for the attached files
    filter_horizontal = ['files']
    search_fields = ['number', 'institution__name', 'topics__name', 'information']
class TopicAdminIndex(reversion.VersionAdmin, admin.ModelAdmin):
    """Admin for Topic records (vote counts per protocol) with history."""
    list_display = ['name', 'voted_for', 'voted_against', 'voted_abstain', 'protocol']
    list_filter = ['protocol__number']
    search_fields = ['name', 'protocol__number']
    filter_horizontal = ['files']
class InstitutionAdminIndex(reversion.VersionAdmin):
    """Plain reversion-tracked admin for Institution; no customisation."""
    pass
# Wire the protocol models into the Django admin site.
admin.site.register(Institution, InstitutionAdminIndex)
admin.site.register(Topic, TopicAdminIndex)
admin.site.register(Protocol, ProtocolAdminIndex)
| Hackfmi/Diaphanum | protocols/admin.py | Python | mit | 1,042 |
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
from views import DashboardView
urlpatterns = patterns('',
    # Endpoint that renders the dashboard contents.
    # NOTE(review): pattern has no trailing '$', so any path beginning with
    # 'dashboard/render' matches — confirm this is intended.
    url(r'^dashboard/render', DashboardView.as_view(),
        name="render"),
    # Static container page; also unanchored at the end.
    url(r'^dashboard', direct_to_template,
        {'template': 'dashboard/index.html'}, name='main'),
)
| mivanov/editkit | editkit/dashboard/urls.py | Python | gpl-2.0 | 344 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from django.contrib.gis.db import models
from mapwidgets.widgets import GooglePointFieldWidget
from homes_for_sale.models import *
class SaleAdmin(admin.ModelAdmin):
    """Admin for Sale property listings, with a Google map point widget."""
    # slug is filled in automatically from the title while typing
    prepopulated_fields = {'slug': ('title',), }
    list_filter = ['branch__name', 'property_type__name', 'property_tenure__name', 'status']
    list_display = ['title', 'address_1', 'town_city', 'postcode', 'status']
    search_fields = ['title', 'address_1', 'town_city', 'postcode']
    # render every PointField with the map-based picker instead of raw WKT
    formfield_overrides = {
        models.PointField: {"widget": GooglePointFieldWidget}
    }
    fieldsets = (
        ('General', {
            'fields': ['title', 'slug', 'status']
        }),
        ('Address', {
            'fields': ['address_1', 'address_2', 'address_3', 'town_city', 'county', 'postcode', 'display_address'],
        }),
        ('Geographic', {
            'fields': ['location']
        }),
        ('Categorisation', {
            'fields': ['branch', 'property_tenure', 'property_type', 'bedrooms', 'en_suites', 'receptions', 'garden', 'parking', 'retirement', 'new_home', 'shared_ownership', 'auction']
        }),
        ('Detail', {
            'fields': ['details', 'summary']
        }),
        ('Price', {
            'fields': ['price', 'qualifier']
        }),
        ('Date', {
            'fields': ['expires_at']
        }),
    )

    class Media:
        # extra stylesheet to size/override the admin map widget
        css = {
            'all':['build/css/admin/override/map.min.css']
        }
class SaleFeatureAdmin(admin.ModelAdmin):
    """Admin for the bullet-point features attached to a sale property."""
    fields = ('property','text','display_order')
    list_display = ['property','text']
class SaleContactAdmin(admin.ModelAdmin):
    """Admin for enquiries submitted about a sale property."""
    fields = ('property','title','forename','surname','message','telephone','email','country','postcode','more_details','view_property')
class SalePictureAdmin(admin.ModelAdmin):
    """Admin for photos attached to a sale property."""
    fields = ('property','caption','attachment','display_order','status')
    list_display = ['property','caption']
class SaleMediaAdmin(admin.ModelAdmin):
    """Admin for non-photo media (eg floorplans, brochures) on a property."""
    fields = ('property','media_type','description','attachment','status')
    list_display = ['property', 'media_type','description','status']
class SaleNoteAdmin(admin.ModelAdmin):
    """Admin for internal notes attached to a sale property."""
    fields = ('property','text')
    list_display = ['property']
# Wire the sale models into the Django admin site.
admin.site.register(Sale, SaleAdmin)
admin.site.register(SaleFeature, SaleFeatureAdmin)
admin.site.register(SalePicture, SalePictureAdmin)
admin.site.register(SaleMedia, SaleMediaAdmin)
admin.site.register(SaleContact, SaleContactAdmin)
admin.site.register(SaleNote, SaleNoteAdmin)
| bertnotbob/django-property | homes_for_sale/admin.py | Python | mit | 2,584 |
from django.test import RequestFactory, TestCase, override_settings
from django.urls import reverse
from gem.context_processors import (
compress_settings,
detect_freebasics,
detect_kaios,
)
from gem.tests.base import GemTestCaseMixin
class TestDetectKaiOS(TestCase):
    """The is_via_kaios flag should track the request's 'kaios.' subdomain."""

    def setUp(self):
        self.request_factory = RequestFactory()

    def test_returns_false_for_requests_without_kaios_subdomain(self):
        context = detect_kaios(self.request_factory.get('/'))
        self.assertEqual(context, {'is_via_kaios': False})

    def test_returns_true_for_requests_with_kaios_subdomain(self):
        request = self.request_factory.get(
            '/', HTTP_HOST='kaios.localhost:80')
        self.assertEqual(detect_kaios(request), {'is_via_kaios': True})
class TestDetectFreebasics(TestCase):
    """The is_via_freebasics flag should react to the Free Basics markers."""

    def setUp(self):
        self.request_factory = RequestFactory()

    def _check(self, expected, **extra):
        # build a GET request with the given headers and assert the context
        request = self.request_factory.get('/', **extra)
        self.assertEqual(
            detect_freebasics(request),
            {'is_via_freebasics': expected},
        )

    def test_returns_false_by_default(self):
        self._check(False)

    def test_returns_true_if_internetorg_in_httpvia(self):
        self._check(True, HTTP_VIA='Internet.org')

    def test_returns_true_if_internetorgapp_in_user_agent(self):
        self._check(True, HTTP_USER_AGENT='InternetOrgApp')

    def test_returns_true_if_true_in_xiorgsfbs(self):
        self._check(True, HTTP_X_IORG_FBS='true')
class TestCompressSettings(TestCase, GemTestCaseMixin):
    """Checks the values compress_settings exposes to templates."""

    def setUp(self):
        # mk_main comes from GemTestCaseMixin and builds the site fixture
        self.main = self.mk_main(
            title='main1', slug='main1', path='00010002', url_path='/main1/')

    @override_settings(ENV='test_env', STATIC_URL='test_static_url')
    def test_returns_settings(self):
        request = RequestFactory().get('/')
        # NOTE(review): the expected STATIC_URL gains a leading slash, so
        # the context processor apparently normalises it — confirm.
        self.assertEqual(
            compress_settings(request),
            {
                'LOGIN_URL': reverse('molo.profiles:auth_login'),
                'VIEW_PROFILE_URL': u'/profiles/view/myprofile/',
                'EDIT_PROFILE_URL': u'/profiles/edit/myprofile/',
                'REGISTRATION_URL': u'/profiles/register/',
                'LOGOUT_URL': reverse('molo.profiles:auth_logout'),
                'ENV': 'test_env',
                'STATIC_URL': '/test_static_url',
            }
        )
| praekelt/molo-gem | gem/tests/test_context_processors.py | Python | bsd-2-clause | 2,737 |
from distutils import dirname
from mgl2d.graphics.quad_drawable import QuadDrawable
from mgl2d.graphics.shader_program import ShaderProgram
from mgl2d.graphics.texture import Texture
from mgl2d.math.vector2 import Vector2
class CharDef:
    """Metrics for a single glyph parsed from a BMFont 'char' line."""

    def __init__(self):
        self.id = 0
        # Top-left corner of the glyph inside its texture page (pixels).
        self.x = self.y = 0
        self.width = self.height = 0
        # Offsets applied to the pen position when rendering the glyph.
        self.offset_x = self.offset_y = 0
        # Horizontal pen advance after drawing this glyph.
        self.advance_x = 0
        self.page_index = 0
        # not supported (1 = blue, 2 = green, 4 = red, 8 = alpha, 15 = all channels)
        self.texture_channel = 0
        self.letter = ''


# File format: http://www.angelcode.com/products/bmfont/doc/file_format.html
class BMFontDef:
    """Parsed AngelCode BMFont text descriptor (.fnt file).

    Exposes the font metrics, the texture page file names and per-character
    CharDef entries keyed by the character's letter.
    """

    def __init__(self, filename):
        """Load and parse the BMFont text file at *filename*."""
        self._page_files = []
        self._char_definitions = {}
        self._parse_file(filename)

    @property
    def size(self):
        # Font size as exported by the generator (pixels).
        return self._size

    @property
    def page_width(self):
        return self._page_w

    @property
    def page_height(self):
        return self._page_h

    @property
    def bold(self):
        return self._bold

    @property
    def italic(self):
        return self._italic

    @property
    def page_files(self):
        # Texture page file names, indexed by page id.
        return self._page_files

    def get_char(self, letter):
        """Return the CharDef for *letter*; raises KeyError if unknown."""
        return self._char_definitions[letter]

    def extents_for_char(self, char):
        """Return (width, height) occupied by *char*, or (0, 0) if the
        character is not part of the font.
        """
        # BUGFIX: definitions are keyed by the letter itself (see
        # _parse_char), not by its ordinal; the ord() lookup never matched,
        # so every character reported (0, 0).
        if char not in self._char_definitions:
            return 0, 0
        c = self._char_definitions[char]
        width = c.advance_x
        height = c.height + c.offset_y
        return width, height

    def _parse_file(self, filename):
        """Parse the descriptor line by line, dispatching on the line tag."""
        with open(filename) as file:
            for line in file.readlines():
                section, data = self._tokenize_line(line)
                if not section:
                    continue
                if section == 'info':
                    self._parse_info(data)
                elif section == 'common':
                    self._parse_common(data)
                elif section == 'page':
                    self._parse_page(data)
                elif section == 'chars':
                    # Only carries the character count; not needed.
                    pass
                elif section == 'char':
                    self._parse_char(data)

    def _parse_info(self, data):
        # 'info' line: face/style metadata.
        self._face = data['face'].replace('"', '')
        self._size = int(data['size'])
        self._bold = True if int(data['bold']) == 1 else False
        self._italic = True if int(data['italic']) == 1 else False
        self._unicode = True if int(data['unicode']) == 1 else False
        self._stretch_h = int(data['stretchH'])
        self._smooth = int(data['smooth'])
        self._super_sampling = int(data['aa'])
        # self._char_paddind = data['padding']
        # self._char_spacing = data['spacing']

    def _parse_common(self, data):
        # 'common' line: layout metrics shared by all glyphs.
        self._line_height = int(data['lineHeight'])
        self._base = int(data['base'])
        self._page_w = int(data['scaleW'])
        self._page_h = int(data['scaleH'])
        self._packed = int(data['packed'])
        self._page_files = [''] * int(data['pages'])

    def _parse_page(self, data):
        # 'page' line: texture file for one page id.
        self._page_files[int(data['id'])] = data['file'].replace('"', '')

    def _parse_char(self, data):
        # 'char' line: one glyph definition.
        char = CharDef()
        char.id = int(data['id'])
        char.x = int(data['x'])
        char.y = int(data['y'])
        char.width = int(data['width'])
        char.height = int(data['height'])
        char.offset_x = int(data['xoffset'])
        char.offset_y = int(data['yoffset'])
        char.advance_x = int(data['xadvance'])
        char.page_index = int(data['page'])
        char.texture_channel = int(data['chnl'])
        char.letter = data['letter'].replace('"', '')
        # The exporter writes the space character as the word 'space'.
        if char.letter == 'space':
            char.letter = ' '
        self._char_definitions[char.letter] = char

    def _tokenize_line(self, line):
        """Split a descriptor line into (section_tag, {key: value}).

        Returns (None, None) for blank lines. Note: values containing
        spaces inside quotes are not supported by this simple tokenizer.
        """
        line = line.splitlines()[0]
        if not line:
            return None, None
        data = {}
        tokens = []
        for part in line.split('='):
            tokens.extend(part.rsplit(' ', 1))
        section, tokens = tokens[0], tokens[1:]
        for k, v in zip(tokens[::2], tokens[1::2]):
            data[k] = v
        return section, data
# Supports multiple font files with unique sizes
class Font:
    """Bitmap-font renderer built on BMFontDef descriptors.

    Several .fnt files may be loaded; each face is keyed by its pixel size,
    which callers pass as ``font_size`` to the draw methods.
    """
    def __init__(self):
        self._font_faces = {}  # font size -> BMFontDef
        self._page_textures = {}  # font size -> list of Texture, by page index
        # One shared shader + quad pair is reused for every glyph drawn.
        self._character_program = ShaderProgram.from_sources(vert_source=self.vert_shader_base,
                                                             frag_source=self.frag_shader_texture)
        self._quad = QuadDrawable()
        self._quad.shader = self._character_program

    def load_bmfont_file(self, filename):
        """Parse *filename* and load its texture pages, keyed by font size."""
        base_dir = dirname(filename)
        font_def = BMFontDef(filename)
        self._font_faces[font_def.size] = font_def
        self._page_textures[font_def.size] = []
        for file in font_def.page_files:
            path = base_dir + '/' + file
            self._page_textures[font_def.size].append(Texture().load_from_file(path))

    def draw_string(self, screen, font_size, string, x, y, scale=1):
        """Draw *string* with the face loaded for *font_size*, starting at (x, y)."""
        font = self._font_faces[font_size]
        for char in string:
            c = font.get_char(char)
            self._quad.texture = self._page_textures[font_size][c.page_index]
            self._quad.size = Vector2(c.width, c.height) * scale
            self._quad.pos = Vector2(x + c.offset_x * scale, y + c.offset_y * scale)
            self._character_program.bind()
            # area_pos/area_size select the glyph's sub-rectangle of the
            # texture page in normalized [0, 1] coordinates.
            self._character_program.set_uniform_2f('area_pos', c.x / font.page_width, c.y / font.page_height)
            self._character_program.set_uniform_2f('area_size', c.width / font.page_width, c.height / font.page_height)
            # NOTE(review): unlike draw_char, the 'projection' uniform is not
            # set here -- confirm QuadDrawable.draw uploads it.
            self._quad.draw(screen)
            # Advance the pen for the next glyph.
            x += c.advance_x * scale

    def draw_char(self, screen, font_size, char, x, y, scale=1):
        """Draw a single glyph at (x, y) and return its unscaled x-advance."""
        font = self._font_faces[font_size]
        c = font.get_char(char)
        s = self._character_program
        self._quad.texture = self._page_textures[font_size][c.page_index]
        self._quad.size = Vector2(c.width, c.height) * scale
        self._quad.pos = Vector2(x, y + c.offset_y * scale)
        self._quad.shader = s
        s.bind()
        s.set_uniform_matrix4('projection', screen.projection_matrix.m)
        s.set_uniform_2f('area_pos', c.x / font.page_width, c.y / font.page_height)
        s.set_uniform_2f('area_size', c.width / font.page_width, c.height / font.page_height)
        self._quad.draw(screen)
        return c.advance_x

    # Vertex shader: remaps the quad's UVs to the glyph's sub-rectangle of
    # the page texture via the area_pos/area_size uniforms.
    vert_shader_base = """
    #version 330 core
    uniform mat4 model;
    uniform mat4 projection;
    uniform vec2 area_pos;
    uniform vec2 area_size;
    layout(location=0) in vec2 vertex;
    layout(location=1) in vec2 uv;
    out vec2 uv_out;
    void main() {
        vec2 texture_out = uv;
        if (gl_VertexID == 0) {
            texture_out = area_pos;
        } else if (gl_VertexID == 1) {
            texture_out = vec2(area_pos.x, area_pos.y+area_size.y);
        } else if (gl_VertexID == 2) {
            texture_out = vec2(area_pos.x+area_size.x, area_pos.y+area_size.y);
        } else if (gl_VertexID == 3) {
            texture_out = vec2(area_pos.x+area_size.x, area_pos.y);
        }
        vec4 vertex_world = model * vec4(vertex, 1, 1);
        gl_Position = projection * vertex_world;
        uv_out = texture_out;
    }
    """

    # Fragment shader: plain textured output.
    frag_shader_texture = """
    #version 330 core
    in vec2 uv_out;
    out vec4 color;
    uniform sampler2D tex;
    void main() {
        color = texture(tex, uv_out);
    }
    """
| maxfish/mgl2d | mgl2d/graphics/font.py | Python | mit | 7,748 |
from django.conf import settings
from django.http import HttpResponse
from django.test import TestCase
from django.test.client import Client
from paypal.standard.ipn.models import PayPalIPN
from paypal.standard.ipn.signals import (payment_was_successful,
payment_was_flagged)
# Canonical set of POST parameters PayPal sends for a completed
# express-checkout IPN; individual tests copy and override fields as needed.
IPN_POST_PARAMS = {
    "protection_eligibility": "Ineligible",
    "last_name": "User",
    "txn_id": "51403485VH153354B",
    "receiver_email": settings.PAYPAL_RECEIVER_EMAIL,
    "payment_status": "Completed",
    "payment_gross": "10.00",
    "tax": "0.00",
    "residence_country": "US",
    "invoice": "0004",
    "payer_status": "verified",
    "txn_type": "express_checkout",
    "handling_amount": "0.00",
    "payment_date": "23:04:06 Feb 02, 2009 PST",
    "first_name": "Test",
    "item_name": "",
    "charset": "windows-1252",
    "custom": "website_id=13&user_id=21",
    "notify_version": "2.6",
    "transaction_subject": "",
    "test_ipn": "1",
    "item_number": "",
    "receiver_id": "258DLEHY2BDK6",
    "payer_id": "BN5JZ2V7MLEV4",
    "verify_sign": "An5ns1Kso7MWUdW4ErQKJJJ4qi4-AqdZy6dD.sGO3sDhTf1wAbuO2IZ7",
    "payment_fee": "0.59",
    "mc_fee": "0.59",
    "mc_currency": "USD",
    "shipping": "0.00",
    "payer_email": "bishan_1233269544_per@gmail.com",
    "payment_type": "instant",
    "mc_gross": "10.00",
    "quantity": "1",
}
class IPNTest(TestCase):
    """End-to-end tests for the IPN endpoint, with the remote PayPal
    postback verification monkey-patched out."""
    urls = 'paypal.standard.ipn.tests.test_urls'

    def setUp(self):
        # Run each test with DEBUG enabled, restoring the old value after.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        # Monkey patch over PayPalIPN to make it get a VERFIED response.
        self.old_postback = PayPalIPN._postback
        PayPalIPN._postback = lambda self: "VERIFIED"

    def tearDown(self):
        settings.DEBUG = self.old_debug
        PayPalIPN._postback = self.old_postback

    def assertGotSignal(self, signal, flagged):
        """POST a standard IPN and assert *signal* fired with flag==flagged."""
        # Check the signal was sent. These get lost if they don't reference self.
        self.got_signal = False
        self.signal_obj = None

        def handle_signal(sender, **kwargs):
            self.got_signal = True
            self.signal_obj = sender
        # NOTE(review): the handler is never disconnected, so it stays
        # attached for subsequent tests in the same process -- confirm
        # this is intended.
        signal.connect(handle_signal)
        response = self.client.post("/ipn/", IPN_POST_PARAMS)
        self.assertEqual(response.status_code, 200)
        ipns = PayPalIPN.objects.all()
        self.assertEqual(len(ipns), 1)
        ipn_obj = ipns[0]
        self.assertEqual(ipn_obj.flag, flagged)
        self.assertTrue(self.got_signal)
        self.assertEqual(self.signal_obj, ipn_obj)

    def test_correct_ipn(self):
        self.assertGotSignal(payment_was_successful, False)

    def test_failed_ipn(self):
        # Force the postback to report the IPN as invalid.
        PayPalIPN._postback = lambda self: "INVALID"
        self.assertGotSignal(payment_was_flagged, True)

    def assertFlagged(self, updates, flag_info):
        """POST the standard IPN with *updates* applied and assert it is
        flagged with the exact *flag_info* message."""
        params = IPN_POST_PARAMS.copy()
        params.update(updates)
        response = self.client.post("/ipn/", params)
        self.assertEqual(response.status_code, 200)
        ipn_obj = PayPalIPN.objects.all()[0]
        self.assertEqual(ipn_obj.flag, True)
        self.assertEqual(ipn_obj.flag_info, flag_info)

    def test_incorrect_receiver_email(self):
        update = {"receiver_email": "incorrect_email@someotherbusiness.com"}
        flag_info = "Invalid receiver_email. (incorrect_email@someotherbusiness.com)"
        self.assertFlagged(update, flag_info)

    def test_invalid_payment_status(self):
        update = {"payment_status": "Failed"}
        flag_info = "Invalid payment_status. (Failed)"
        self.assertFlagged(update, flag_info)

    def test_duplicate_txn_id(self):
        # Posting the same txn_id twice must flag the second record.
        self.client.post("/ipn/", IPN_POST_PARAMS)
        self.client.post("/ipn/", IPN_POST_PARAMS)
        self.assertEqual(len(PayPalIPN.objects.all()), 2)
        ipn_obj = PayPalIPN.objects.order_by('-created_at')[1]
        self.assertEqual(ipn_obj.flag, True)
        self.assertEqual(ipn_obj.flag_info, "Duplicate txn_id. (51403485VH153354B)")
# -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : lookup_property
Description : Set properties for Lookup data type
Date : 02/January/2016
copyright : (C) 2015 by UN-Habitat and implementing partners.
See the accompanying file CONTRIBUTORS.txt in the root
email : stdm@unhabitat.org
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from qgis.PyQt import uic
from qgis.PyQt.QtWidgets import (
QDialog
)
from stdm.data.configuration.entity_relation import EntityRelation
from stdm.ui.gui_utils import GuiUtils
from stdm.ui.wizard.create_lookup import LookupEditor
# Load the Qt Designer form for this editor at import time.
WIDGET, BASE = uic.loadUiType(
    GuiUtils.get_ui_file_path('wizard/ui_lookup_property.ui'))
# Lookup (ValueList) short names to exclude from the selection combo.
EX_VALUE_LIST = []
class LookupProperty(WIDGET, BASE):
    """
    Editor to create/edit Lookup column property
    """

    def __init__(self, parent, form_fields, profile=None):
        """
        :param parent: Owner of this form
        :type parent: QWidget
        :param form_fields: Form state read by this editor; uses the keys
         'entity_relation' (EntityRelation) and 'in_db' (bool flag telling
         whether the column already exists in the database).
        :type form_fields: dict
        :param profile: Current configuration profile
        :type profile: Profile
        """
        QDialog.__init__(self, parent)
        self.setupUi(self)
        self._entity_relation = form_fields['entity_relation']
        self.in_db = form_fields['in_db']
        self._lookup_name = ''
        self._profile = profile
        self.init_gui()

    def init_gui(self):
        """
        Initializes form widgets
        """
        self.btnNewlookup.clicked.connect(self.create_lookup)
        lookup_names = self.lookup_entities()
        self.fill_lookup_cbo(lookup_names)
        # When editing an existing relation, preselect its parent lookup.
        if self._entity_relation:
            self._lookup_name = self._entity_relation.parent.short_name
            self.cboPrimaryEntity.setCurrentIndex( \
                self.cboPrimaryEntity.findText(self._lookup_name))
        # disable controls if the column already exist in the database
        self.cboPrimaryEntity.setEnabled(not self.in_db)
        self.btnNewlookup.setEnabled(not self.in_db)

    def create_lookup(self):
        """
        Creates a new lookup entity, insert it to the current lookup combobox
        and make it the current lookup
        """
        editor = LookupEditor(self, self._profile)
        result = editor.exec_()
        # A result of 1 means the dialog was accepted.
        if result == 1:
            name = editor.lookup.short_name
            names = []
            names.append(name)
            self.cboPrimaryEntity.insertItems(0, names)
            self.cboPrimaryEntity.setCurrentIndex( \
                self.cboPrimaryEntity.findText(name))

    def lookup_entities(self):
        """
        Returns a list of ValueList (a.k.a lookup) names in the current profile
        rtype: list
        """
        names = []
        # Skip lookups explicitly excluded via EX_VALUE_LIST.
        for value_list in self._profile.value_lists():
            if value_list.short_name not in EX_VALUE_LIST:
                names.append(value_list.short_name)
        return names

    def fill_lookup_cbo(self, names):
        """
        Fill combobox with entity names
        :param names: List of entity names
        """
        self.cboPrimaryEntity.clear()
        self.cboPrimaryEntity.insertItems(0, names)
        self.cboPrimaryEntity.setCurrentIndex(0)

    def add_values(self):
        """
        Construct an EntityRelation instance
        """
        lookup_name = str(self.cboPrimaryEntity.currentText())
        self._lookup_name = lookup_name
        # Only the parent side of the relation is known at this point; the
        # child fields are filled in later by the column editor.
        er_fields = {}
        er_fields['parent'] = lookup_name
        er_fields['parent_column'] = None
        er_fields['display_columns'] = []
        er_fields['child'] = None
        er_fields['child_column'] = None
        self._entity_relation = EntityRelation(self._profile, **er_fields)

    def entity_relation(self):
        """
        Returns an instance of EntityRelation
        rtype: EntityRelation
        """
        return self._entity_relation

    def accept(self):
        # Build the relation from the current selection before closing.
        self.add_values()
        self.done(1)

    def reject(self):
        self.done(0)
| gltn/stdm | stdm/ui/wizard/lookup_property.py | Python | gpl-2.0 | 4,938 |
# -*- coding: utf-8 -*-
from jenkins import Jenkins
from simple_manager.utils import git_add
from simple_manager.utils import git_clone
from simple_manager.utils import git_commit
from simple_manager.utils import git_push_branch
from simple_manager.utils import git_push_tag
from simple_manager.utils import git_tag
from shutil import copytree
import os
class Repository():
    """A source repository to migrate, plus the metadata used to generate
    its packaging files (setup.py, README.md) and a Jenkins job.
    """

    def __init__(self, name, remote_url=None, description=None,
                 long_description=None, keywords=None,
                 author=None, email=None, version=None):
        self.name = name
        self.remote_url = remote_url
        self.description = description
        self.long_description = long_description
        self.keywords = keywords
        self.author = author
        self.email = email
        self.version = version

    def commit_changes(self, workspace):
        """Stage, commit and push everything for this repo to master."""
        git_add(self.name, workspace)
        git_commit(workspace, self.name)
        git_push_branch(workspace, self.name, "master")

    def tag(self, workspace, project, version):
        """Create and push an annotated migration tag for *version*."""
        git_tag(workspace, project, version, "Migration package to a new respository.")
        git_push_tag(workspace, project)

    def extract_repo(self, frm, to):
        """Copy the package at *frm* into a freshly cloned repo under *to*,
        generate setup.py / README.md from the bundled templates, then
        commit, push and tag the result.
        """
        import git_repo_manager
        pkg_name = frm.split('/')[-1]
        path = os.path.join(to, self.name, pkg_name)
        git_clone(self.name, self.remote_url, to)
        # BUGFIX: format the message before printing; the original called
        # .format() on print()'s None return value (AttributeError).
        print("Extracting code from {0} to {1}".format(frm, path))
        copytree(src=frm, dst=path)
        print("Generating necessary files do create a dist")
        print(" ... setup.py")
        with open(os.path.join(git_repo_manager.__path__[0],
                               "templates/setup.py.conf"), 'r') as setup_tmpl:
            content = setup_tmpl.read()
            content = content.replace("$name", self.name)
            content = content.replace("$version", self.version)
            # BUGFIX: $description was substituted with self.author.
            content = content.replace("$description", self.description)
            content = content.replace("$long_description", self.long_description)
            content = content.replace("$keywords", self.keywords)
            content = content.replace("$author", self.author)
            content = content.replace("$email", self.email)
            content = content.replace("$url", self.remote_url)
            content = content.replace("$pkg_dir", pkg_name)
            with open(os.path.join(to, self.name, "setup.py"),
                      'w') as setup_file:
                setup_file.write(content)
        print(" ... README.md")
        with open(os.path.join(git_repo_manager.__path__[0],
                               "templates/README.md.conf"), 'r') as rdm_tmpl:
            content = rdm_tmpl.read()
            content = content.replace("$name", self.name)
            # BUGFIX: $description was substituted with self.author.
            content = content.replace("$description", self.description)
            with open(os.path.join(to, self.name, "README.md"),
                      'w') as rdm_file:
                rdm_file.write(content)
        self.commit_changes(to)
        self.tag(to, self.name, self.version)

    def config_jenkins(self, jenkins_url, jenkins_config_file, job_name):
        """Create a Jenkins job named *job_name* from an XML config file."""
        ci = Jenkins(url=jenkins_url)
        # The with-statement closes the file; no explicit close() needed.
        with open(jenkins_config_file, "r") as config_file:
            config = config_file.read()
        ci.create_job(job_name, config)
class GitoriousRepository(Repository):
    # Marker subclass for Gitorious-hosted repositories; no behaviour of
    # its own yet.
    pass
| victorpantoja/git-repo-manager | git_repo_manager/classes.py | Python | mit | 3,348 |
from django.contrib.auth.models import Group
from django.db import models
# Create your models here.
from proyectos.models import Proyecto
class Rol(Group):
    # A project-scoped role: extends Django's auth Group with the project
    # it belongs to.
    # NOTE(review): ForeignKey without on_delete implies CASCADE and is only
    # valid on Django < 2.0 -- confirm the project's Django version.
    proyecto = models.ForeignKey(Proyecto)
from pathlib import Path
import pytest
from .factories import LibraryFactory
# pytestmark = pytest.mark.django_db
@pytest.fixture
def photo_fixture_snow(db):
    """Photo record for the bundled snow.jpg in a fresh library."""
    from photonix.photos.utils.db import record_photo
    path = Path(__file__).parent / 'photos' / 'snow.jpg'
    return record_photo(str(path), LibraryFactory())
@pytest.fixture
def photo_fixture_tree(db):
    """Photo record for the bundled tree.jpg in a fresh library."""
    from photonix.photos.utils.db import record_photo
    path = Path(__file__).parent / 'photos' / 'tree.jpg'
    return record_photo(str(path), LibraryFactory())
def test_color_via_runner(photo_fixture_snow):
    """Color classifier: a raw path yields results only; a Photo id also tags."""
    from photonix.classifiers.color.model import run_on_photo
    # Path on it's own returns a None Photo object along with the result
    snow = str(Path(__file__).parent / 'photos' / 'snow.jpg')
    photo, result = run_on_photo(snow)
    assert photo is None
    assert len(result) == 8
    assert result[0][0] == 'Azure'
    assert '{0:.3f}'.format(result[0][1]) == '0.891'
    # Passing in a Photo object should tag the object
    assert photo_fixture_snow.photo_tags.count() == 0
    photo, result = run_on_photo(photo_fixture_snow.id)
    assert photo_fixture_snow.photo_tags.count() == 8
    assert photo_fixture_snow.photo_tags.all()[0].tag.name == 'Azure'
    # 'C' is the tag-type code used for color tags.
    assert photo_fixture_snow.photo_tags.all()[0].tag.type == 'C'
    assert '{0:.3f}'.format(photo_fixture_snow.photo_tags.all()[0].significance) == '0.891'
def test_location_via_runner(photo_fixture_tree):
    """Location classifier: GPS-less photo yields no city/country; GPS photo
    is resolved and tagged with country and city."""
    from photonix.classifiers.location.model import run_on_photo
    # Path on it's own returns a None Photo object along with the result
    snow = str(Path(__file__).parent / 'photos' / 'snow.jpg')
    photo, result = run_on_photo(snow)
    # This photo has no GPS coordinates
    assert photo is None
    assert result['city'] is None
    assert result['country'] is None
    # Path which does have GPS coordinates
    tree = str(Path(__file__).parent / 'photos' / 'tree.jpg')
    photo, result = run_on_photo(tree)
    assert result['country']['name'] == 'Greece'
    assert result['country']['code'] == 'GR'
    assert result['city']['name'] == 'Firá'
    assert result['city']['country_name'] == 'Greece'
    # Photo object with location to tag should have tags for country and city
    assert photo_fixture_tree.photo_tags.count() == 0
    photo, result = run_on_photo(photo_fixture_tree.id)
    assert photo.photo_tags.all().count() == 2
    assert photo.photo_tags.all()[0].tag.name == 'Greece'
    assert photo.photo_tags.all()[0].confidence == 1.0
    assert photo.photo_tags.all()[0].significance == 1.0
    assert photo.photo_tags.all()[1].tag.name == 'Firá'
    assert photo.photo_tags.all()[1].confidence == 0.5
    assert photo.photo_tags.all()[1].significance == 0.5
    # The city tag is parented to its country tag.
    assert photo.photo_tags.all()[1].tag.parent.name == 'Greece'
def test_object_via_runner(photo_fixture_snow):
    """Object classifier: raw path yields results only; Photo id also tags."""
    from photonix.classifiers.object.model import run_on_photo
    # Path on it's own returns a None Photo object along with the result
    snow = str(Path(__file__).parent / 'photos' / 'snow.jpg')
    photo, result = run_on_photo(snow)
    assert photo is None
    assert len(result) == 3
    assert result[0]['label'] == 'Tree'
    assert '{0:.3f}'.format(result[0]['significance']) == '0.134'
    # Passing in a Photo object should tag the object
    assert photo_fixture_snow.photo_tags.count() == 0
    photo, result = run_on_photo(photo_fixture_snow.id)
    assert photo_fixture_snow.photo_tags.count() == 3
    assert photo_fixture_snow.photo_tags.all()[0].tag.name == 'Tree'
    # 'O' is the tag-type code used for object tags.
    assert photo_fixture_snow.photo_tags.all()[0].tag.type == 'O'
    assert '{0:.3f}'.format(photo_fixture_snow.photo_tags.all()[0].significance) == '0.134'
def test_style_via_runner(photo_fixture_snow):
    """Style classifier: raw path yields results only; Photo id also tags."""
    from photonix.classifiers.style.model import run_on_photo
    # Path on it's own returns a None Photo object along with the result
    snow = str(Path(__file__).parent / 'photos' / 'snow.jpg')
    photo, result = run_on_photo(snow)
    assert photo is None
    assert len(result) == 1
    assert result[0][0] == 'serene'
    assert '{0:.3f}'.format(result[0][1]) == '0.962'
    # Passing in a Photo object should tag the object
    assert photo_fixture_snow.photo_tags.count() == 0
    photo, result = run_on_photo(photo_fixture_snow.id)
    assert photo_fixture_snow.photo_tags.count() == 1
    assert photo_fixture_snow.photo_tags.all()[0].tag.name == 'serene'
    # 'S' is the tag-type code used for style tags.
    assert photo_fixture_snow.photo_tags.all()[0].tag.type == 'S'
    assert '{0:.3f}'.format(photo_fixture_snow.photo_tags.all()[0].significance) == '0.962'
| damianmoore/photo-manager | tests/test_classifier_runners.py | Python | agpl-3.0 | 4,655 |
import bpy, http, threading, pprint, types, os
import http.server
import socketserver
from mathutils import Euler
from bpy.props import *
from bpy.types import Operator
from .websocket_server import websocket_server
# Blender add-on metadata block, read by the add-on manager.
bl_info = {
    "name": "Remote 3D Navigation ",
    "author": "Laxminarayan Kamath <kamathln@gmail.com>",
    "version": (1, 2),
    "blender": (2, 57, 0),
    "location": "View3D > Tool Shelf > 3D Remote",
    "description": "Navigate the Camera & 3D View from any html5 capable device with accelerometer",
    "warning": "Uses Multi-Threading! Toy add-on!! Do Not use on production blender!",
    "wiki_url": "",
    "tracker_url": "",
    "category": "3D View"}
# Shared namespace dicts reachable from the server threads; 'views2control'
# and 'scenes2control' lists are created here during register().
rpcserver_localvars=types.ModuleType('main').__dict__
rpcserver_globalvars=globals()
# Server singletons, created lazily in register().
rpcserver = None
tcpserver = None
wstserver = None
# Maps command-name string -> handler callable; populated in register().
commands = {}
def rotate_view(x_angle, y_angle, z_angle):
    """Rotate every subscribed 3D view by the given Euler deltas (radians).

    Returns True on success, or a formatted error string on failure.
    """
    x_angle, y_angle, z_angle = float(x_angle), float(y_angle), float(z_angle)
    try:
        delta = Euler((x_angle, y_angle, z_angle))
        for view in rpcserver_localvars['views2control']:
            view.region_3d.view_rotation.rotate(delta)
        return True
    except Exception as exc:
        return pprint.pformat(exc)
def move_view(x_distance, y_distance, z_distance):
    """Translate the focal point of every subscribed 3D view.

    Returns True on success, or a formatted error string on failure.
    """
    x_distance = float(x_distance)
    y_distance = float(y_distance)
    z_distance = float(z_distance)
    print('distances {0}_{1}_{2}'.format(x_distance, y_distance, z_distance))
    try:
        for view in rpcserver_localvars['views2control']:
            # Build the offset with the same vector type as view_location.
            offset = view.region_3d.view_location.__class__(
                (x_distance, y_distance, z_distance))
            view.region_3d.view_location += offset
        return True
    except Exception as exc:
        return pprint.pformat(exc)
def move_camera(x_distance, y_distance, z_distance):
    """Translate the camera of every subscribed view by the given offsets.

    Returns True on success, or a formatted error string on failure.
    """
    x_distance = float(x_distance)
    y_distance = float(y_distance)
    z_distance = float(z_distance)
    try:
        for view in rpcserver_localvars['views2control']:
            cam = view.camera
            # Build the offset with the same vector type as the location.
            cam.location += cam.location.__class__(
                (x_distance, y_distance, z_distance))
        return True
    except Exception as exc:
        return pprint.pformat(exc)
def rotate_camera(x_angle, y_angle, z_angle):
    """Rotate the camera of every subscribed view by the given deltas (radians).

    Returns True on success, or a formatted error string on failure.
    """
    dx, dy, dz = float(x_angle), float(y_angle), float(z_angle)
    try:
        for view in rpcserver_localvars['views2control']:
            euler = view.camera.rotation_euler
            euler.x += dx
            euler.y += dy
            euler.z += dz
        return True
    except Exception as exc:
        return pprint.pformat(exc)
def rotateabsolute_camera(x_angle, y_angle, z_angle):
    """Set the camera rotation of every subscribed view to absolute Euler angles.

    Returns True on success, or a formatted error string on failure.
    """
    target = (float(x_angle), float(y_angle), float(z_angle))
    try:
        for view in rpcserver_localvars['views2control']:
            # Rebuild the rotation with the same Euler type as the original.
            view.camera.rotation_euler = view.camera.rotation_euler.__class__(target)
        return True
    except Exception as exc:
        return pprint.pformat(exc)
def move_selection(x_distance, y_distance, z_distance):
    """Translate the active object of every subscribed scene.

    Arguments arrive as strings from the remote. Returns True on success,
    or a formatted error string on failure.
    """
    x_distance = float(x_distance)
    y_distance = float(y_distance)
    z_distance = float(z_distance)
    try:
        for scene in rpcserver_localvars['scenes2control']:
            obj = scene.objects.active
            # BUGFIX: vector constructors take a single sequence argument;
            # the original passed three positional floats and always raised
            # TypeError (compare move_camera / move_view, which use a tuple).
            obj.location += obj.location.__class__(
                (x_distance, y_distance, z_distance))
        return True
    except Exception as E:
        return pprint.pformat(E)
def rotate_selection(x_angle, y_angle, z_angle):
    """Rotate the active object of every subscribed scene by the given deltas.

    Returns True on success, or a formatted error string on failure.
    """
    dx, dy, dz = float(x_angle), float(y_angle), float(z_angle)
    try:
        for scene in rpcserver_localvars['scenes2control']:
            euler = scene.objects.active.rotation_euler
            euler.x += dx
            euler.y += dy
            euler.z += dz
        return True
    except Exception as exc:
        return pprint.pformat(exc)
def rotateabsolute_selection(x_angle, y_angle, z_angle):
    """Set the active object's rotation in every subscribed scene to
    absolute Euler angles.

    Returns True on success, or a formatted error string on failure.
    """
    target = (float(x_angle), float(y_angle), float(z_angle))
    try:
        for scene in rpcserver_localvars['scenes2control']:
            active = scene.objects.active
            # Rebuild the rotation with the same Euler type as the original.
            active.rotation_euler = active.rotation_euler.__class__(target)
        return True
    except Exception as exc:
        return pprint.pformat(exc)
def handle_wscmdline(client, cmdline):
    """WebSocket entry point: answer keep-alive PINGs, forward all other
    lines to the shared command dispatcher."""
    if str(cmdline) == 'PING':
        wstserver.send_message(client, 'PONG')
        return
    handle_cmdline(cmdline)
def handle_cmdline(cmdline):
    """Parse a whitespace-separated command line and dispatch it.

    The first token selects a handler from the module-level ``commands``
    table; the remaining tokens are passed through as string arguments.
    Accepts both str (WebSocket path) and bytes (TCP path) input.
    """
    print(cmdline)
    try:
        if not cmdline:
            return
        # BUGFIX: str(b'cmd ...') yields "b'cmd ...'", so TCP-delivered
        # commands could never match the table; decode bytes instead.
        if isinstance(cmdline, bytes):
            cmdline = cmdline.decode('utf-8', 'replace')
        params = str(cmdline).strip().split(' ')
        command = params.pop(0)
        handler = commands.get(command)
        if handler is None:
            # BUGFIX: the original caught KeyError, which also misreported
            # KeyErrors raised *inside* a handler as "unimplemented".
            print("Warning: Unimplemented command called : {0}".format(command))
            return
        handler(*params)
    except Exception as e:
        pprint.pprint(e)
class TCPCommandHandler(socketserver.StreamRequestHandler):
    """Feeds newline-terminated TCP command lines to handle_cmdline."""

    def handle(self):
        # readline() returns b'' at EOF, which ends the iteration.
        for cmdline in iter(self.rfile.readline, b''):
            handle_cmdline(cmdline)
class ServerThread(threading.Thread):
    """Runs a server's serve_forever() loop on a background thread."""

    def __init__(self,server):
        super(ServerThread,self).__init__()
        # Any object exposing serve_forever() (HTTP, TCP or WebSocket server).
        self.server = server
        # Keep references to the shared namespaces so code running on this
        # thread can reach them.
        self.rpcserver_localvars = rpcserver_localvars
        self.rpcserver_globalvars = rpcserver_globalvars

    def run(self):
        # Blocks until shutdown() is called on the server.
        self.server.serve_forever()
class VIEW3D_PT_RemotePanel(bpy.types.Panel):
    """Tool-shelf panel exposing the remote-control subscription toggles."""
    bl_space_type = "VIEW_3D"
    bl_region_type = "TOOLS"
    bl_label = "Remote Nav"

    def draw(self, context):
        # Checkboxes bound to the get/set subscription helpers below
        # (registered as window-manager / scene properties in register()).
        self.layout.prop(context.window_manager,'view_subscribed_to_remote')
        self.layout.prop(context.scene,'scene_subscribed_to_remote')
def get_view_subscription(self):
    """True when the active 3D view space is subscribed to remote control."""
    return bpy.context.area.spaces[0] in rpcserver_localvars['views2control']
def set_view_subscription(self, value):
    """Subscribe/unsubscribe the active 3D view space from remote control."""
    views = rpcserver_localvars['views2control']
    space = bpy.context.area.spaces[0]
    subscribed = space in views
    if value and not subscribed:
        views.append(space)
    elif not value and subscribed:
        views.remove(space)
def get_scene_subscription(self):
    """True when the current scene's selection is remote-controlled."""
    return bpy.context.scene in rpcserver_localvars['scenes2control']
def set_scene_subscription(self, value):
    """Add/remove the current scene from the remote-controlled set."""
    scenes = rpcserver_localvars['scenes2control']
    scene = bpy.context.scene
    subscribed = scene in scenes
    if value and not subscribed:
        scenes.append(scene)
    elif not value and subscribed:
        scenes.remove(scene)
def register():
    """Add-on entry point: populate the command table, register the UI
    properties/classes and start the HTTP, TCP and WebSocket servers."""
    global rpcserver
    global tcpserver
    global wstserver
    global rpcserver_localvars
    global rpcserver_globalvars
    global commands
    # NOTE(review): handle_cmdline converts input with str() before lookup,
    # so these bytes-keyed entries appear to be dead -- confirm and remove.
    commands[b'rotateabsolute_camera'] = rotateabsolute_camera
    commands[b'rotate_camera'] = rotate_camera
    commands[b'move_camera'] = move_camera
    commands[b'rotate_view'] = rotate_view
    commands[b'move_view'] = move_view
    # String-keyed command table used by the dispatcher.
    # NOTE(review): move_selection and rotateabsolute_selection are defined
    # but never registered here, so they are unreachable from the remote --
    # confirm whether that is intentional.
    commands['rotateabsolute_camera'] = rotateabsolute_camera
    commands['rotate_camera'] = rotate_camera
    commands['rotate_selection'] = rotate_selection
    commands['move_camera'] = move_camera
    commands['rotate_view'] = rotate_view
    commands['move_view'] = move_view
    def debug(*args):
        # Simple echo command for testing the transport.
        print (args)
    commands['debug'] = debug
    # Checkbox properties backing the tool-shelf panel.
    bpy.types.WindowManager.view_subscribed_to_remote = BoolProperty(
        name="Control View and camera",
        description="Subscribe current view to the remote",
        default=0,
        subtype="UNSIGNED",
        get = get_view_subscription,
        set = set_view_subscription)
    bpy.types.Scene.scene_subscribed_to_remote = BoolProperty(
        name="Control Selection ",
        description="Subscribe current view to the remote",
        default=0,
        subtype="UNSIGNED",
        get = get_scene_subscription,
        set = set_scene_subscription)
    bpy.utils.register_module(__name__)
    # Start the servers only once per Blender session.
    if (not rpcserver):
        # Serve the add-on directory over HTTP so the html5 remote page
        # can be loaded by the controlling device.
        os.chdir(os.path.dirname(__file__))
        rpcserver = http.server.HTTPServer(('0.0.0.0',9000), http.server.SimpleHTTPRequestHandler)
        tcpserver = socketserver.ThreadingTCPServer(('0.0.0.0',9002),TCPCommandHandler)
        wstserver = websocket_server.WebsocketServer(9001,'0.0.0.0')
        wstserver.set_fn_message_received(lambda client,server,message: handle_wscmdline(client,message))
        rpcserver_globalvars=globals()
        # Subscription lists consulted by the command handlers.
        rpcserver_localvars['views2control']=[]
        rpcserver_localvars['scenes2control']=[]
        rpcserver_thread=ServerThread(rpcserver)
        rpcserver_thread.daemon = False
        rpcserver_thread.start()
        tcpserver_thread=ServerThread(tcpserver)
        tcpserver_thread.daemon = False
        tcpserver_thread.start()
        wstserver_thread=ServerThread(wstserver)
        wstserver_thread.daemon = False
        wstserver_thread.start()
def unregister():
    """Add-on teardown: stop the HTTP/TCP servers and unregister classes."""
    print("stopping rpcserver")
    global rpcserver
    global tcpserver
    rpcserver.socket.close()
    rpcserver.shutdown()
    tcpserver.socket.close()
    tcpserver.server_close()
    # NOTE(review): wstserver is never stopped here, and tcpserver's
    # serve_forever loop is closed without shutdown() -- confirm all
    # threads exit cleanly when the add-on is disabled.
    bpy.utils.unregister_module(__name__)
| kamathln/blenderrpc | __init__.py | Python | lgpl-3.0 | 9,571 |
'''
Created on Jan 23, 2014
@author: Chris
'''
import sys
from gooey.monkey_parser import ArgumentError
from gooey.gui.action_sorter import ActionSorter
class ClientApp(object):
    """Model wrapping an argparse parser for the Gooey GUI.

    Validates GUI-supplied argument strings against the parser and exposes
    the parser's actions, sorted for display.
    """

    def __init__(self, parser, payload):
        self._parser = parser
        self.description = parser.description
        # Parser actions grouped by kind (positionals, flags, ...) for the GUI.
        self.action_groups = ActionSorter(self._parser._actions)
        # Callable to run once the arguments have been collected.
        self.payload = payload

    def HasPositionals(self):
        """Return True if the wrapped parser declares positional arguments."""
        return bool(self.action_groups._positionals)

    def IsValidArgString(self, arg_string):
        """Return True when *arg_string* parses without error."""
        # _Parse returns an error-message string on failure, True on success.
        return not isinstance(self._Parse(arg_string), str)

    def _Parse(self, arg_string):
        # Returns True on success, or the error message string on failure.
        # BUGFIX: removed a stray leftover debug statement
        # ("print self._parser.error") that spammed stdout on every parse.
        try:
            self._parser.parse_args(arg_string.split())
            return True
        except ArgumentError as e:
            return str(e)

    def GetErrorMsg(self, arg_string):
        """Return a display-formatted parse error for *arg_string*."""
        return self._FormatMsg(self._Parse(arg_string))

    def _FormatMsg(self, msg):
        # Insert a line break after the first colon so long argparse error
        # messages wrap nicely in the GUI.
        output = list(msg)
        if ':' in output:
            output[output.index(':')] = ':\n '
        return ''.join(output)

    def AddToArgv(self, arg_string):
        """Append the whitespace-split tokens of *arg_string* to sys.argv."""
        sys.argv.extend(arg_string.split())
class EmptyClientApp(object):
    """Placeholder model used when there is no argument parser to wrap."""

    def __init__(self, payload):
        # No parser, so the description stays blank; only the payload
        # callable is kept.
        self.description = ''
        self.payload = payload
if __name__ == '__main__':
    # This module is only ever imported by the GUI; nothing runs standalone.
    pass
| garrettcap/Bulletproof-Backup | gooey/gui/client_app.py | Python | gpl-2.0 | 1,454 |
#!/usr/bin/python
# Copyright 2014 Jens Carl, Hothead Games Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
author:
- "Jens Carl (@j-carl), Hothead Games Inc."
module: redshift
version_added: "2.1"
short_description: create, delete, or modify an Amazon Redshift instance
description:
- Creates, deletes, or modifies amazon Redshift cluster instances.
options:
command:
description:
- Specifies the action to take.
required: true
choices: [ 'create', 'facts', 'delete', 'modify' ]
identifier:
description:
- Redshift cluster identifier.
required: true
node_type:
description:
- The node type of the cluster. Must be specified when command=create.
required: false
choices: ['dw1.xlarge', 'dw1.8xlarge', 'dw2.large', 'dw2.8xlarge', ]
username:
description:
- Master database username. Used only when command=create.
required: false
password:
description:
- Master database password. Used only when command=create.
required: false
cluster_type:
description:
- The type of cluster.
required: false
choices: ['multi-node', 'single-node' ]
default: 'single-node'
db_name:
description:
- Name of the database.
required: false
default: null
availability_zone:
description:
- availability zone in which to launch cluster
required: false
aliases: ['zone', 'aws_zone']
number_of_nodes:
description:
- Number of nodes. Only used when cluster_type=multi-node.
required: false
default: null
cluster_subnet_group_name:
description:
- which subnet to place the cluster
required: false
aliases: ['subnet']
cluster_security_groups:
description:
- in which security group the cluster belongs
required: false
default: null
aliases: ['security_groups']
vpc_security_group_ids:
description:
- VPC security group
required: false
aliases: ['vpc_security_groups']
default: null
preferred_maintenance_window:
description:
- maintenance window
required: false
aliases: ['maintance_window', 'maint_window']
default: null
cluster_parameter_group_name:
description:
- name of the cluster parameter group
required: false
aliases: ['param_group_name']
default: null
automated_snapshot_retention_period:
description:
- period when the snapshot take place
required: false
aliases: ['retention_period']
default: null
port:
description:
      - which port the cluster is listening on
required: false
default: null
cluster_version:
description:
- which version the cluster should have
required: false
aliases: ['version']
choices: ['1.0']
default: null
allow_version_upgrade:
description:
      - flag to determine whether a version upgrade is allowed
required: false
aliases: ['version_upgrade']
default: null
number_of_nodes:
description:
- number of the nodes the cluster should run
required: false
default: null
publicly_accessible:
description:
- if the cluster is accessible publicly or not
required: false
default: null
encrypted:
description:
- if the cluster is encrypted or not
required: false
default: null
elastic_ip:
description:
- if the cluster has an elastic IP or not
required: false
default: null
new_cluster_identifier:
description:
- Only used when command=modify.
required: false
aliases: ['new_identifier']
default: null
wait:
description:
- When command=create, modify or restore then wait for the database to enter the 'available' state. When command=delete wait for the database to be terminated.
required: false
default: "no"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
requirements: [ 'boto' ]
extends_documentation_fragment: aws
'''
EXAMPLES = '''
# Basic cluster provisioning example
- redshift: >
command=create
node_type=dw1.xlarge
identifier=new_cluster
username=cluster_admin
password=1nsecure
'''
RETURN = '''
cluster:
description: dictionary containing all the cluster information
returned: success
type: dictionary
contains:
identifier:
description: Id of the cluster.
returned: success
type: string
sample: "new_redshift_cluster"
create_time:
description: Time of the cluster creation as timestamp.
returned: success
type: float
sample: 1430158536.308
status:
    description: Status of the cluster.
returned: success
type: string
sample: "available"
db_name:
description: Name of the database.
returned: success
type: string
sample: "new_db_name"
availability_zone:
description: Amazon availability zone where the cluster is located.
returned: success
type: string
sample: "us-east-1b"
maintenance_window:
description: Time frame when maintenance/upgrade are done.
returned: success
type: string
sample: "sun:09:30-sun:10:00"
private_ip_address:
description: Private IP address of the main node.
returned: success
type: string
sample: "10.10.10.10"
public_ip_address:
description: Public IP address of the main node.
returned: success
type: string
sample: "0.0.0.0"
port:
description: Port of the cluster.
returned: success
type: int
sample: 5439
url:
description: FQDN of the main cluster node.
returned: success
type: string
sample: "new-redshift_cluster.jfkdjfdkj.us-east-1.redshift.amazonaws.com"
'''
import time
# boto is optional at import time so the module can report a clean
# "boto required" failure from main() instead of crashing on import.
try:
    import boto
    from boto import redshift
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False
def _collect_facts(resource):
    """Map an AWS cluster-description dict onto the flat facts dict
    returned to Ansible."""
    facts = {
        'identifier'        : resource['ClusterIdentifier'],
        'create_time'       : resource['ClusterCreateTime'],
        'status'            : resource['ClusterStatus'],
        'username'          : resource['MasterUsername'],
        'db_name'           : resource['DBName'],
        'availability_zone' : resource['AvailabilityZone'],
        'maintenance_window': resource['PreferredMaintenanceWindow'],
    }
    # The leader (or shared) node carries the cluster's private IP; only
    # the first matching node is reported.
    leader = next((node for node in resource['ClusterNodes']
                   if node['NodeRole'] in ('SHARED', 'LEADER')), None)
    if leader is not None:
        facts['private_ip_address'] = leader['PrivateIPAddress']
    return facts
def create_cluster(module, redshift):
    """
    Create a new cluster unless one with this identifier already exists.

    module: AnsibleModule object
    redshift: authenticated redshift connection object

    Returns: (changed, facts) tuple.
    """
    identifier = module.params.get('identifier')
    node_type = module.params.get('node_type')
    username = module.params.get('username')
    password = module.params.get('password')
    wait = module.params.get('wait')
    wait_timeout = module.params.get('wait_timeout')
    changed = True
    # Package up the optional parameters
    params = {}
    for p in ('db_name', 'cluster_type', 'cluster_security_groups',
              'vpc_security_group_ids', 'cluster_subnet_group_name',
              'availability_zone', 'preferred_maintenance_window',
              'cluster_parameter_group_name',
              'automated_snapshot_retention_period', 'port',
              'cluster_version', 'allow_version_upgrade',
              'number_of_nodes', 'publicly_accessible',
              'encrypted', 'elastic_ip'):
        if module.params.get(p):
            params[p] = module.params.get(p)
    try:
        # Cluster already exists: nothing to create, report unchanged.
        redshift.describe_clusters(identifier)['DescribeClustersResponse']['DescribeClustersResult']['Clusters'][0]
        changed = False
    except boto.exception.JSONResponseError:
        try:
            redshift.create_cluster(identifier, node_type, username, password, **params)
        except boto.exception.JSONResponseError as e:
            module.fail_json(msg=str(e))
    try:
        resource = redshift.describe_clusters(identifier)['DescribeClustersResponse']['DescribeClustersResult']['Clusters'][0]
    except boto.exception.JSONResponseError as e:
        module.fail_json(msg=str(e))
    if wait:
        try:
            wait_timeout = time.time() + wait_timeout
            time.sleep(5)
            while wait_timeout > time.time() and resource['ClusterStatus'] != 'available':
                time.sleep(5)
                if wait_timeout <= time.time():
                    # BUG FIX: `resource` is a plain dict with no `.id`
                    # attribute; the old message raised AttributeError on
                    # timeout instead of failing cleanly.
                    module.fail_json(msg="Timeout waiting for resource %s" % identifier)
                resource = redshift.describe_clusters(identifier)['DescribeClustersResponse']['DescribeClustersResult']['Clusters'][0]
        except boto.exception.JSONResponseError as e:
            module.fail_json(msg=str(e))
    return(changed, _collect_facts(resource))
def describe_cluster(module, redshift):
    """
    Collect data about the cluster.

    module: Ansible module object
    redshift: authenticated redshift connection object
    """
    identifier = module.params.get('identifier')
    try:
        response = redshift.describe_clusters(identifier)
        resource = response['DescribeClustersResponse']['DescribeClustersResult']['Clusters'][0]
    except boto.exception.JSONResponseError as e:
        module.fail_json(msg=str(e))
    return (True, _collect_facts(resource))
def delete_cluster(module, redshift):
    """
    Delete a cluster.

    module: Ansible module object
    redshift: authenticated redshift connection object

    Returns: (changed, facts) tuple.
    """
    identifier = module.params.get('identifier')
    wait = module.params.get('wait')
    wait_timeout = module.params.get('wait_timeout')
    try:
        # BUG FIX: the old code called redshift.delete_custer (typo), a
        # method that does not exist, so deletion always raised
        # AttributeError instead of deleting anything.
        redshift.delete_cluster(identifier)
    except boto.exception.JSONResponseError as e:
        module.fail_json(msg=str(e))
    if wait:
        try:
            wait_timeout = time.time() + wait_timeout
            resource = redshift.describe_clusters(identifier)['DescribeClustersResponse']['DescribeClustersResult']['Clusters'][0]
            while wait_timeout > time.time() and resource['ClusterStatus'] != 'deleting':
                time.sleep(5)
                if wait_timeout <= time.time():
                    # BUG FIX: `resource` is a dict; `.id` raised AttributeError.
                    module.fail_json(msg="Timeout waiting for resource %s" % identifier)
                resource = redshift.describe_clusters(identifier)['DescribeClustersResponse']['DescribeClustersResult']['Clusters'][0]
        except boto.exception.JSONResponseError as e:
            # NOTE(review): once the cluster is fully gone describe_clusters
            # raises, which this treats as a failure -- confirm desired
            # behavior before tightening.
            module.fail_json(msg=str(e))
    return(True, {})
def modify_cluster(module, redshift):
    """
    Modify an existing cluster.

    module: Ansible module object
    redshift: authenticated redshift connection object

    Returns: (changed, facts) tuple.
    """
    identifier = module.params.get('identifier')
    wait = module.params.get('wait')
    wait_timeout = module.params.get('wait_timeout')
    # Package up the optional parameters
    params = {}
    for p in ('cluster_type', 'cluster_security_groups',
              'vpc_security_group_ids', 'cluster_subnet_group_name',
              'availability_zone', 'preferred_maintenance_window',
              'cluster_parameter_group_name',
              'automated_snapshot_retention_period', 'port', 'cluster_version',
              'allow_version_upgrade', 'number_of_nodes', 'new_cluster_identifier'):
        if module.params.get(p):
            params[p] = module.params.get(p)
    # BUG FIX: the old code (copy-pasted from create_cluster) only called
    # modify_cluster when describe_clusters FAILED -- i.e. when the cluster
    # did not exist -- so an existing cluster was never actually modified.
    # Apply the modification unconditionally and fail on any API error.
    try:
        redshift.modify_cluster(identifier, **params)
    except boto.exception.JSONResponseError as e:
        module.fail_json(msg=str(e))
    try:
        resource = redshift.describe_clusters(identifier)['DescribeClustersResponse']['DescribeClustersResult']['Clusters'][0]
    except boto.exception.JSONResponseError as e:
        module.fail_json(msg=str(e))
    if wait:
        try:
            wait_timeout = time.time() + wait_timeout
            time.sleep(5)
            while wait_timeout > time.time() and resource['ClusterStatus'] != 'available':
                time.sleep(5)
                if wait_timeout <= time.time():
                    module.fail_json(msg="Timeout waiting for resource %s" % identifier)
                resource = redshift.describe_clusters(identifier)['DescribeClustersResponse']['DescribeClustersResult']['Clusters'][0]
        except boto.exception.JSONResponseError as e:
            # Spurious errors possible here until
            # https://github.com/boto/boto/issues/2776 is fixed.
            module.fail_json(msg=str(e))
    return(True, _collect_facts(resource))
def main():
    """Module entry point: build the argument spec, connect to the Redshift
    endpoint, and dispatch the requested command."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        command                             = dict(choices=['create', 'facts', 'delete', 'modify'], required=True),
        identifier                          = dict(required=True),
        node_type                           = dict(choices=['dw1.xlarge', 'dw1.8xlarge', 'dw2.large', 'dw2.8xlarge', ], required=False),
        username                            = dict(required=False),
        password                            = dict(no_log=True, required=False),
        # BUG FIX: this keyword was misspelled `require=False`, which
        # AnsibleModule silently ignored.
        db_name                             = dict(required=False),
        cluster_type                        = dict(choices=['multi-node', 'single-node', ], default='single-node'),
        cluster_security_groups             = dict(aliases=['security_groups'], type='list'),
        vpc_security_group_ids              = dict(aliases=['vpc_security_groups'], type='list'),
        cluster_subnet_group_name           = dict(aliases=['subnet']),
        availability_zone                   = dict(aliases=['aws_zone', 'zone']),
        preferred_maintenance_window        = dict(aliases=['maintance_window', 'maint_window']),
        cluster_parameter_group_name        = dict(aliases=['param_group_name']),
        automated_snapshot_retention_period = dict(aliases=['retention_period']),
        port                                = dict(type='int'),
        cluster_version                     = dict(aliases=['version'], choices=['1.0']),
        allow_version_upgrade               = dict(aliases=['version_upgrade'], type='bool'),
        number_of_nodes                     = dict(type='int'),
        publicly_accessible                 = dict(type='bool'),
        encrypted                           = dict(type='bool'),
        elastic_ip                          = dict(required=False),
        new_cluster_identifier              = dict(aliases=['new_identifier']),
        wait                                = dict(type='bool', default=False),
        # BUG FIX: wait_timeout is used in time arithmetic; without
        # type='int' a user-supplied value arrived as a string and raised
        # TypeError in the wait loops.
        wait_timeout                        = dict(type='int', default=300),
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
    )

    if not HAS_BOTO:
        module.fail_json(msg='boto v2.9.0+ required for this module')

    command = module.params.get('command')

    region, ec2_url, aws_connect_params = get_aws_connection_info(module)
    if not region:
        module.fail_json(msg=str("region not specified and unable to determine region from EC2_REGION."))

    # connect to the rds endpoint
    try:
        conn = connect_to_aws(boto.redshift, region, **aws_connect_params)
    except boto.exception.JSONResponseError as e:
        module.fail_json(msg=str(e))

    changed = True
    if command == 'create':
        (changed, cluster) = create_cluster(module, conn)
    elif command == 'facts':
        (changed, cluster) = describe_cluster(module, conn)
    elif command == 'delete':
        (changed, cluster) = delete_cluster(module, conn)
    elif command == 'modify':
        (changed, cluster) = modify_cluster(module, conn)

    module.exit_json(changed=changed, cluster=cluster)
# import module snippets
# Star-imports at the bottom of the file are the (legacy) Ansible module
# convention: they inject AnsibleModule, ec2_argument_spec, etc.
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *

if __name__ == '__main__':
    main()
| JasonCormie/ansible-modules-extras | cloud/amazon/redshift.py | Python | gpl-3.0 | 17,157 |
#!/usr/bin/python
#
# websocket client for test.
#
"""
Copyright (c) 2015, SUGIMOTO Norimitsu <dictoss@live.jp>
All rights reserved.
Redistribution and use in source and binary forms,
with or without modification, are permitted provided
that the following conditions are met:
1. Redistributions of source code must retain the above
copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import sys
import json
import datetime
import time
import ConfigParser
import ssl
from twisted.python import log
from twisted.internet import reactor
from autobahn.twisted.websocket import WebSocketClientProtocol, WebSocketClientFactory, connectWS
gconfig = None
class MyClientProtocol(WebSocketClientProtocol):
    """Autobahn client protocol: authenticates on open, then logs and
    classifies every message received from the server."""

    def onConnect(self, response):
        # TCP-level connect; the WebSocket handshake is not yet complete.
        print("Server connected: {0}".format(response.peer))

    def onOpen(self):
        # Handshake done: immediately send the authentication request built
        # from the ini-file settings (gconfig is populated in main()).
        data = {
            "version": {
                "common_version": "1",
                "details_version": "1"},
            "common": {
                "datatype": "authentication",
                "msgid": "",
                "sendid": "",
                "senddatetime": ""
            },
            "details": {
                "password": gconfig.get('default', 'api_password')
            },
            "sender": {
                "version": "1",
                "userid": gconfig.get('default', 'api_userid'),
                "termid": gconfig.get('default', 'api_termid')
            },
            "receiver": {
                "version": "1",
                "userid": "*",
                "termid": "*"
            },
        }
        s = json.dumps(data).encode('utf8')
        self.sendMessage(s, isBinary=False)
        print(s)

    def onMessage(self, payload, isBinary):
        # Log every frame; JSON text frames are decoded and dispatched on
        # their common.datatype field.
        print('--------')
        print('EVENT : receive : %s' % datetime.datetime.now())
        if isBinary:
            print("Binary message received: {0} bytes".format(len(payload)))
        else:
            s = payload.decode('utf8')
            message = json.loads(s)
            print("Text message received:")
            print(message)
            if 'details' in message and 'datatype' in message['common']:
                print('datatype: %s' % message['common']['datatype'])
                if 'authentication' == message['common']['datatype']:
                    # resultcode is a string in this protocol, hence '200'.
                    if '200' == message['details']['resultcode']:
                        print('success auth')
                    else:
                        print('fail auth')
                else:
                    print('do task.')
            else:
                print('receive unknown message.')
        # NOTE(review): bare `print` below is the Python 2 statement form
        # (prints a blank line); this file targets Python 2.
        print

    def onClose(self, wasClean, code, reason):
        print('Closed down. (code=%s, reason=%s)' % (code, reason))
def main():
    """Read the ini configuration, connect the WebSocket client, and run
    the twisted reactor until interrupted.

    Returns a process exit code: 0 on success/interrupt, 1 on runtime
    error, 2 on configuration error.
    """
    ret = 0
    global gconfig
    config = None

    try:
        config = ConfigParser.ConfigParser()
        config.read('./eqclient_autobahn.ini')
        gconfig = config
        print('success read config.')
    except:
        print('error read config. abort. (%s, %s)' % (
            sys.exc_info()[0], sys.exc_info()[1]))
        return 2

    try:
        print('try connect : %s' % (config.get('default', 'API_URL')))
        #log.startLogging(sys.stdout)
        factory = WebSocketClientFactory(
            config.get('default', 'API_URL'),
            debug=False)
        factory.protocol = MyClientProtocol
        connectWS(factory)
        reactor.run()
    except KeyboardInterrupt:
        # BUG FIX: the old handler called ws.close() on an undefined name
        # `ws`, raising NameError; stop the reactor instead.
        reactor.stop()
        return 0
    except:
        print("EXCEPT: %s" % sys.exc_info()[1])
        return 1

    return ret
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit code.
    ret = main()
    sys.exit(ret)
| dictoss/websockettools | tools/eqclient_autobahn.py | Python | bsd-2-clause | 4,573 |
from gmusic.frontend.CursedObject import CursedObject
from gmusic.core.CommandProcessor import CommandProcessor
from gmusic.core.EventHandler import EventHandler
from gmusic.core.UIParser import UIParser
import curses
import collections
class UI(CursedObject, EventHandler):
    """
    Handles input from the user; handles keypress events
    and even has its own command-line interface
    """
    def __init__(self, draw_handler, cmd_processor, ui_parser):
        CursedObject.__init__(self)
        EventHandler.__init__(self)
        # `attachments` is presumably provided by a base class -- it is not
        # assigned here; confirm against CursedObject/EventHandler.
        self.attachments.append(draw_handler)
        self.screen = draw_handler.screen
        self.cmd_processor = cmd_processor
        self.ui_parser = ui_parser

    def __running__(self):
        """Runs an infinite loop so long as the request is not an exit cmd"""
        # NOTE(review): `request` is never reassigned inside the loop, so
        # this only terminates via an exception or external exit path;
        # `content_manager` is also not set in __init__ -- confirm it comes
        # from a base class.
        request = ""
        while request != 'exit':
            self.get_user_input()
        self.content_manager.exit()

    def get_user_input(self):
        '''Get the character pressed by the user, then send it'''
        user_input = self.screen.getch()

        # Text Entry must occur here, in curses land
        if user_input == ord('i') or user_input == ord('I'):
            # NOTE(review): handle_text_entry has no return statement, so
            # `result` is always None and unused.
            result = self.handle_text_entry()
            self.notify_attachments('PageUpdate')
            return

        # TODO: figure out how to have the UI listen fo the escape key
        # elif user_input == 27: # 27 is escape
        #     del self.cli_prompt
        #     self.screen.refresh()
        #     return

        # Otherwise it goes off to the UI Parser
        self.ui_parser.parse(user_input)

    def handle_text_entry(self):
        """Handles full-text entry, instead of keypresses"""
        # echo() so the user sees what they type in the prompt window.
        curses.echo()
        height, width = self.screen.getmaxyx()
        request = self.draw_cli_prompt(height, width)

        # Push it to cmd_parser
        curses.noecho()
        self.cmd_processor.parse(request)

    def draw_cli_prompt(self, height, width):
        # Draws the command help window plus an input line and returns the
        # string the user typed (bytes from curses getstr).
        indentation = 2
        screen_padding = 20
        text_padding = 12

        # Content
        help_options = collections.OrderedDict()
        help_options['album'] = '> album title Search for Albums'
        help_options['artist'] = '> artist name Search for Artists'
        help_options['playlist'] = '> playlist name Search for Playlists'
        help_options['radio'] = '> radio name Search for Radios'
        help_options['song'] = '> song title Search for Songs'
        help_options['play'] = '> play Begins Playback'
        help_options['pause'] = '> pause Pauses Playback'
        help_options['next'] = '> next Play Next'
        help_options['previous'] = '> previous Play Previous'
        help_options['random'] = '> random Toggle Shuffle'
        help_options['back'] = '> back Returns to the previous page'
        help_options['main'] = '> main Back to Main Menu'
        help_options['exit'] = '> exit Exit the program'

        # Examples for the above (only used if the window is wide enough)
        examples = {
            'album': '\t\te.g.: > album Discovery',
            'artist': '\t\te.g.: > artist Daft Punk',
            # the playlist is a long word
            # and messes up the tabbing
            # TODO: see if there is a curses table implementation
            # where we could just draw to table cells
            'playlist': '\te.g.: > playlist Eletro-Ambient',
            'radio': '\t\te.g.: > radio Armin',
            'song': '\t\te.g.: > song aerodynamic'
        }

        # calculate the longest line of text
        # this will be used to compare with the window width to see
        # if we even can add the examples
        longest_core_option_length = len(max(help_options.values(), key=len))
        longest_example_length = len(max(examples.values(), key=len))
        longest_help_text = longest_core_option_length + longest_example_length

        # Height is based off the content of the cli
        # + a blank line
        # + a line for input
        # + the top and bottom borders (2)
        window_height = len(help_options) + 1 + 1 + 2
        window_width = width - (screen_padding * 2)

        # determine if the window width should be shorter
        if window_width < (longest_help_text + screen_padding * 2):
            # reduce width to fit just the help
            window_width = longest_core_option_length + text_padding
        else:
            # window is plenty wide, add the examples
            window_width = longest_help_text + text_padding
            for command, text in examples.items():
                help_options[command] = help_options[command] + text

        # Height of this window with some padding underneath
        begin_y = height - (window_height + 4)
        # half the width of the screen minus half the width of the window
        # NOTE(review): `window_width / 2` is integer division on Python 2
        # but float on Python 3 -- curses.newwin needs ints; confirm the
        # targeted Python version.
        begin_x = int(width * 0.5) - (window_width / 2)

        # Now that we have all our parameters for constraining the window,
        # create the window
        self.cli_prompt = win = curses.newwin(window_height, window_width, begin_y, begin_x)
        win.box()
        win_h, win_w = win.getmaxyx()

        # The first line of text will be at this height from the top of the window
        line_index = 1
        last_line_index = window_height - 2

        # Title
        win.addstr(0, indentation, 'Commands', curses.A_BOLD)

        # Add the text to the window
        for command, text in help_options.items():
            win.addstr(line_index, indentation, text, curses.A_DIM)
            line_index += 1

        #win.addstr(14, 2, 'ESC will close this modal', curses.A_DIM)
        win.addstr(line_index, indentation, '', curses.A_DIM)
        win.addstr(last_line_index, indentation, "> ")

        # draw everything
        self.screen.leaveok(1)
        self.screen.refresh()
        win.refresh()

        # listen for commands
        return win.getstr(last_line_index, indentation * 2, win_w-(indentation * 2))
| etkirsch/gmusic | gmusic/frontend/UI.py | Python | gpl-2.0 | 6,102 |
# $Id: mod_call.py 369517 2012-07-01 17:28:57Z file $
import time
import imp
import sys
import inc_const as const
from inc_cfg import *
# Load the test configuration module; ARGS is injected by the pjsua
# test framework and ARGS[1] is the config file path.
cfg_file = imp.load_source("cfg_file", ARGS[1])
# Check media flow between ua1 and ua2
def check_media(ua1, ua2):
ua1.send("#")
ua1.expect("#")
ua1.send("1122")
ua2.expect(const.RX_DTMF + "1")
ua2.expect(const.RX_DTMF + "1")
ua2.expect(const.RX_DTMF + "2")
ua2.expect(const.RX_DTMF + "2")
# Test body function
def test_func(t):
    """Scripted SIP call-flow test: establish a call, then exercise
    hold/resume and UPDATE from both sides, verifying DTMF media flow
    after each renegotiation, including the no-common-codec failure case.

    The exact sleep/expect ordering is deliberate and timing-sensitive --
    do not reorder steps.
    """
    callee = t.process[0]
    caller = t.process[1]

    # if have_reg then wait for couple of seconds for PUBLISH
    # to complete (just in case pUBLISH is used)
    if callee.inst_param.have_reg:
        time.sleep(1)
    if caller.inst_param.have_reg:
        time.sleep(1)

    # Caller making call
    caller.send("m")
    caller.send(t.inst_params[0].uri)
    caller.expect(const.STATE_CALLING)

    # Callee waits for call and answers with 180/Ringing
    time.sleep(0.2)
    callee.expect(const.EVENT_INCOMING_CALL)
    callee.send("a")
    callee.send("180")
    callee.expect("SIP/2.0 180")
    caller.expect("SIP/2.0 180")

    # Synchronize stdout
    caller.sync_stdout()
    callee.sync_stdout()

    # Callee answers with 200/OK
    callee.send("a")
    callee.send("200")

    # Wait until call is connected in both endpoints
    time.sleep(0.2)
    caller.expect(const.STATE_CONFIRMED)
    callee.expect(const.STATE_CONFIRMED)

    # Synchronize stdout
    caller.sync_stdout()
    callee.sync_stdout()
    time.sleep(0.1)
    caller.sync_stdout()
    callee.sync_stdout()

    # Test that media is okay
    time.sleep(0.3)
    check_media(caller, callee)
    check_media(callee, caller)

    # Hold call by caller
    caller.send("H")
    caller.expect("INVITE sip:")
    callee.expect("INVITE sip:")
    caller.expect(const.MEDIA_HOLD)
    callee.expect(const.MEDIA_HOLD)

    # Synchronize stdout
    caller.sync_stdout()
    callee.sync_stdout()

    # Release hold
    time.sleep(0.5)
    caller.send("v")
    caller.expect("INVITE sip:")
    callee.expect("INVITE sip:")
    caller.expect(const.MEDIA_ACTIVE, title="waiting for media active after call hold")
    callee.expect(const.MEDIA_ACTIVE, title="waiting for media active after call hold")

    # Synchronize stdout
    caller.sync_stdout()
    callee.sync_stdout()

    # Test that media is okay
    check_media(caller, callee)
    check_media(callee, caller)

    # Synchronize stdout
    caller.sync_stdout()
    callee.sync_stdout()

    # Hold call by callee
    callee.send("H")
    callee.expect("INVITE sip:")
    caller.expect("INVITE sip:")
    caller.expect(const.MEDIA_HOLD)
    callee.expect(const.MEDIA_HOLD)

    # Synchronize stdout
    caller.sync_stdout()
    callee.sync_stdout()

    # Release hold
    time.sleep(0.1)
    callee.send("v")
    callee.expect("INVITE sip:")
    caller.expect("INVITE sip:")
    callee.expect(const.MEDIA_ACTIVE, title="waiting for media active after call hold")
    caller.expect(const.MEDIA_ACTIVE, title="waiting for media active after call hold")

    # Synchronize stdout
    caller.sync_stdout()
    callee.sync_stdout()

    # Test that media is okay
    # Wait for some time for ICE negotiation
    time.sleep(0.6)
    check_media(caller, callee)
    check_media(callee, caller)

    # Synchronize stdout
    caller.sync_stdout()
    callee.sync_stdout()

    # UPDATE (by caller)
    caller.send("U")
    #caller.sync_stdout()
    callee.expect(const.MEDIA_ACTIVE, title="waiting for media active with UPDATE")
    caller.expect(const.MEDIA_ACTIVE, title="waiting for media active with UPDATE")

    # Synchronize stdout
    caller.sync_stdout()
    callee.sync_stdout()

    # Test that media is okay
    time.sleep(0.1)
    check_media(caller, callee)
    check_media(callee, caller)

    # UPDATE (by callee)
    callee.send("U")
    callee.expect("UPDATE sip:")
    caller.expect("UPDATE sip:")
    caller.expect(const.MEDIA_ACTIVE, title="waiting for media active with UPDATE")
    callee.expect(const.MEDIA_ACTIVE, title="waiting for media active with UPDATE")

    # Synchronize stdout
    caller.sync_stdout()
    callee.sync_stdout()

    # Test that media is okay
    time.sleep(0.1)
    check_media(caller, callee)
    check_media(callee, caller)

    # Synchronize stdout
    caller.sync_stdout()
    callee.sync_stdout()

    # Set codecs in both caller and callee so that there is
    # no common codec between them.
    # In caller we only enable PCMU, in callee we only enable PCMA
    caller.send("Cp")
    caller.expect("Enter codec")
    caller.send("* 0")
    caller.send("Cp")
    caller.expect("Enter codec")
    caller.send("pcmu 120")

    callee.send("Cp")
    callee.expect("Enter codec")
    callee.send("* 0")
    callee.send("Cp")
    callee.expect("Enter codec")
    callee.send("pcma 120")

    # Test when UPDATE fails (by callee)
    callee.send("U")
    caller.expect("SIP/2.0 488")
    callee.expect("SIP/2.0 488")
    callee.sync_stdout()
    caller.sync_stdout()

    # Test that media is still okay
    time.sleep(0.1)
    check_media(caller, callee)
    check_media(callee, caller)

    # Test when UPDATE fails (by caller)
    caller.send("U")
    caller.expect("UPDATE sip:")
    callee.expect("UPDATE sip:")
    callee.expect("SIP/2.0 488")
    caller.expect("SIP/2.0 488")
    caller.sync_stdout()
    callee.sync_stdout()

    # Test that media is still okay
    time.sleep(0.1)
    check_media(callee, caller)
    check_media(caller, callee)

    # Hangup call
    time.sleep(0.1)
    caller.send("h")

    # Wait until calls are cleared in both endpoints
    caller.expect(const.STATE_DISCONNECTED)
    callee.expect(const.STATE_DISCONNECTED)
# Wire the test body into the framework: cfg_file supplies the test
# definition; we attach the function the runner will execute.
test = cfg_file.test_param
test.test_func = test_func
| marcelloceschia/asterisk-11-extended_codec | res/pjproject/tests/pjsua/mod_call.py | Python | gpl-2.0 | 5,335 |
#!/usr/bin/env python
from setuptools import setup, find_packages

# Minimal packaging manifest for the `compilewheel` test fixture package.
setup(name='compilewheel',
      version='1.0',
      packages=find_packages()
      )
| alex/pip | tests/data/src/compilewheel/setup.py | Python | mit | 154 |
import sys
with open(sys.argv[1], 'r') as file:
lines = file.readlines()
for line in lines:
if line:
line = line.strip()
print int(line, 16)
| tdsymonds/codeeval | python/easy/(67) hex-to-decimal.py | Python | mit | 190 |
from test.support import run_unittest
from test.script_helper import assert_python_failure, temp_dir
import unittest
import sys
import cgitb
class TestCgitb(unittest.TestCase):
    """Unit tests for the cgitb traceback-formatting module."""

    def test_fonts(self):
        # small/strong/grey wrap text in simple HTML font tags.
        text = "Hello Robbie!"
        self.assertEqual(cgitb.small(text), "<small>{}</small>".format(text))
        self.assertEqual(cgitb.strong(text), "<strong>{}</strong>".format(text))
        self.assertEqual(cgitb.grey(text),
                         '<font color="#909090">{}</font>'.format(text))

    def test_blanks(self):
        # Empty input must produce empty output, not empty tag pairs.
        self.assertEqual(cgitb.small(""), "")
        self.assertEqual(cgitb.strong(""), "")
        self.assertEqual(cgitb.grey(""), "")

    def test_html(self):
        try:
            raise ValueError("Hello World")
        except ValueError as err:
            # If the html was templated we could do a bit more here.
            # At least check that we get details on what we just raised.
            html = cgitb.html(sys.exc_info())
            self.assertIn("ValueError", html)
            self.assertIn(str(err), html)

    def test_text(self):
        try:
            raise ValueError("Hello World")
        except ValueError as err:
            text = cgitb.text(sys.exc_info())
            self.assertIn("ValueError", text)
            self.assertIn("Hello World", text)

    def test_syshook_no_logdir_default_format(self):
        # Run a failing script in a subprocess with cgitb.enable() active.
        with temp_dir() as tracedir:
            rc, out, err = assert_python_failure(
                  '-c',
                  ('import cgitb; cgitb.enable(logdir=%s); '
                   'raise ValueError("Hello World")') % repr(tracedir))
            out = out.decode(sys.getfilesystemencoding())
            self.assertIn("ValueError", out)
            self.assertIn("Hello World", out)
            # By default we emit HTML markup.
            self.assertIn('<p>', out)
            self.assertIn('</p>', out)

    def test_syshook_no_logdir_text_format(self):
        # Issue 12890: we were emitting the <p> tag in text mode.
        with temp_dir() as tracedir:
            rc, out, err = assert_python_failure(
                  '-c',
                  ('import cgitb; cgitb.enable(format="text", logdir=%s); '
                   'raise ValueError("Hello World")') % repr(tracedir))
            out = out.decode(sys.getfilesystemencoding())
            self.assertIn("ValueError", out)
            self.assertIn("Hello World", out)
            self.assertNotIn('<p>', out)
            self.assertNotIn('</p>', out)
def test_main():
    # Entry point used by the CPython regression-test driver.
    run_unittest(TestCgitb)

if __name__ == "__main__":
    test_main()
| Orav/kbengine | kbe/src/lib/python/Lib/test/test_cgitb.py | Python | lgpl-3.0 | 2,621 |
#
# Copyright (C) 2012 UNINETT AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"common AgentProxy mixin"
import time
import logging
from functools import wraps
from collections import namedtuple
from twisted.internet import reactor
from twisted.internet.defer import succeed
from twisted.internet.task import deferLater
_logger = logging.getLogger(__name__)
def cache_for_session(func):
    """Decorator for AgentProxyMixIn.getTable to cache responses

    The cache lives on the proxy instance (`_result_cache`), keyed by the
    tuple of requested OIDs. On a miss the Deferred from the wrapped call
    is returned and the result is stored via a callback once it fires; on
    a hit an already-fired Deferred wrapping the cached value is returned.
    """
    def _wrapper(*args, **kwargs):
        self, oids = args[0], args[1]
        cache = getattr(self, '_result_cache')
        key = tuple(oids)
        if key not in cache:
            df = func(*args, **kwargs)
            if df:
                # Populate the cache when the response arrives, passing
                # the result through unchanged to later callbacks.
                df.addCallback(_cache_result, cache, key)
            return df
        else:
            return succeed(cache[key])
    return wraps(func)(_wrapper)
def _cache_result(result, cache, key):
cache[key] = result
return result
def throttled(func):
    """Decorator for AgentProxyMixIn.getTable to throttle requests.

    Ensures at least ``self.throttle_delay`` seconds pass between
    consecutive requests on the same proxy; a call that arrives too early
    is rescheduled on the reactor instead of blocking.
    """
    def _wrapper(*args, **kwargs):
        self = args[0]
        last_request = self._last_request
        delay = (last_request + self.throttle_delay) - time.time()
        self._last_request = time.time()
        if delay > 0:
            # BUG FIX: the format string used to be "%sss", which rendered
            # as e.g. "0.5ss delay"; "%ss" gives the intended "0.5s delay".
            _logger.debug("%ss delay due to throttling: %r", delay, self)
            return deferLater(reactor, delay, func, *args, **kwargs)
        else:
            return func(*args, **kwargs)
    return wraps(func)(_wrapper)
# pylint: disable=R0903
class AgentProxyMixIn(object):
    """Common AgentProxy mix-in class.

    Provides shared base functionality for ipdevpoll AgentProxies, whether
    they derive from the twistedsnmp or the pynetsnmp library.  The former
    uses old-style classes and the latter new-style ones, which makes
    super() behave oddly unless handled here.
    """
    def __init__(self, *args, **kwargs):
        """Initializes an agent proxy.

        :params snmp_parameters: An SNMPParameters namedtuple.
        """
        self.snmp_parameters = kwargs.pop('snmp_parameters', SNMP_DEFAULTS)
        self._result_cache = {}
        self._last_request = 0
        self.throttle_delay = self.snmp_parameters.throttle_delay
        super(AgentProxyMixIn, self).__init__(*args, **kwargs)
        # If we're mixed in with a pure twistedsnmp AgentProxy, the timeout
        # parameter will have no effect, since it is an argument to
        # individual method calls.
        self.timeout = self.snmp_parameters.timeout

    def __repr__(self):
        cls = self.__class__
        return "<{module}.{klass}({ip}, ...) at {ident}>".format(
            module=cls.__module__,
            klass=cls.__name__,
            ip=repr(self.ip),
            ident=id(self))

    # hey, we're mimicking someone else's API here, never mind the bollocks:
    # pylint: disable=C0111,C0103
    @cache_for_session
    def getTable(self, *args, **kwargs):
        kwargs['maxRepetitions'] = self.snmp_parameters.max_repetitions
        return super(AgentProxyMixIn, self).getTable(*args, **kwargs)

    @throttled
    def _get(self, *args, **kwargs):
        return super(AgentProxyMixIn, self)._get(*args, **kwargs)

    @throttled
    def _walk(self, *args, **kwargs):
        return super(AgentProxyMixIn, self)._walk(*args, **kwargs)

    @throttled
    def _getbulk(self, *args, **kwargs):
        return super(AgentProxyMixIn, self)._getbulk(*args, **kwargs)
# pylint: disable=C0103
SNMPParameters = namedtuple('SNMPParameters',
                            ['timeout', 'max_repetitions', 'throttle_delay'])

# Defaults used when ipdevpoll's config supplies no overrides.
SNMP_DEFAULTS = SNMPParameters(timeout=1.5, max_repetitions=50,
                               throttle_delay=0)
# pylint: disable=W0212
def snmp_parameter_factory(host=None):
    """Returns specific SNMP parameters for `host`, or default values from
    ipdevpoll's config if host specific values aren't available.

    :returns: An SNMPParameters namedtuple.
    """
    from nav.ipdevpoll.config import ipdevpoll_conf as config
    section = 'snmp'
    params = SNMP_DEFAULTS._asdict()
    getters = {
        'max-repetitions': config.getint,
        'timeout': config.getfloat,
        'throttle-delay': config.getfloat,
    }
    for option, getter in getters.items():
        if config.has_option(section, option):
            # config uses dashes, the namedtuple uses underscores
            params[option.replace('-', '_')] = getter(section, option)
    return SNMPParameters(**params)
class SnmpError(Exception):
    """Base exception for SNMP-related errors in this module."""
    pass
| sigmunau/nav | python/nav/ipdevpoll/snmp/common.py | Python | gpl-2.0 | 5,571 |
from Language import BTO
from Base import BaseServer,BaseClient
from utils import change_workdir, remove_workdir, path_clt2svr, path_svr2clt
import socket,sys,os,os.path,time,signal,shutil
import string,re,getopt
########################################################################
class ADIC:
    """Adic-specific functions and data for client and server"""

    def __init__(self):
        self.LanguageClass = C                    # language
        self.legal_options = "d:vatCi:I:kD:hgsu"  # allowed options
        self.legal_longoptions = []               # allowed options with long names

    def read_script(self, filename, section_name):
        """Read the named section of an ADIC init script.

        Returns the text following ``[section_name]`` (up to the next
        character outside the allowed set), or "" when the section is
        missing.
        """
        handle = open(filename, 'r')
        contents = handle.read()
        handle.close()
        # search for [<section_name>] and succeeding text
        pattern = re.compile("\[" + section_name + "\](?P<data>[.\w\s=-]*)",
                             re.DOTALL | re.IGNORECASE | re.MULTILINE)
        match = pattern.search(contents)
        if match:
            return match.group('data')
        return ""
###########################################################################
class ADIC_Server(ADIC,BaseServer):
    """Adic-specific functions for the server.

    Rewrites client-side path names to server-side ones (and back) in
    command-line options and in C include directives while relaying files
    between client and server.
    """
#    def __init__(self):
#        ADIC.__init__(self.rfile,wfile)
#        self.rfile = rfile
#        self.wfile = wfile

    def check_options(self,options):
        # similar to the Base class version, but additionally check/modify include path
        optlist,files = getopt.getopt(options.split(),self.legal_options,self.legal_longoptions)
        files = map(path_clt2svr,files)
        options = []
        for o,a in optlist:
            if o=="-I":
                # -I include paths must be translated to server-side paths
                options.append(string.strip(o+" "+path_clt2svr(a)))
            else:
                options.append(string.strip(o+" "+a))
        return string.join(options+files)

    ###########################################################################
    # name   : check_include
    # purpose: apply 'transfcn' to include directives which occur in 'text'
    # args   : string text, function (string->string) transfcn
    # output : -
    # return : string modified_text
    def check_include(self,text,transfcn):
        c = self.LanguageClass(text)
        while 1:
            (start,end) = c.search_include()
            if start==-1: break
            str = c.text[start:end]
            c.replace(str,transfcn(str))
        return c.text

    def recv1(self):
        # new version of recv1: similar to the base version, but
        # with additional check/modification of path names.
        # Also check pathnames in include directives.
        # NOTE(review): if the initial readline/split fails, `filename` is
        # still unbound when the ValueError handler formats its message,
        # which would raise NameError instead -- TODO confirm.
        # NOTE(review): on any error path this returns None implicitly.
        try:
            [filename,filesize] = string.split(self.rfile.readline())
            filesize = int(filesize)
            filename = path_clt2svr(filename)
            data = self.check_include(self.rfile.read(filesize),path_clt2svr)
        except ValueError:
            self.send_error("error while reading file info ("+filename+")")
        except RuntimeError:
            self.send_error("bad filename in "+filename)
        except IOError:
            print "IOError"
        else:
            self.send_receipt()
            return (filename,data)

    def send1(self,filename,data):
        # like send1, but replace every "dotdot/" by "../" in path names
        filename = path_svr2clt(filename)
        data = self.check_include(data,path_svr2clt)
        # protocol: "<filename> <length>\n" followed by the raw file data
        self.wfile.write(filename+" "+str(len(data))+"\n")
        self.wfile.write(data)
        if not self.recv_receipt():
            print "error while sending file",filename
            raise IOError
class ADIC_Client(ADIC,BaseClient):
    """Adic-specific functions for the client"""

    def check_options(self,args):
        """Parse command-line args; return (command_line_parts, all_files).

        files1 is the list of files that appear at the end of the command
        line.  The command line, if legal, will be sent to the AD server in
        the 'options' field.  files2 (a superset of files1) may contain
        additional files to be transmitted (e.g. header files, scripts),
        but they don't appear in the command line.
        """
        optlist,files1 = getopt.getopt(args,self.legal_options,self.legal_longoptions)
        files2=[]
        ret_options = []
        for o,a in optlist:
            if o=="-i":
                # init script: also transmit it plus every file listed in
                # its [SOURCE_FILES] section
                files2.append(a)
                ret_options.append(string.strip(o+" "+a))
                files2+=string.split(self.read_script(a,"SOURCE_FILES"))
            else:
                ret_options.append(string.strip(o+" "+a))
        return (ret_options+files1,files1+files2)
| LighthouseHPC/lighthouse | src/oldbtoserver/BTO.py | Python | mit | 4,854 |
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2021 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django.shortcuts import render
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from django.views.generic import FormView
from base.ddd.utils.business_validator import MultipleBusinessExceptions
from base.views.common import display_success_messages, display_error_messages
from base.views.mixins import AjaxTemplateMixin
from ddd.logic.effective_class_repartition.commands import SearchAttributionCommand, \
SearchTutorsDistributedToClassCommand
from ddd.logic.effective_class_repartition.dtos import TutorAttributionToLearningUnitDTO, TutorClassRepartitionDTO
from infrastructure.messages_bus import message_bus_instance
from learning_unit.forms.classes.tutor_repartition import ClassTutorRepartitionForm, ClassRemoveTutorRepartitionForm, \
ClassEditTutorRepartitionForm
from learning_unit.views.learning_unit_class.common import CommonClassView
class TutorRepartitionView(CommonClassView, AjaxTemplateMixin, FormView):
    """Ajax view to add a tutor's charge repartition on an effective class."""
    template_name = "class/add_charge_repartition_inner.html"
    permission_required = 'attribution.can_change_class_repartition'
    form_class = ClassTutorRepartitionForm

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['learning_unit'] = self.learning_unit
        context['learning_unit_year'] = self.learning_unit_year
        context['effective_class'] = self.effective_class
        context['can_add_charge_repartition'] = self.request.user.has_perm(
            "attribution.can_change_class_repartition", self.get_permission_object()
        )
        return context

    @cached_property
    def tutor(self) -> 'TutorAttributionToLearningUnitDTO':
        """The attribution targeted by the URL's attribution_uuid."""
        unit_identity = self.effective_class.entity_id.learning_unit_identity
        cmd = SearchAttributionCommand(
            learning_unit_attribution_uuid=self.kwargs['attribution_uuid'],
            learning_unit_year=unit_identity.year,
            learning_unit_code=unit_identity.code
        )
        return message_bus_instance.invoke(cmd)

    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        kwargs.update(
            effective_class=self.effective_class,
            tutor=self.tutor,
            user=self.request.user,
        )
        return kwargs

    def post(self, request, *args, **kwargs):
        form = self.form_class(
            request.POST,
            user=request.user,
            tutor=self.tutor,
            effective_class=self.effective_class
        )
        try:
            form.save()
        except MultipleBusinessExceptions as exceptions:
            display_error_messages(request, [exc.message for exc in exceptions.exceptions])
        if form.errors:
            return render(request, self.template_name, {"form": form})
        display_success_messages(request, self.get_success_msg())
        return self._ajax_response()

    def get_success_url(self):
        return self.common_url_tabs()['url_class_tutors']

    def get_success_msg(self) -> str:
        return _("Repartition added for %(tutor)s (%(function)s)") % {
            'tutor': self.tutor.full_name,
            'function': self.tutor.function_text
        }
class TutorRepartitionRemoveView(TutorRepartitionView):
    """Ajax view to delete a tutor's charge repartition on an effective class."""
    template_name = "class/remove_charge_repartition_inner.html"
    permission_required = 'attribution.can_delete_class_repartition'
    form_class = ClassRemoveTutorRepartitionForm

    # NOTE: the former get_form_kwargs() override only re-assigned 'tutor'
    # and 'user' with exactly the values the inherited implementation
    # already sets, so it was removed as redundant.

    def get_success_msg(self) -> str:
        return _("Repartition deleted for %(tutor)s (%(function)s)") % {
            'tutor': self.tutor.full_name,
            'function': self.tutor.function_text
        }
class TutorRepartitionEditView(TutorRepartitionView):
    """Ajax view to edit an existing tutor charge repartition."""
    template_name = "class/add_charge_repartition_inner.html"
    permission_required = 'attribution.can_change_class_repartition'
    form_class = ClassEditTutorRepartitionForm

    @cached_property
    def tutor(self) -> 'TutorClassRepartitionDTO':
        """The distributed tutor matching the URL's attribution_uuid
        (None when no match is found)."""
        command = SearchTutorsDistributedToClassCommand(
            learning_unit_code=self.learning_unit.code,
            learning_unit_year=self.learning_unit.year,
            class_code=self.effective_class.class_code,
        )
        distributed_tutors = message_bus_instance.invoke(command)
        wanted_uuid = str(self.kwargs['attribution_uuid'])
        return next(
            (tutor for tutor in distributed_tutors
             if str(tutor.attribution_uuid) == wanted_uuid),
            None,
        )

    def get_success_msg(self) -> str:
        return _("Repartition edited for %(tutor)s (%(function)s)") % {
            'tutor': self.tutor.full_name,
            'function': self.tutor.function_text
        }
| uclouvain/OSIS-Louvain | learning_unit/views/learning_unit_class/tutor_repartition.py | Python | agpl-3.0 | 6,168 |
import sys,getopt,string
filenames = None
fileOut = None
filePath = None
blocksize = 1024
i = 0
license ='/*\n'\
' ChibiOS - Copyright (C) 2006..2017 Giovanni Di Sirio\n'\
'\n'\
' Licensed under the Apache License, Version 2.0 (the "License");\n'\
' you may not use this file except in compliance with the License.\n'\
' You may obtain a copy of the License at\n'\
'\n'\
' http://www.apache.org/licenses/LICENSE-2.0\n'\
'\n'\
' Unless required by applicable law or agreed to in writing, software\n'\
' distributed under the License is distributed on an "AS IS" BASIS,\n'\
' WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'\
' See the License for the specific language governing permissions and\n'\
' limitations under the License.\n'\
'*/\n'
opts,args = getopt.getopt(sys.argv[1:],'f:o:p:')
for o,a in opts:
if o == '-f':
filenames = a
if o == '-o':
fileOut = a
if o == '-p':
filePath = a
filenames = filenames.split(",")
fc = open (filePath+"/"+fileOut+".c","w")
fh = open (filePath+"/"+fileOut+".h","w")
fc.write(license)
fc.write("\n#include \"hal.h\"\n\n")
fh.write(license)
fh.write("#ifndef TEST_"+fileOut.upper()+"_H_\n")
fh.write("#define TEST_"+fileOut.upper()+"_H_\n\n")
for fn in filenames:
print "opening ",fn
i = 0
f = open(fn+".enc","rb")
block = f.read(blocksize)
d = fn.split("_")
fc.write("const uint8_t ref"+d[0].upper()+"_"+d[1].upper()+"_"+d[2].upper()+"[]={\n")
fh.write("extern const uint8_t ref"+d[0].upper()+"_"+d[1].upper()+"_"+d[2].upper()+"[];\n")
str = ""
for ch in block:
i += 1
str += "0x"+format(ord(ch), '02X')+","
if i == 10:
str += "\n"
i = 0
fc.write(str)
fc.write("\n};\n")
fh.write("#endif //TEST_"+fileOut.upper()+"_H_\n")
| serdzz/ChibiOS | test/crypto/ref/genfile.py | Python | gpl-3.0 | 1,842 |
#!/usr/bin/env python
from __future__ import print_function
from collections import OrderedDict
__author__ = 'Taio'
def meminfo():
    ''' Return the information in /proc/meminfo
    as a dictionary mapping field name to its value string
    (e.g. "MemTotal" -> "8048 kB") '''
    meminfo = OrderedDict()
    with open('/proc/meminfo') as f:
        for line in f:
            # split each line only once instead of twice
            parts = line.split(':')
            meminfo[parts[0]] = parts[1].strip()
    return meminfo
if __name__=='__main__':
    # Bind the result to a separate name so the meminfo() function
    # itself is not shadowed.
    info = meminfo()
    print('Total memory: {0}'.format(info['MemTotal']))
    print('Free memory: {0}'.format(info['MemFree']))
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import os
import alembic.command
from alembic.script import ScriptDirectory
from flask import current_app
from flask_migrate import Migrate, stamp
from flask_pluginengine import current_plugin
from indico.core.db import db
from indico.core.db.sqlalchemy.util.management import create_all_tables, get_all_tables
from indico.core.db.sqlalchemy.util.queries import has_extension
from indico.core.plugins import plugin_engine
from indico.util.console import cformat
migrate = Migrate(db=db)
class PluginScriptDirectory(ScriptDirectory):
    """Like `ScriptDirectory` but lets you override the paths from outside.

    This is a pretty ugly hack but alembic doesn't give us a nice way to do it...
    """

    # Overridden from outside (see prepare_db) before alembic uses the class.
    dir = None
    versions = None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Point the script location at the plugin-provided directory.
        self.dir = PluginScriptDirectory.dir
        # use __dict__ since it's a memoized property
        self.__dict__['_version_locations'] = [current_plugin.alembic_versions_path]

    @classmethod
    def from_config(cls, config):
        # Alembic also instantiates ScriptDirectory via from_config(), so
        # the same path overrides must be applied on that path too.
        instance = super().from_config(config)
        instance.dir = PluginScriptDirectory.dir
        instance.__dict__['_version_locations'] = [current_plugin.alembic_versions_path]
        return instance
def _require_extensions(*names):
    """Check that all given Postgres extensions are installed.

    Prints instructions for creating the missing ones and returns False
    if any are absent; returns True otherwise.
    """
    missing = sorted(name for name in names if not has_extension(db.engine, name))
    if missing:
        print(cformat('%{red}Required Postgres extensions missing: {}').format(', '.join(missing)))
        print(cformat('%{yellow}Create them using these SQL commands (as a Postgres superuser):'))
        for name in missing:
            print(cformat('%{white!} CREATE EXTENSION {};').format(name))
        return False
    return True
def _require_pg_version(version):
    """Check that the Postgres server version is at least `version`
    (a dotted string such as '9.6'); print a warning and return False
    otherwise."""
    # convert version string such as '9.4.10' to `90410` which is the
    # format used by server_version_num
    # FIXME: this will not work for versions >= 10, since the second segment there is the patch version
    # but once we require a newer postgres, we can ditch the logic here and only use the major version
    # since that will be the only relevant version number
    req_version = sum(segment * 10**(4 - 2*i) for i, segment in enumerate(map(int, version.split('.'))))
    cur_version = db.engine.execute("SELECT current_setting('server_version_num')::int").scalar()
    if cur_version >= req_version:
        return True
    print(cformat('%{red}Postgres version too old; you need at least {} (or newer)').format(version))
    return False
def _require_encoding(encoding):
    """Check that the database's server encoding is `encoding`; print
    remediation instructions and return False otherwise."""
    current = db.engine.execute("SELECT current_setting('server_encoding')").scalar()
    if current != encoding:
        print(cformat('%{red}Database encoding must be {}; got {}').format(encoding, current))
        print(cformat('%{yellow}Recreate your database using `createdb -E {} -T template0 ...`').format(encoding))
        return False
    return True
def prepare_db(empty=False, root_path=None, verbose=True):
    """Initialize an empty database (create tables, set alembic rev to HEAD).

    :param empty: create only the schema, without initial data
    :param root_path: application root path (defaults to the current app's)
    :param verbose: whether to print progress information
    """
    # Refuse to run against servers that don't meet the minimum requirements.
    if not _require_pg_version('9.6'):
        return
    if not _require_encoding('UTF8'):
        return
    if not _require_extensions('unaccent', 'pg_trgm'):
        return
    root_path = root_path or current_app.root_path
    tables = get_all_tables(db)
    if 'alembic_version' not in tables['public']:
        # Fresh database: mark core and plugin migrations as up to date.
        if verbose:
            print(cformat('%{green}Setting the alembic version to HEAD'))
        stamp(directory=os.path.join(root_path, 'migrations'), revision='heads')
        # Patch alembic so plugin-provided migration paths are honoured.
        PluginScriptDirectory.dir = os.path.join(root_path, 'core', 'plugins', 'alembic')
        alembic.command.ScriptDirectory = PluginScriptDirectory
        plugin_msg = cformat("%{cyan}Setting the alembic version of the %{cyan!}{}%{reset}%{cyan} "
                             "plugin to HEAD%{reset}")
        for plugin in plugin_engine.get_active_plugins().values():
            if not os.path.exists(plugin.alembic_versions_path):
                continue
            if verbose:
                print(plugin_msg.format(plugin.name))
            with plugin.plugin_context():
                stamp(revision='heads')
        # Retrieve the table list again, just in case we created unexpected tables
        tables = get_all_tables(db)
    tables['public'] = [t for t in tables['public'] if not t.startswith('alembic_version')]
    if any(tables.values()):
        # Non-empty database: bail out instead of clobbering existing data.
        if verbose:
            print(cformat('%{red}Your database is not empty!'))
            print(cformat('%{yellow}If you just added a new table/model, create an alembic revision instead!'))
            print()
            print('Tables in your database:')
            for schema, schema_tables in sorted(tables.items()):
                for t in schema_tables:
                    print(cformat('  * %{cyan}{}%{reset}.%{cyan!}{}%{reset}').format(schema, t))
        return
    create_all_tables(db, verbose=verbose, add_initial_data=(not empty))
| pferreir/indico | indico/core/db/sqlalchemy/migration.py | Python | mit | 5,184 |
# Twisted Imports
from twisted.internet import defer
from twisted.internet.protocol import Factory
# System Imports
from time import time as now
# Package Imports
from ..machine import Machine, Property, Stream, ui
from ..protocol.basic import QueuedLineReceiver
__all__ = ["K120", "S100"]
class K120LineReceiver (QueuedLineReceiver):
    delimiter = b"\r"

    # Knauer pump K120 returns "S" then two bytes (no delimiter)
    # in response to a "S?" query. Induce a lineReceived in this case:
    def dataReceived (self, data):
        # Let the normal delimiter-based line handling run first.
        r = QueuedLineReceiver.dataReceived(self, data)

        try:
            # If the command in flight is the status query and at least
            # three raw bytes have accumulated, deliver them as a
            # synthetic "line".
            if r is None \
            and self._current.line == "S?" \
            and len(self._buffer) >= 3:
                line = self._buffer[:3]
                self._buffer = self._buffer[3:]
                self.lineReceived(line)
        except AttributeError:
            # No command in flight (no _current): nothing special to do.
            pass

        return r

    def unexpectedMessage (self, line):
        # Messages the pump emits on its own, outside a query/response cycle.
        if line == "H":
            print ("Pump stopped due to an external stop signal")
        elif line == "R":
            print ("External stop signal removed")
        elif line == "E1":
            print ("Motor blocked error")
        elif line == "E2":
            print ("Manual stop attempt ignored")
class K120 (Machine):
    """
    Control class for a Knauer WellChrom K120 HPLC pump.

    Serial port settings:
        Baud rate 9600, 8 bit, no parity

    Requires a crossover serial cable.
    """

    protocolFactory = Factory.forProtocol(K120LineReceiver)
    name = "Knauer K120 HPLC Pump"

    def setup (self):
        # Property setters translate high-level values into pump commands.
        def set_power (power):
            # "M1" switches the motor on, "M0" switches it off.
            return self.protocol.write("M%d" % int(power == "on"))

        def set_target (target):
            d = defer.Deferred()

            def interpret (response):
                # The pump replies "OK" on success; anything else means the
                # requested flow rate was rejected.
                if response == "OK":
                    d.callback("OK")
                else:
                    d.errback(Exception("Flow rate too high: %s" % response))

            # "F<n>" sets the flow-rate target in uL/min.
            self.protocol.write("F%d" % int(target)).addCallback(interpret)
            return d

        # setup variables
        self.status = Property(title = "Status", type = str)
        self.power = Property(title = "Power", type = str, options = ("on", "off"), setter = set_power)
        self.target = Property(title = "Flow rate target", type = int, unit = "uL/min", min = 0, max = 50000, setter = set_target)
        self.rate = Stream(title = "Flow rate", type = int, unit = "uL/min")

        self.ui = ui(
            properties = [self.rate]
        )

    def start (self):
        # Check that the version is correct.
        def interpret_version (result):
            if result not in ["V03.30", "V3.1"]:
                raise Exception("Incompatible pump version: {:s}".format(result))

        d = self.protocol.write("V?").addCallback(interpret_version)

        # setup monitor on a tick to update variables
        def interpretFlowrate (result):
            # "F?" replies "F<rate in mL/min>".
            if result[0] != "F":
                print ("Knauer Error: F? = {:s}".format(result))
                return

            # Convert mL/min to uL/min.
            target = float(result[1:]) * 1000
            self.target._push(target) # uL/min
            # The actual rate is zero whenever the motor is off.
            self.rate._push(target if self.power.value == "on" else 0)

        def interpretStatus (result):
            power, error = ord(result[1]), ord(result[2])
            # Two bytes are sent back in binary form.
            #
            # The first is the status byte: 1 = on, 0 = off
            # (allegedly: it turns out to be 48 and 16...)
            #
            # The second is the latest error code:
            # 0 = no error, 1 = motor blocked, 2 = stop via keypad
            # N.B. I have not been able to make the pump send one of
            # these errors so this may not work correctly!
            if power == 48:
                self.power._push("on")
            elif power == 16:
                self.power._push("off")

            if error == 0:
                self.status._push("ok")
            elif error == 1:
                self.status._push("motor-blocked")
            elif error == 2:
                self.status._push("manual-stop")

        def monitor ():
            # Poll status and flow rate once per tick.
            self.protocol.write("S?").addCallback(interpretStatus)
            self.protocol.write("F?").addCallback(interpretFlowrate)

        # Poll every second.
        self._tick(monitor, 1)

        return d

    def stop (self):
        self._stopTicks()

    def reset (self):
        # Turn the pump off and zero the flow-rate target.
        return defer.gatherResults([
            self.power.set("off"),
            self.target.set(0)
        ])

    def pause (self):
        # Remember the current power state so resume() can restore it.
        self._pauseState = self.power.value
        return self.power.set("off")

    def resume (self):
        return self.power.set(self._pauseState)

    def allowKeypad (self, allow):
        # "S0" enables the front keypad, "S1" locks it out.
        return self.protocol.write("S%d" % int(not allow))
class S100LineReceiver (QueuedLineReceiver):
    # FIX: use a bytes delimiter, consistent with K120LineReceiver above;
    # Twisted line receivers compare the delimiter against a bytes buffer,
    # so a str delimiter breaks on Python 3.
    delimiter = b"\r"
class S100 (Machine):
    """
    Control class for a Knauer Smartline S100 HPLC pump.

    Serial port settings:
        Baud rate 9600, 8 bit, no parity

    Requires a crossover serial cable.
    """

    protocolFactory = Factory.forProtocol(S100LineReceiver)
    name = "Knauer S100 HPLC Pump"

    def setup (self):
        # Property setters translate high-level values into pump commands.
        def set_power (power):
            cmd = "ON" if power == "on" else "OFF"
            return self.protocol.write(cmd)

        def set_target (target):
            # Flow-rate target in uL/min.
            target = int(target)
            return self.protocol.write("FLOW:%d" % target)

        # setup variables
        self.status = Property(title = "Status", type = str)
        self.power = Property(title = "Power", type = str, options = ("on", "off"), setter = set_power)
        self.target = Property(title = "Flow rate target", type = int, unit = "uL/min", min = 0, max = 50000, setter = set_target)
        self.pressure = Stream(title = "Pressure", type = int, unit = "mbar")
        self.rate = Stream(title = "Flow rate", type = int, unit = "uL/min")

        self.ui = ui(
            traces = [{
                "title": "Pressure",
                "unit":  self.pressure.unit,
                "traces": [self.pressure],
                "colours": ["#0c4"]
            }],
            properties = [self.rate]
        )

    def start (self):
        # Check that the version is correct.
        def interpret_version (result):
            # Reply format: "IDENTIFY:<...>,KNAUER,...,1,03"
            result = result[9:].split(",")

            try:
                if result[1] != "KNAUER" or result[-2:] != ["1", "03"]:
                    raise Exception("Incompatible version: %s" % \
                        ".".join(result[-2:]))
            except IndexError:
                raise Exception("Incompatible version: %s" % result)

        d = self.protocol.write("IDENTIFY?").addCallback(interpret_version)

        # setup monitor on a tick to update variables
        def interpretStatus (result):
            # Reply format: "STATUS:<on>,<flow>,<pressure>,...,<error flags>"
            if result[:7] == "STATUS:":
                status = result[7:].split(",")
                on = int(status[0])

                self.power._push("on" if on else "off")
                self.target._push(float(status[1]))
                # The actual rate is zero whenever the pump is off.
                self.rate._push(float(status[1]) if on else 0)
                #self.pressure._push(float(status[2])) # 0.1 bar
                #self.pressure._push(float(status[2]) / 10) # bar
                self.pressure._push(float(status[2]) * 100) # mbar

                # Error flags take precedence over the running/idle state.
                if int(status[5]):
                    self.status._push("overpressure")
                elif int(status[6]):
                    self.status._push("underpressure")
                elif int(status[7]):
                    self.status._push("overcurrent")
                elif int(status[8]):
                    self.status._push("undercurrent")
                elif on:
                    self.status._push("running")
                else:
                    self.status._push("idle")

        def monitor ():
            self.protocol.write("STATUS?").addCallback(interpretStatus)

        # Poll every second.
        self._tick(monitor, 1)

        return d

    def stop (self):
        self._stopTicks()

    def reset (self):
        # Turn the pump off and zero the flow-rate target.
        return defer.gatherResults([
            self.power.set("off"),
            self.target.set(0)
        ])

    def pause (self):
        # Remember the current power state so resume() can restore it.
        self._pauseState = self.power.value
        return self.power.set("off")

    def resume (self):
        return self.power.set(self._pauseState)
| richardingham/octopus | octopus/manufacturer/knauer.py | Python | mit | 6,864 |
import pytest
from pandas.core.frame import DataFrame
@pytest.fixture
def dataframe():
    """Minimal two-column DataFrame shared by the validation tests."""
    data = {'a': [1, 2], 'b': [3, 4]}
    return DataFrame(data)
class TestDataFrameValidate(object):
    """Tests for error handling related to data types of method arguments."""

    @pytest.mark.parametrize("func", ["query", "eval", "set_index",
                                      "reset_index", "dropna",
                                      "drop_duplicates", "sort_values"])
    @pytest.mark.parametrize("inplace", [1, "True", [1, 2, 3], 5.0])
    def test_validate_bool_args(self, dataframe, func, inplace):
        msg = "For argument \"inplace\" expected type bool"
        kwargs = dict(inplace=inplace)

        # Some of the methods need a mandatory argument on top of the
        # (invalid) ``inplace`` flag.
        required_args = {
            "query": {"expr": "a > b"},
            "eval": {"expr": "a + b"},
            "set_index": {"keys": ["a"]},
            "sort_values": {"by": ["a"]},
        }
        kwargs.update(required_args.get(func, {}))

        with pytest.raises(ValueError, match=msg):
            getattr(dataframe, func)(**kwargs)
| GuessWhoSamFoo/pandas | pandas/tests/frame/test_validate.py | Python | bsd-3-clause | 1,063 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial migration: creates the newsletter ``Assinante`` (subscriber)
    model with a unique e-mail and an auto-set subscription timestamp.

    Auto-generated by Django; edit with care.
    """

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Assinante',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('email', models.EmailField(unique=True, max_length=254, verbose_name=b'E-mail')),
                ('data_assinatura', models.DateTimeField(auto_now_add=True, verbose_name=b'Data da Assinatura')),
            ],
            options={
                'verbose_name': 'Assinante',
                'verbose_name_plural': 'Assinantes',
            },
        ),
    ]
| pydawan/protetores_bucais | protetores_bucais/apps/newsletter/migrations/0001_initial.py | Python | mit | 773 |
# coding=utf-8
import logging

# Prefer the `requests` library when available; fall back to the standard
# library's urllib otherwise.  `mode` records which backend was chosen.
try:
    import requests
    mode = "requests"
    # Suppress logging
    logging.getLogger("requests.packages.urllib3").setLevel(logging.WARNING)
except ImportError:
    requests = None
    from urllib.request import urlopen
    mode = "urllib"

log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
def requests_get(url):
    """Fetch *url* with the ``requests`` library and return the body text."""
    response = requests.get(url)
    return response.text
def urllib_get(url):
    # NOTE(review): unlike requests_get(), this returns the raw
    # HTTPResponse object rather than decoded text, so Connector.get()
    # yields different types depending on the backend -- TODO confirm
    # whether `.read()` + decode was intended here.
    return urlopen(url)
class Connector:
    """Wraps a URL-fetching function behind a uniform ``get`` interface."""

    def __init__(self, fn):
        self.requester = fn

    @classmethod
    def find_best(cls):
        """Return a Connector for the best available HTTP backend
        (None if neither backend was detected)."""
        if mode == "requests":
            return Connector(requests_get)
        elif mode == "urllib":
            return Connector(urllib_get)

    @classmethod
    def find_requests(cls):
        return Connector(requests_get)

    # BUG FIX: a stray line-continuation backslash after the return above
    # glued the following decorator onto the return statement, producing a
    # SyntaxError and making find_urllib unusable.
    @classmethod
    def find_urllib(cls):
        return Connector(urllib_get)

    def get(self, url):
        """Fetch *url* using the configured requester function."""
        return self.requester(url)
| DefaltSimon/SSKJpy | sskjpy/connector.py | Python | mit | 938 |
#!/usr/bin/python
# -*- encoding: utf-8 -*-
"""
"""
from functools import partial
import inspect
import re
from types import FunctionType
from tornado.web import URLSpec, Application, RequestHandler
__author__ = 'Martin Martimeo <martin@martimeo.de>'
__date__ = '16.06.13 - 23:48'
SUPPORTED_METHODS = RequestHandler.SUPPORTED_METHODS
class Route(URLSpec):
    """
    Represent a route

    A Route doubles as a tornado URLSpec and as a decorator: functions are
    registered per HTTP method on it, and calling it with an Application
    instantiates (and patches) the enclosing handler class.
    """

    def __init__(self, url: str=None, **kwargs):
        # Register *this object* as the handler; dispatch happens through
        # __call__ / __get__ below.
        super().__init__(url, self, kwargs=kwargs)
        self._url = url
        # maps lowercase HTTP method name -> registered function
        self._routing = {}

    @property
    def url(self):
        """
        URL of Route (plain without modification)
        """
        return self._url

    @url.setter
    def url(self, pattern: str):
        """
        Set a new url as pattern

        :param pattern: New url value
        """
        self._url = pattern

        # URLSpec expects the regex to be anchored at the end.
        if not pattern.endswith('$'):
            pattern += '$'
        self.regex = re.compile(pattern)
        assert len(self.regex.groupindex) in (0, self.regex.groups), \
            ("groups in url regexes must either be all named or all "
             "positional: %r" % self.regex.pattern)
        self._path, self._group_count = self._find_groups()

    def __getattribute__(self, item):
        # route.post / route.put / ... act as method registrars.
        # NOTE(review): for a missing attribute that is NOT an HTTP method
        # name this silently returns None instead of raising
        # AttributeError -- TODO confirm this is intended.
        try:
            return super().__getattribute__(item)
        except AttributeError:
            if item.upper() in SUPPORTED_METHODS:
                return partial(self.register, method=item.lower())

    def register(self, function: FunctionType, method: str):
        """
        Register a method function

        :param function: Function to be registered
        :param method: Name of method
        """
        self._routing[method] = function
        return self

    def __get__(self, instance, cls):
        # Accessed through a handler instance: behave like the registered
        # GET function bound to that instance.
        if instance is None:
            return self
        return self._routing['get'].__get__(instance, cls)

    def __call__(self, *args, **kwargs):
        # Dual role: @route decorator (called with a function) or handler
        # factory (called by tornado with the Application).
        if isinstance(args[0], FunctionType):
            self._routing['get'] = args[0]
            self.__module__ = inspect.getmodule(args[0])
            # remember the name of the enclosing handler class
            self.__clsname__ = args[0].__qualname__.rsplit(".", 3)[-2]
            self.name = "%s.%s" % (self.__module__, args[0].__qualname__)
            return self

        elif isinstance(args[0], Application):
            try:
                self.handler = self.cls(*args, **kwargs)
            except AttributeError:
                # resolve the enclosing handler class lazily on first use
                self.cls = getattr(self.__module__, self.__clsname__)
                self.handler = self.cls(*args, **kwargs)
            # bind the registered method functions onto the handler instance
            for method, function in self._routing.items():
                setattr(self.handler, method, function.__get__(self.handler))
            self.handler.route = self
            return self.handler

        raise Exception()

    def __repr__(self):
        return '%s(%s, kwargs=%r, name=%r)' % \
               (self.__class__.__name__, self.url, self.kwargs, self.name)

    @classmethod
    def isroute(cls, other: object) -> bool:
        """
        Is other an instance of Route?

        :param other: Object to compare
        """
        return isinstance(other, cls)
| tornado-utils/tornado-menumaker | tornado_menumaker/route.py | Python | agpl-3.0 | 3,154 |
# encoding: utf-8
# module samba.dcerpc.samr
# from /usr/lib/python2.7/dist-packages/samba/dcerpc/samr.so
# by generator 1.135
""" samr DCE/RPC """
# imports
import dcerpc as __dcerpc
import talloc as __talloc
class ValidatePasswordInfo(__talloc.Object):
    # Machine-generated binding stub for the SAMR ValidatePasswordInfo
    # structure; the real implementation lives in the C extension, so all
    # signatures below are placeholders restored by the stub generator.
    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    @staticmethod # known case of __new__
    def __new__(S, *more): # real signature unknown; restored from __doc__
        """ T.__new__(S, ...) -> a new object with type S, a subtype of T """
        pass

    # Structure fields exposed as properties (getter/setter/deleter are
    # placeholders; actual accessors are provided by the extension).
    bad_password_time = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    bad_pwd_count = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    fields_present = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    last_password_change = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    lockout_time = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    pwd_history = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    pwd_history_len = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/samba/dcerpc/samr/ValidatePasswordInfo.py | Python | gpl-2.0 | 1,308 |
from django.conf import settings
from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.utils import dateformat
from django.utils.html import escape, conditional_escape
from django.utils.text import capfirst
from django.utils.safestring import mark_safe
from django.utils.translation import get_date_formats
from django.utils.encoding import smart_unicode, force_unicode
from django.template import Library
from django.contrib.admin.templatetags.admin_list import _boolean_icon, result_headers
register = Library()
MPTT_ADMIN_LEVEL_INDENT = getattr(settings, 'MPTT_ADMIN_LEVEL_INDENT', 10)
###
# Ripped from contrib.admin's items_for_result tag.
# The only difference is we're indenting nodes according to their level.
def mptt_items_for_result(cl, result, form):
    """
    Yield the rendered <th>/<td> cells for one change-list row, indenting
    one chosen column according to the node's MPTT tree level.

    Copied from django.contrib.admin's items_for_result (see module
    comment); only the level-based padding logic differs.
    """
    first = True
    pk = cl.lookup_opts.pk.attname
    # figure out which field to indent
    mptt_indent_field = getattr(cl.model_admin, 'mptt_indent_field', None)
    if not mptt_indent_field:
        for field_name in cl.list_display:
            try:
                f = cl.lookup_opts.get_field(field_name)
            except models.FieldDoesNotExist:
                if mptt_indent_field is None:
                    attr = getattr(result, field_name, None)
                    if callable(attr):
                        # first callable field, use this if we can't find any model fields
                        mptt_indent_field = field_name
            else:
                # first model field, use this one
                mptt_indent_field = field_name
                break
    # figure out how much to indent
    mptt_level_indent = getattr(cl.model_admin, 'mptt_level_indent', MPTT_ADMIN_LEVEL_INDENT)
    for field_name in cl.list_display:
        row_class = ''
        f = None
        try:
            f = cl.lookup_opts.get_field(field_name)
        except models.FieldDoesNotExist:
            # For non-field list_display values, the value is either a method,
            # property or returned via a callable.
            try:
                if callable(field_name):
                    attr = field_name
                    value = attr(result)
                elif hasattr(cl.model_admin, field_name) and \
                    not field_name == '__str__' and not field_name == '__unicode__':
                    attr = getattr(cl.model_admin, field_name)
                    value = attr(result)
                else:
                    attr = getattr(result, field_name)
                    if callable(attr):
                        value = attr()
                    else:
                        value = attr
                allow_tags = getattr(attr, 'allow_tags', False)
                boolean = getattr(attr, 'boolean', False)
                if boolean:
                    allow_tags = True
                    result_repr = _boolean_icon(value)
                else:
                    result_repr = smart_unicode(value)
            except (AttributeError, ObjectDoesNotExist):
                result_repr = EMPTY_CHANGELIST_VALUE
            else:
                # Strip HTML tags in the resulting text, except if the
                # function has an "allow_tags" attribute set to True.
                if not allow_tags:
                    result_repr = escape(result_repr)
                else:
                    result_repr = mark_safe(result_repr)
        else:
            field_val = getattr(result, f.attname)
            if isinstance(f.rel, models.ManyToOneRel):
                if field_val is not None:
                    result_repr = escape(getattr(result, f.name))
                else:
                    result_repr = EMPTY_CHANGELIST_VALUE
            # Dates and times are special: They're formatted in a certain way.
            elif isinstance(f, models.DateField) or isinstance(f, models.TimeField):
                if field_val:
                    (date_format, datetime_format, time_format) = get_date_formats()
                    if isinstance(f, models.DateTimeField):
                        result_repr = capfirst(dateformat.format(field_val, datetime_format))
                    elif isinstance(f, models.TimeField):
                        result_repr = capfirst(dateformat.time_format(field_val, time_format))
                    else:
                        result_repr = capfirst(dateformat.format(field_val, date_format))
                else:
                    result_repr = EMPTY_CHANGELIST_VALUE
                row_class = ' class="nowrap"'
            # Booleans are special: We use images.
            elif isinstance(f, models.BooleanField) or isinstance(f, models.NullBooleanField):
                result_repr = _boolean_icon(field_val)
            # DecimalFields are special: Zero-pad the decimals.
            elif isinstance(f, models.DecimalField):
                if field_val is not None:
                    result_repr = ('%%.%sf' % f.decimal_places) % field_val
                else:
                    result_repr = EMPTY_CHANGELIST_VALUE
            # Fields with choices are special: Use the representation
            # of the choice.
            elif f.flatchoices:
                result_repr = dict(f.flatchoices).get(field_val, EMPTY_CHANGELIST_VALUE)
            else:
                result_repr = escape(field_val)
        if force_unicode(result_repr) == '':
            result_repr = mark_safe(' ')
        if field_name == mptt_indent_field:
            # The mptt-specific bit: pad the cell by the node's tree depth.
            level = getattr(result, result._mptt_meta.level_attr)
            padding_attr = ' style="padding-left:%spx"' % (5 + mptt_level_indent * level)
        else:
            padding_attr = ''
        # If list_display_links not defined, add the link tag to the first field
        if (first and not cl.list_display_links) or field_name in cl.list_display_links:
            table_tag = {True:'th', False:'td'}[first]
            first = False
            url = cl.url_for_result(result)
            # Convert the pk to something that can be used in Javascript.
            # Problem cases are long ints (23L) and non-ASCII strings.
            if cl.to_field:
                attr = str(cl.to_field)
            else:
                attr = pk
            value = result.serializable_value(attr)
            result_id = repr(force_unicode(value))[1:]
            yield mark_safe(u'<%s%s%s><a href="%s"%s>%s</a></%s>' % \
                (table_tag, row_class, padding_attr, url, (cl.is_popup and ' onclick="opener.dismissRelatedLookupPopup(window, %s); return false;"' % result_id or ''), conditional_escape(result_repr), table_tag))
        else:
            # By default the fields come from ModelAdmin.list_editable, but if we pull
            # the fields out of the form instead of list_editable custom admins
            # can provide fields on a per request basis
            if form and field_name in form.fields:
                bf = form[field_name]
                result_repr = mark_safe(force_unicode(bf.errors) + force_unicode(bf))
            else:
                result_repr = conditional_escape(result_repr)
            yield mark_safe(u'<td%s%s>%s</td>' % (row_class, padding_attr, result_repr))
    if form:
        yield mark_safe(u'<td>%s</td>' % force_unicode(form[cl.model._meta.pk.name]))
def mptt_results(cl):
    """Yield one list of rendered cells per row in the change list,
    pairing each result with its edit form when a formset is present."""
    if cl.formset:
        pairs = zip(cl.result_list, cl.formset.forms)
    else:
        pairs = ((res, None) for res in cl.result_list)
    for res, form in pairs:
        yield list(mptt_items_for_result(cl, res, form))
# custom template is merely so we can strip out sortable-ness from the column headers
@register.inclusion_tag("admin/mptt_change_list_results.html")
def mptt_result_list(cl):
    """
    Displays the headers and data list together
    """
    return {
        'cl': cl,
        'result_headers': list(result_headers(cl)),
        'results': list(mptt_results(cl)),
    }
| samluescher/django-media-tree | media_tree/contrib/legacy_mptt_support/templatetags/mptt_admin.py | Python | bsd-3-clause | 8,088 |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import warnings
import unittest
import os
from pymatgen.alchemy.transmuters import CifTransmuter, PoscarTransmuter
from pymatgen.alchemy.filters import ContainsSpecieFilter
from pymatgen.transformations.standard_transformations import \
SubstitutionTransformation, RemoveSpeciesTransformation, \
OrderDisorderedStructureTransformation
from pymatgen.transformations.advanced_transformations import \
SuperTransformation
'''
Created on Mar 5, 2012
'''
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Mar 5, 2012"
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..",
'test_files')
class CifTransmuterTest(unittest.TestCase):
    """Check that CifTransmuter applies substitutions to every structure
    found in a multi-structure CIF file."""

    def setUp(self):
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.simplefilter("default")

    def test_init(self):
        trans = [SubstitutionTransformation({"Fe": "Mn", "Fe2+": "Mn2+"})]
        tsc = CifTransmuter.from_filenames(
            [os.path.join(test_dir, "MultiStructure.cif")], trans)
        self.assertEqual(len(tsc), 2)
        expected_ans = set(["Mn", "O", "Li", "P"])
        for s in tsc:
            els = set(el.symbol
                      for el in s.final_structure.composition.elements)
            self.assertEqual(expected_ans, els)
class PoscarTransmuterTest(unittest.TestCase):
    """Exercise PoscarTransmuter construction, transformation chaining,
    filtering and parameter/tag bookkeeping against bundled POSCARs."""
    def test_init(self):
        # Two copies of the same POSCAR with one substitution applied.
        trans = []
        trans.append(SubstitutionTransformation({"Fe": "Mn"}))
        tsc = PoscarTransmuter.from_filenames([os.path.join(test_dir,
                                                            "POSCAR"),
                                               os.path.join(test_dir,
                                                            "POSCAR")],
                                              trans)
        self.assertEqual(len(tsc), 2)
        expected_ans = set(["Mn", "O", "P"])
        for s in tsc:
            els = set([el.symbol
                       for el in s.final_structure.composition.elements])
            self.assertEqual(expected_ans, els)
    def test_transmuter(self):
        # Order matters: each append_transformation acts on the output of
        # the previous one, and extend_collection multiplies the set.
        tsc = PoscarTransmuter.from_filenames(
            [os.path.join(test_dir, "POSCAR")])
        tsc.append_transformation(RemoveSpeciesTransformation('O'))
        self.assertEqual(len(tsc[0].final_structure), 8)
        tsc.append_transformation(SubstitutionTransformation({"Fe":
                                                              {"Fe2+": 0.25,
                                                               "Mn3+": .75},
                                                              "P": "P5+"}))
        tsc.append_transformation(OrderDisorderedStructureTransformation(),
                                  extend_collection=50)
        self.assertEqual(len(tsc), 4)
        t = SuperTransformation([SubstitutionTransformation({"Fe2+": "Mg2+"}),
                                 SubstitutionTransformation({"Fe2+": "Zn2+"}),
                                 SubstitutionTransformation({"Fe2+": "Be2+"})])
        tsc.append_transformation(t, extend_collection=True)
        self.assertEqual(len(tsc), 12)
        for x in tsc:
            self.assertEqual(len(x), 5, 'something might be wrong with the number of transformations in the history') #should be 4 trans + starting structure
        #test the filter
        tsc.apply_filter(ContainsSpecieFilter(['Zn2+', 'Be2+', 'Mn4+'],
                                              strict_compare=True, AND=False))
        self.assertEqual(len(tsc), 8)
        self.assertEqual(tsc.transformed_structures[0].as_dict()[
            'history'][-1]['@class'], 'ContainsSpecieFilter')
        tsc.apply_filter(ContainsSpecieFilter(['Be2+']))
        self.assertEqual(len(tsc), 4)
        #Test set_parameter and add_tag.
        tsc.set_parameter("para1", "hello")
        self.assertEqual(tsc.transformed_structures[0]
                         .as_dict()['other_parameters']['para1'], 'hello')
        tsc.add_tags(["world", "universe"])
        self.assertEqual(tsc.transformed_structures[0]
                         .as_dict()['other_parameters']['tags'],
                         ["world", "universe"])
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testName']
    # Run every unittest test case defined in this module.
    unittest.main()
| dongsenfo/pymatgen | pymatgen/alchemy/tests/test_transmuters.py | Python | mit | 4,616 |
import os
import json
import requests
from pprint import pprint
from django.core.files import File as DjangoFile
from django.core.files.temp import NamedTemporaryFile
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from trivago2015.users.models import UserProfile
User = get_user_model()
class Command(BaseCommand):
    """Management command: load users from data/users.json and give each
    one a profile with a downloaded avatar image."""
    help = 'Import events'

    def handle(self, *args, **options):
        with open('data/users.json') as users_file:
            payload = json.load(users_file)
            for entry in payload['results']:
                pprint(entry)
                details = entry['user']
                user = User()
                user.username = details['username']
                user.set_password(details['password'])
                user.email = details['email']
                user.save()
                profile = UserProfile(user=user)
                profile.save()
                self.add_image(profile, details['picture']['large'])

    def add_image(self, user_profile, image_url):
        """Fetch image_url and attach it to the profile's image field."""
        response = requests.get(image_url)
        img_temp = NamedTemporaryFile(delete=True)
        img_temp.write(response.content)
        img_temp.flush()
        user_profile.image.save("image.jpg", DjangoFile(img_temp), save=True)
| ephes/trivago2015 | trivago2015/users/management/commands/import_users.py | Python | bsd-3-clause | 1,246 |
from itertools import count
from django import template
register = template.Library()
class GetCounterNode(template.Node):
    """
    Template node that puts an auto-incrementing counter callable into
    the context; each call of the callable returns the next integer,
    starting at ``start``.
    """
    def __init__(self, var_name, start=1):
        self.var_name = var_name
        # Arguments parsed from the template arrive as strings; coerce so
        # itertools.count() receives a real integer (count('3') raises
        # TypeError at render time).
        self.start = int(start)

    def render(self, context):
        counter = count(self.start)
        # Use the next() builtin instead of the Python-2-only
        # ``count(...).next`` bound method, which does not exist on
        # Python 3 iterators.
        context[self.var_name] = lambda: next(counter)
        return ''
@register.tag
def get_counter(parser, token):
    """
    ``{% get_counter varname [start] %}`` -- expose a counter callable in
    the template context under ``varname``.
    """
    bits = token.contents.split()
    try:
        return GetCounterNode(*bits[1:])
    except (TypeError, ValueError):
        # TypeError covers a wrong number of arguments to GetCounterNode
        # (the original only caught ValueError, which the constructor
        # never raised, so bad usage escaped as a raw TypeError).
        raise template.TemplateSyntaxError('%r tag requires arguments' % bits[0])
| Minkov/site | judge/templatetags/counter.py | Python | agpl-3.0 | 575 |
if __name__ == '__main__':
    # This module is a test suite; direct execution only prints usage help.
    print('''run tests via
FreeCAD_assembly2$ python2 test.py assembly2.solvers.dof_reduction_solver.tests''')
    exit()
import unittest
import FreeCAD
import assembly2
import os, time, numpy
test_assembly_path = os.path.join( assembly2.__dir__ , 'assembly2', 'solvers', 'test_assemblies' )
from assembly2.solvers import solveConstraints
from assembly2.core import debugPrint
class Stats:
    # Bare mutable namespace used to accumulate suite-wide counters/timings.
    pass
# Module-level singleton shared by the test cases below.
stats = Stats()
class Test_Dof_Reduction_Solver(unittest.TestCase):
    """
    Integration tests: run the dof_reduction_solver over the bundled
    example assemblies and, when use_cache is True, verify the cached
    solution matches the freshly solved one.
    """
    # Solve each assembly twice so the cached result can be compared.
    use_cache = True
    @classmethod
    def setUpClass(cls):
        # Reset the module-level counters/timers before the suite runs.
        stats.t_solver = 0
        stats.t_cache = 0
        stats.t_start = time.time()
        stats.n_attempted = 0
        stats.n_solved = 0
    @classmethod
    def tearDownClass(cls):
        # Print a pass-count and timing summary once the suite finishes.
        debugPrint(0,'\n------------------------------------------')
        debugPrint(0,' dof_reduction_solver passed %i/%i tests' % ( stats.n_solved, stats.n_attempted ) )
        debugPrint(0,' time solver: %3.2f s' % stats.t_solver )
        debugPrint(0,' time cached solutions: %3.2f s' % stats.t_cache )
        debugPrint(0,' total running time: %3.2f s' % (time.time() - stats.t_start) )
        debugPrint(0,'------------------------------------------')
    def get_solver_X( self, solver_result ):
        """Return the solved variable vector from a constraint system."""
        return solver_result.variableManager.X
    def check_solution( self, solver_result, solution ):
        """Compare the solver's X vector against *solution* (a list, or a
        whitespace-separated string of floats in square brackets)."""
        a = self.get_solver_X( solver_result )
        b = solution if type( solution ) != str else [ float(v) for v in solution[1:-1].split() ]
        self.assertTrue(
            len(a) == len(b) and numpy.allclose( a, b ),
            'Solver solution incorrect: %s != %s' % ( a, b )
        )
    def _test_file( self, testFile_basename, solution = None ):
        """Open one .fcstd assembly, solve it, optionally check the
        solution, then re-solve to validate the cache and clean up."""
        testFile = os.path.join( test_assembly_path, testFile_basename + '.fcstd' )
        debugPrint(1, testFile_basename )
        stats.n_attempted += 1
        #if testFile == 'tests/testAssembly11-Pipe_assembly.fcstd':
        # print('Skipping known fail')
        # continue
        doc = FreeCAD.open(testFile)
        t_start_solver = time.time()
        constraintSystem = solveConstraints( doc, solver_name = 'dof_reduction_solver', use_cache = self.use_cache, showFailureErrorDialog=False )
        if solution:
            self.check_solution( constraintSystem, solution )
        stats.t_solver += time.time() - t_start_solver
        if self.use_cache:
            debugPrint(1,'\n\n')
            X_org = constraintSystem.variableManager.X
            t_start_cache = time.time()
            #cache.debugMode = 1
            constraintSystem = solveConstraints( doc, solver_name = 'dof_reduction_solver', use_cache = self.use_cache )
            self.assertTrue(
                numpy.allclose( X_org , constraintSystem.variableManager.X ),
                'Cache solution differs from originial solution: %s != %s' % ( X_org , constraintSystem.variableManager.X )
            )
            #cache.debugMode = 0
            stats.t_cache += time.time() - t_start_cache
        constraintSystem.update()
        stats.n_solved += 1
        FreeCAD.closeDocument( doc.Name )
        debugPrint(1,'\n\n\n')
    # One test method per bundled assembly file; expected solutions (where
    # given) were recorded from known-good solver runs.
    def testAssembly_01_2_cubes( self ):
        X = self._test_file( 'testAssembly_01', [ 0 ]*6 + [ 3, 2, 2, -2.35619449, 0.61547971, 2.0943951 ] )
    def testAssembly_02_3_cubes( self ):
        self._test_file( 'testAssembly_02', '[-14.57140808 22.69204404 0.72612381 -2.0943951 1.57079633 -2.0943951 -2.2509 4.03179 8.09739 0. 1.57079633 1.04719755 -9.57140808 31.35229808 15.70503235 -3.14159265 1.57079633 2.61799388]' )
    def testAssembly_03_2_cubes( self ):
        self._test_file( 'testAssembly_03', '[ 2. -0.212375 -5.54064 -1.57079633 1.1559176 0. 0. 0. 0. 0. 0. 0. ]')
    def testAssembly_04_angle_constraint( self ):
        self._test_file( 'testAssembly_04', '[-14.7637558 -1.81650472 16.39465332 -0.78539816 0. 1.57079633 0. 0. 0. 0. 0. 0. ]')
    def testAssembly_05( self ):
        self._test_file( 'testAssembly_05')
    def testAssembly_06( self ):
        self._test_file( 'testAssembly_06')
    def testAssembly_07( self ):
        self._test_file( 'testAssembly_07')
    def testAssembly_08( self ):
        self._test_file( 'testAssembly_08')
    def testAssembly_09( self ):
        self._test_file( 'testAssembly_09')
    def testAssembly_10_block_iregular_constraint_order( self ):
        self._test_file( 'testAssembly_10-block_iregular_constraint_order')
    @unittest.skip("known failuire with lots of output")
    def testAssembly_11_pipe_assembly( self ):
        self._test_file( 'testAssembly_11-pipe_assembly')
    def testAssembly_11b_pipe_assembly( self ):
        self._test_file( 'testAssembly_11b-pipe_assembly')
    def testAssembly_12_angles_clock_face( self ):
        self._test_file( 'testAssembly_12-angles_clock_face')
    def testAssembly_13_spherical_surfaces_hip( self ):
        self._test_file( 'testAssembly_13-spherical_surfaces_hip')
    def testAssembly_13_spherical_surfaces_cube_vertices( self ):
        self._test_file( 'testAssembly_13-spherical_surfaces_cube_vertices')
    def testAssembly_14_lock_relative_axial_rotation( self ):
        self._test_file( 'testAssembly_14-lock_relative_axial_rotation')
    def testAssembly_15_triangular_link_assembly( self ):
        self._test_file( 'testAssembly_15-triangular_link_assembly')
    def testAssembly_16_revolved_surface_objs( self ):
        self._test_file( 'testAssembly_16-revolved_surface_objs')
    @unittest.expectedFailure
    def testAssembly_17_bspline_objects( self ):
        self._test_file( 'testAssembly_17-bspline_objects')
    def testAssembly_18_add_free_objects( self ):
        self._test_file( 'testAssembly_18-add_free_objects')
# python2 test.py assembly2.solvers.dof_reduction_solver.tests.Tests_solverLib
class Tests_solverLib(unittest.TestCase):
    """
    Unit tests for solverLib's Newton solver and the finite-difference /
    random-point gradient approximators, using two small analytic test
    functions (f1: R^2->R^2, f2: R^2->R) with hand-derived gradients.
    """
    def assertClose( self, a, b, tol= 10.0**-9 ):
        """Assert two scalars are within *tol* of each other."""
        assert type(a) == float or type(a) == int or type(a) == numpy.float64, type(a)
        assert type(b) == float or type(b) == int or type(b) == numpy.float64, type(b)
        self.assertTrue(
            abs(a-b) < tol,
            'abs(a-b) > %1.1f ( a=%s, b=%s, diff=%e)' % ( tol, a, b, a-b )
        )
    def assertAllClose( self, a, b):
        """Assert two vectors have equal length and numerically close values."""
        self.assertTrue(
            len(a) == len(b) and numpy.allclose( a, b ),
            'a != b: %s != %s' % ( a, b )
        )
    # f1 has a root at (2, -1); grad_f1 is its analytic Jacobian.
    def f1(self,x) :
        return numpy.array([
            x[0] + x[1] -1,
            x[0]**2 - x[1] - 5
        ])
    def grad_f1(self, x):
        return numpy.array([
            [1, 1],
            [2*x[0], -1]
        ])
    def test_solve_via_Newtons_method( self ):
        """Newton's method should find f1's root from a random start."""
        from solverLib import solve_via_Newtons_method, rand
        maxStep = [0.5, 0.5]
        xMin = solve_via_Newtons_method( self.f1, rand(2)+3, maxStep, x_tol=0, debugPrintLevel=0 )
        self.assertAllClose( xMin, [2, -1 ] )
    # f2 is scalar-valued; grad_f2 is its analytic gradient.
    def f2( self,X) :
        y,z=X
        return y + y*z + (1.0-y)**3
    def grad_f2(self, X):
        y,z=X
        return numpy.array([ 1 + z - 3*(1.0-y)**2, y ])
    def test_gradient_approx_1( self ):
        'test on a function which returns a single value'
        from solverLib import GradientApproximatorRandomPoints, GradientApproximatorForwardDifference, GradientApproximatorCentralDifference, rand
        grad_f_rp = GradientApproximatorRandomPoints(self.f2)
        grad_f_fd = GradientApproximatorForwardDifference(self.f2)
        grad_f_cd = GradientApproximatorCentralDifference(self.f2)
        for i in range(2):
            X = rand(2)*10-5
            #print(' X %s' % X)
            #print(' grad_f(X) analytical: %s' % grad_f2(X))
            #print(' grad_f(X) randomPoints: %s' % grad_f_rp(X))
            #print(' grad_f(X) forwardDiff.: %s' % grad_f_fd(X))
            #print(' grad_f(X) centralDiff.: %s' % grad_f_cd(X))
            #print(' norm(analytical-randomPoints) %e' % norm(grad_f2(X) - grad_f_rp(X)) )
            self.assertAllClose( self.grad_f2(X), grad_f_rp(X) )
            self.assertAllClose( self.grad_f2(X), grad_f_fd(X) )
            self.assertAllClose( self.grad_f2(X), grad_f_cd(X) )
    def test_gradient_approx_2( self ):
        'test on a function which returns multiple values'
        from solverLib import GradientApproximatorRandomPoints, GradientApproximatorForwardDifference, GradientApproximatorCentralDifference, rand
        grad_f_rp = GradientApproximatorRandomPoints( self.f1 )
        grad_f_fd = GradientApproximatorForwardDifference( self.f1 )
        grad_f_cd = GradientApproximatorCentralDifference( self.f1 )
        for i in range(2):
            X = rand(2)*10-5
            #print(' X %s' % X)
            #print(' grad_f(X) analytical:')
            #prettyPrintArray(grad_f1(X), toStdOut, ' ','%1.6e')
            #print(' grad_f(X) randomPoints:')
            #prettyPrintArray(grad_f_rp(X), toStdOut, ' ','%1.6e')
            #print(' grad_f(X) forwardDiff:')
            #prettyPrintArray(grad_f_fd(X), toStdOut, ' ','%1.6e')
            #print(' grad_f(X) centralDiff:')
            #prettyPrintArray(grad_f_cd(X), toStdOut, ' ','%1.6e')
            #print(' error rp %e' % norm(grad_f1(X) - grad_f_rp(X)))
            self.assertAllClose( self.grad_f1(X), grad_f_rp(X) )
    # Name deliberately lacks the test_ prefix so unittest skips it;
    # rename to test_... to run the plotting diagnostic manually.
    def est_plot_last_search( self ):
        import solverLib
        from solverLib import solve_via_Newtons_method, rand, SearchAnalyticsWrapper
        maxStep = [0.5, 0.5]
        xRoots = solve_via_Newtons_method( SearchAnalyticsWrapper(self.f1), rand(2)+3, maxStep, x_tol=0, debugPrintLevel=3, f_tol=10**-12)
        print( solverLib.analytics['lastSearch'] )
        solverLib.analytics['lastSearch'].plot()
# python2 test.py assembly2.solvers.dof_reduction_solver.tests.Tests_degrees_of_freedom
class Tests_degrees_of_freedom(unittest.TestCase):
    """
    Round-trip tests for the degree-of-freedom classes: setValue followed
    by getValue must return the same number (within a tolerance for the
    linear and rotational DOFs).
    """
    def test( self ):
        """Build a one-cube FreeCAD document and exercise each DOF class."""
        from degreesOfFreedom import PlacementDegreeOfFreedom, LinearMotionDegreeOfFreedom, AxisRotationDegreeOfFreedom, pi, normalize
        from numpy.random import rand
        from variableManager import VariableManager
        #print('creating test FreeCAD document, constraining a single Cube')
        import FreeCAD, Part
        FreeCAD.newDocument("testDoc")
        #FreeCAD.setActiveDocument("box")
        #FreeCAD.ActiveDocument = FreeCAD.getDocument("box")
        objName = "box"
        box = FreeCAD.ActiveDocument.addObject("Part::FeaturePython", objName)
        box.Shape = Part.makeBox(2,3,2)
        #FreeCAD.ActiveDocument.recompute()
        # Random non-zero placement so the DOFs start from a generic state.
        box.Placement.Base.x = rand()
        box.Placement.Base.y = rand() + 1
        box.Placement.Base.z = rand() + 2
        #print(box.Placement)
        # Minimal stand-in for a constraint system: the DOF classes only
        # need a .variableManager attribute.
        class FakeSystem:
            def __init__(self, variableManager):
                self.variableManager = variableManager
        vM = VariableManager(FreeCAD.ActiveDocument)
        #print(vM.X)
        constaintSystem = FakeSystem(vM)
        #print('\nTesting PlacementDegreeOfFreedom')
        for object_dof in range(6):
            d = PlacementDegreeOfFreedom( constaintSystem, objName, object_dof )
            #print(d)
            for i in range(6):
                value = pi*( rand() - 0.5 )
                d.setValue(value)
                assert d.getValue() == value
        #print('\nTesting LinearMotionDegreeOfFreedom')
        tol = 10**-14
        for i in range(3):
            d = LinearMotionDegreeOfFreedom( constaintSystem, objName )
            d.setDirection( normalize(rand(3) - 0.5) )
            #print(d)
            for i in range(12):
                value = 12*( rand() - 0.5 )
                d.setValue(value)
                returnedValue = d.getValue()
                if abs(returnedValue - value) > tol :
                    raise ValueError("d.getValue() - value != %1.0e, [diff %e]" % (tol, returnedValue - value))
        #print('\nTesting AxisRotationDegreeOfFreedom')
        tol = 10**-12
        for i in range(3):
            d = AxisRotationDegreeOfFreedom( constaintSystem, objName )
            axis_r = normalize(rand(3) - 0.5) #axis in parts co-ordinate system (i.e. relative to part)
            axis = normalize(rand(3) - 0.5) # desired axis in global co-ordinate system
            d.setAxis( axis, axis_r )
            d.setValue(0) #update azi,ela,theta to statify aligment of axis vector
            #print(d)
            for i in range(6):
                value = 2*pi*( rand() - 0.5 )
                d.setValue(value)
                returnedValue = d.getValue()
                #print(' d.getValue() %f value %f, diff %e' % (returnedValue, value, returnedValue - value))
                if abs(returnedValue - value) > tol :
                    raise ValueError("d.getValue() - value != %1.0e, [diff %e]" % (tol, returnedValue - value))
| hamish2014/FreeCAD_assembly2 | assembly2/solvers/dof_reduction_solver/tests.py | Python | lgpl-2.1 | 13,172 |
from scipy import *
import json
import scipy
import numpy as np
import time as tm
import gc
import struct
import getopt, sys
import os
import traceback
# ------------------------------------------------------------
# Logging & Timer
# ------------------------------------------------------------
logging_level = 0;
# 0 = no_logging
# 1 = few details
# 2 = many details
# 3 = many many details
def log(n, l):
if __name__=="__main__" and n <= logging_level:
for s in l:
print "Log:", s;
timer = 1;
timer_last = tm.time()
def timer_start(s):
    # Record the wall-clock start time of a labelled timing section.
    global timer_last
    if __name__ == "__main__" and timer == 1:
        log(3, ["Timer start:" + s])
        timer_last = tm.time()
def timer_stop():
    # Log the elapsed seconds since the matching timer_start() call.
    global timer_last
    if __name__ == "__main__" and timer == 1:
        log(3, ["Timer stop :" + str(tm.time() - timer_last)])
# ------------------------------------------------------------
# ------------------------------------------------------------
# inputFile = output.bin
# outputVtx = outputVtx.obj
# outputFaces = outputFaces.obj
def readFile(chunksize, inputFile, OUT_DIR, V=None, FV=None):
    """
    Convert a binary chain file into a JSON selector file.

    The input is a sequence of records, each made of three big-endian
    uint32 offsets (z, x, y) followed by ``chunksize`` flag bytes in which
    '\\x01' marks a set cell.  The accumulated vectors and offsets are
    written to ``<OUT_DIR>/selettori-<id>.json``.

    Fixes over the previous revision: the parameter order now matches the
    only call site (``main`` passes chunksize, FILE_IN, OUT_DIR); the
    unused legacy parameters V/FV became optional trailing arguments; the
    record counter is initialised (it was incremented before assignment,
    raising NameError on the first record); and empty input no longer
    crashes on ``None.tolist()``.

    :param chunksize: flag bytes per record (nx * ny * nz)
    :param inputFile: path of the binary input, e.g. ``output-<id>.bin``
    :param OUT_DIR: directory that receives the JSON output
    :param V: unused, kept for backward compatibility
    :param FV: unused, kept for backward compatibility
    """
    outputId = os.path.basename(inputFile).split('.')[0].split('-')[1]
    outputFile = OUT_DIR + "/selettori-" + outputId + ".json"
    LISTA_VETTORI = None
    LISTA_OFFSET = None
    createdLV = False
    count = 0  # number of records read so far
    with open(inputFile, "rb") as binfile:
        try:
            while True:
                count += 1
                zStart = struct.unpack('>I', binfile.read(4))[0]
                xStart = struct.unpack('>I', binfile.read(4))[0]
                yStart = struct.unpack('>I', binfile.read(4))[0]
                log(1, ["zStart, xStart, yStart = " + str(zStart) + "," + str(xStart) + "," + str(yStart)])
                currentChain = np.zeros(chunksize, dtype=int32)
                temp = binfile.read(chunksize)
                i = 0
                timer_start("currentChain " + str(i))
                while (i < chunksize):
                    if (temp[i] == '\x01'):
                        currentChain[i] = 1
                    i = i + 1
                timer_stop()
                log(1, ["currentChain[i] = " + str(i)])
                if (createdLV == False):
                    LISTA_OFFSET = np.array([[zStart,xStart,yStart]], dtype=int32)
                    LISTA_VETTORI = np.array([currentChain], dtype=int32)
                    createdLV = True
                else:
                    LISTA_OFFSET = np.append(LISTA_OFFSET, np.array([[zStart,xStart,yStart]], dtype=int32), axis=0)
                    LISTA_VETTORI = np.append(LISTA_VETTORI, [currentChain], axis=0)
        except Exception:
            # A struct.error on a truncated read is the normal end-of-file
            # signal; any other failure is logged with its traceback too.
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
            log(1, [ "EOF or error: " + ''.join('!! ' + line for line in lines) ])
    if LISTA_VETTORI is None:
        # Empty input: emit empty selector arrays instead of crashing.
        LISTA_VETTORI = np.zeros((0, chunksize), dtype=int32)
        LISTA_OFFSET = np.zeros((0, 3), dtype=int32)
    with open(outputFile, "w") as file:
        json.dump({"lista_vettori":LISTA_VETTORI.tolist(), "lista_offset":LISTA_OFFSET.tolist()}, file, separators=(',',':'))
        file.flush()
def main(argv):
    """
    CLI entry point: parse options and convert one binary chain file.

    Options: -x/-y/-z grid sizes (-x sets all three, -y overrides y and z,
    -z overrides z only), -i input file, -o output directory.
    -x, -i and -o are mandatory; exits with status 2 on bad usage or on a
    conversion error.
    """
    ARGS_STRING = 'Args: -x <borderX> -y <borderY> -z <borderZ> -i <inputfile> -o <outdir>'
    try:
        opts, args = getopt.getopt(argv,"i:o:x:y:z:")
    except getopt.GetoptError:
        print(ARGS_STRING)
        sys.exit(2)
    nx = ny = nz = 64
    mandatory = 3
    #Files
    FILE_IN = ''
    OUT_DIR = ''
    for opt, arg in opts:
        if opt == '-x':
            nx = ny = nz = int(arg)
            mandatory = mandatory - 1
        elif opt == '-y':
            ny = nz = int(arg)
        elif opt == '-z':
            nz = int(arg)
        elif opt == '-i':
            FILE_IN = arg
            mandatory = mandatory - 1
        elif opt == '-o':
            OUT_DIR = arg
            mandatory = mandatory - 1
    if mandatory != 0:
        # Fixed typo in the user-facing message ("where" -> "were").
        print('Not all arguments were given')
        print(ARGS_STRING)
        sys.exit(2)
    chunksize = nx * ny * nz
    try:
        readFile(chunksize,FILE_IN,OUT_DIR)
    except Exception:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        log(1, [ "Error: " + ''.join('!! ' + line for line in lines) ])
        sys.exit(2)
if __name__ == "__main__":
    # Strip the program name and hand the remaining CLI args to main().
    main(sys.argv[1:])
##
# Copyright (C) 2013 TopCoder Inc., All Rights Reserved.
##
"""
Abstract interface of writer class. Clarifying interface and duty of writer classes.
"""
__author__ = 'Easyhard'
__version__ = '1.0'
class DataWriter(object):
    """
    Abstract base for entity exporters.

    Concrete subclasses serialise the internal representation (Entity
    subclasses) to an external format such as XML or CSV.
    """
    def start(self):
        """Called once before any entity is written; initialise state here."""
        raise NotImplementedError()
    def end(self):
        """Called once after the last entity; flush any pending output here."""
        raise NotImplementedError()
    def write_entity(self, entity):
        """Export every field reported by entity.get_field_list()."""
        raise NotImplementedError()
| NASA-Tournament-Lab/CoECI-CMS-Healthcare-Fraud-Prevention | partnerclient/hfppnetwork/partner/conversion/datawriter.py | Python | apache-2.0 | 1,031 |
import attrdict
import json
import paho.mqtt.client as mqtt
from ohaut import consts
from ohaut.devices import manager
class MQTTClient(object):
    """Thin wrapper around paho-mqtt that dispatches OHAUT device topics
    to the device manager."""
    def __init__(self):
        self.client = mqtt.Client()
        self.client.on_connect = self.on_connect
        self.client.on_message = self.on_message
    def connect(self, *args, **kwargs):
        """Proxy straight through to paho's connect()."""
        self.client.connect(*args, **kwargs)
    @staticmethod
    def on_connect(client, userdata, rc):
        # Subscribe to all OHAUT device topics once the broker accepts us.
        client.subscribe(consts.OHAUT_SUBS)
    def on_message(self, client, userdata, msg):
        topic_match = consts.TOPIC_RE.match(msg.topic)
        device_id = topic_match.group(1)
        endpoint = topic_match.group(2)
        if endpoint == consts.DETAILS:
            decoded = json.loads(msg.payload.decode('ascii'))
            details = attrdict.AttrDict(decoded)
            if 'version' in details:
                manager.handle_device(device_id, details)
        elif endpoint == consts.ONLINE:
            online = msg.payload == b'1'
            if online:
                manager.handle_device_online(device_id)
            else:
                manager.handle_device_offline(device_id)
    def loop_forever(self):
        self.client.loop_forever()
| ohaut/ohaut-core | ohaut/mqtt.py | Python | gpl-3.0 | 1,197 |
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Pedro Algarvio (pedro@algarvio.me)`
salt.utils.vt
~~~~~~~~~~~~~
Virtual Terminal
This code has been heavily inspired by Python's subprocess code, the `non
blocking version of it`__, some minor online snippets about TTY handling
with python including `Python's own ``pty`` source code`__ and `Pexpect`__
which has already surpassed some of the pitfalls that some systems would
get us into.
.. __: http://code.activestate.com/recipes/440554/
.. __: https://github.com/python-mirror/python/blob/3.3/Lib/pty.py
.. __: https://github.com/pexpect/pexpect
'''
from __future__ import absolute_import
# Import python libs
import os
import sys
import time
import errno
import signal
import select
import logging
mswindows = (sys.platform == "win32")
if mswindows:
# pylint: disable=F0401,W0611
from win32file import ReadFile, WriteFile
from win32pipe import PeekNamedPipe
import msvcrt
import _subprocess
# pylint: enable=F0401,W0611
else:
import pty
import fcntl
import struct
import termios
import resource
# Import salt libs
import salt.utils
from salt.ext.six import string_types
from salt.log.setup import LOG_LEVELS
log = logging.getLogger(__name__)
class TerminalException(Exception):
    '''
    Exception type raised for terminal-specific failures.
    '''
# ----- Cleanup Running Instances ------------------------------------------->
# This lists holds Terminal instances for which the underlying process had
# not exited at the time its __del__ method got called: those processes are
# wait()ed for synchronously from _cleanup() when a new Terminal object is
# created, to avoid zombie processes.
_ACTIVE = []
def _cleanup():
    '''
    Reap Terminal instances whose underlying process has exited.

    Iterates over a snapshot of the module-level _ACTIVE list and drops
    every instance that is no longer alive; a concurrent removal by
    another thread is harmless and simply ignored.
    '''
    snapshot = list(_ACTIVE)
    for inst in snapshot:
        if inst.isalive() is not True:
            try:
                _ACTIVE.remove(inst)
            except ValueError:
                # Another thread already removed this instance; fine.
                pass
# <---- Cleanup Running Instances --------------------------------------------
class Terminal(object):
'''
I'm a virtual terminal
'''
def __init__(self,
             args=None,
             executable=None,
             shell=False,
             cwd=None,
             env=None,
             preexec_fn=None,
             # Terminal Size
             rows=None,
             cols=None,
             # Logging options
             log_stdin=None,
             log_stdin_level='debug',
             log_stdout=None,
             log_stdout_level='debug',
             log_stderr=None,
             log_stderr_level='debug',
             # sys.stdXYZ streaming options
             stream_stdout=None,
             stream_stderr=None,
             ):
    '''
    Spawn a command attached to a freshly allocated pseudo-terminal.

    :param args: command to run, as a string or argv list
    :param executable: overrides ``args[0]`` as the program to exec
    :param shell: run the command through ``/bin/sh -c``
    :param cwd: working directory for the child
    :param env: environment mapping for the child; inherit when ``None``
    :param preexec_fn: callable invoked in the child right before exec
    :param rows: terminal rows; detected from the parent tty when ``None``
    :param cols: terminal columns; detected from the parent tty when ``None``
    :param log_stdin: ``True`` for an auto-created logger, an existing
        ``logging.Logger``, or ``None`` to disable stdin logging
    :param log_stdout: same contract as ``log_stdin``, for stdout
    :param log_stderr: same contract as ``log_stdin``, for stderr
    :param stream_stdout: ``True`` to mirror to ``sys.stdout``, a
        file-like object providing ``write()``/``flush()``/``close()``,
        or ``False``/``None`` (the default) for no mirroring
    :param stream_stderr: same contract as ``stream_stdout``, mirrored to
        ``sys.stderr`` when ``True``
    :raises TerminalException: on bad arguments or spawn failure
    '''
    # Let's avoid Zombies!!!
    _cleanup()
    if not args and not executable and not shell:
        raise TerminalException(
            'You need to pass at least one of \'args\', \'executable\' '
            'or \'shell=True\''
        )
    self.args = args
    self.executable = executable
    self.shell = shell
    self.cwd = cwd
    self.env = env
    self.preexec_fn = preexec_fn
    # ----- Set the desired terminal size ------------------------------->
    # Any dimension not explicitly provided is copied from the parent tty.
    if rows is None and cols is None:
        rows, cols = self.__detect_parent_terminal_size()
    elif rows is not None and cols is None:
        _, cols = self.__detect_parent_terminal_size()
    elif rows is None and cols is not None:
        rows, _ = self.__detect_parent_terminal_size()
    self.rows = rows
    self.cols = cols
    # <---- Set the desired terminal size --------------------------------
    # ----- Internally Set Attributes ----------------------------------->
    self.pid = None
    self.stdin = None
    self.stdout = None
    self.stderr = None
    self.child_fd = None
    self.child_fde = None
    self.closed = True
    self.flag_eof_stdout = False
    self.flag_eof_stderr = False
    self.terminated = True
    self.exitstatus = None
    self.signalstatus = None
    # status returned by os.waitpid
    self.status = None
    self.__irix_hack = 'irix' in sys.platform.lower()
    # <---- Internally Set Attributes ------------------------------------
    # ----- Direct Streaming Setup -------------------------------------->
    if stream_stdout is True:
        self.stream_stdout = sys.stdout
    elif stream_stdout is False or stream_stdout is None:
        # BUGFIX: the previous code raised TerminalException for the
        # documented default value ``None``; it now means "no streaming",
        # exactly like ``False``.
        self.stream_stdout = None
    else:
        if not hasattr(stream_stdout, 'write') or \
                not hasattr(stream_stdout, 'flush') or \
                not hasattr(stream_stdout, 'close'):
            raise TerminalException(
                '\'stream_stdout\' needs to have at least 3 methods, '
                '\'write()\', \'flush()\' and \'close()\'.'
            )
        self.stream_stdout = stream_stdout
    if stream_stderr is True:
        self.stream_stderr = sys.stderr
    elif stream_stderr is False or stream_stderr is None:
        # BUGFIX: same as stream_stdout above -- None means "disabled".
        self.stream_stderr = None
    else:
        if not hasattr(stream_stderr, 'write') or \
                not hasattr(stream_stderr, 'flush') or \
                not hasattr(stream_stderr, 'close'):
            raise TerminalException(
                '\'stream_stderr\' needs to have at least 3 methods, '
                '\'write()\', \'flush()\' and \'close()\'.'
            )
        self.stream_stderr = stream_stderr
    # <---- Direct Streaming Setup ---------------------------------------
    # ----- Spawn our terminal ------------------------------------------>
    try:
        self._spawn()
    except Exception as err:  # pylint: disable=W0703
        # A lot can go wrong, so that's why we're catching the most
        # general exception type
        log.warning(
            'Failed to spawn the VT: {0}'.format(err),
            exc_info_on_loglevel=logging.DEBUG
        )
        raise TerminalException(
            'Failed to spawn the VT. Error: {0}'.format(err)
        )
    log.debug(
        'Child Forked! PID: {0} STDOUT_FD: {1} STDERR_FD: '
        '{2}'.format(self.pid, self.child_fd, self.child_fde)
    )
    terminal_command = ' '.join(self.args)
    if 'decode("base64")' in terminal_command or 'base64.b64decode(' in terminal_command:
        # The salt-ssh shim is shipped base64-encoded; keep it out of the
        # debug log and only emit it at trace level.
        log.debug('VT: Salt-SSH SHIM Terminal Command executed. Logged to TRACE')
        log.trace('Terminal Command: {0}'.format(terminal_command))
    else:
        log.debug('Terminal Command: {0}'.format(terminal_command))
    # <---- Spawn our terminal -------------------------------------------
    # ----- Setup Logging ----------------------------------------------->
    # Setup logging after spawned in order to have a pid value
    self.stdin_logger_level = LOG_LEVELS.get(log_stdin_level, log_stdin_level)
    if log_stdin is True:
        self.stdin_logger = logging.getLogger(
            '{0}.{1}.PID-{2}.STDIN'.format(
                __name__, self.__class__.__name__, self.pid
            )
        )
    elif log_stdin is not None:
        if not isinstance(log_stdin, logging.Logger):
            raise RuntimeError(
                '\'log_stdin\' needs to subclass `logging.Logger`'
            )
        self.stdin_logger = log_stdin
    else:
        self.stdin_logger = None
    self.stdout_logger_level = LOG_LEVELS.get(log_stdout_level, log_stdout_level)
    if log_stdout is True:
        self.stdout_logger = logging.getLogger(
            '{0}.{1}.PID-{2}.STDOUT'.format(
                __name__, self.__class__.__name__, self.pid
            )
        )
    elif log_stdout is not None:
        if not isinstance(log_stdout, logging.Logger):
            raise RuntimeError(
                '\'log_stdout\' needs to subclass `logging.Logger`'
            )
        self.stdout_logger = log_stdout
    else:
        self.stdout_logger = None
    self.stderr_logger_level = LOG_LEVELS.get(log_stderr_level, log_stderr_level)
    if log_stderr is True:
        self.stderr_logger = logging.getLogger(
            '{0}.{1}.PID-{2}.STDERR'.format(
                __name__, self.__class__.__name__, self.pid
            )
        )
    elif log_stderr is not None:
        if not isinstance(log_stderr, logging.Logger):
            raise RuntimeError(
                '\'log_stderr\' needs to subclass `logging.Logger`'
            )
        self.stderr_logger = log_stderr
    else:
        self.stderr_logger = None
    # <---- Setup Logging ------------------------------------------------
# ----- Common Public API ----------------------------------------------->
def send(self, data):
    '''
    Send data to the terminal. You are responsible to send any required
    line feeds.

    Returns the number of bytes written, ``0`` when the terminal is not
    currently writable, or ``None`` when the child's stdin is gone.
    '''
    return self._send(data)
def sendline(self, data, linesep=os.linesep):
'''
Send the provided data to the terminal appending a line feed.
'''
return self.send('{0}{1}'.format(data, linesep))
def recv(self, maxsize=None):
'''
Receive data from the terminal as a (``stdout``, ``stderr``) tuple. If
any of those is ``None`` we can no longer communicate with the
terminal's child process.
'''
if maxsize is None:
maxsize = 1024
elif maxsize < 1:
maxsize = 1
return self._recv(maxsize)
def close(self, terminate=True, kill=False):
    '''
    Close the communication with the terminal's child.
    If ``terminate`` is ``True`` then additionally try to terminate the
    terminal, and if ``kill`` is also ``True``, kill the terminal if
    terminating it was not enough.

    :raises TerminalException: if the child process could not be terminated
    '''
    if not self.closed:
        # Close both parent-side PTY descriptors (stdout and stderr).
        if self.child_fd is not None:
            os.close(self.child_fd)
            self.child_fd = None
        if self.child_fde is not None:
            os.close(self.child_fde)
            self.child_fde = None
        # Give the child a moment to notice its terminal went away.
        time.sleep(0.1)
        if terminate:
            if not self.terminate(kill):
                raise TerminalException('Failed to terminate child process.')
        self.closed = True
@property
def has_unread_data(self):
return self.flag_eof_stderr is False or self.flag_eof_stdout is False
# <---- Common Public API ------------------------------------------------
# ----- Common Internal API --------------------------------------------->
def _translate_newlines(self, data):
if data is None or not data:
return
# PTY's always return \r\n as the line feeds
return data.replace('\r\n', os.linesep)
# <---- Common Internal API ----------------------------------------------
# ----- Context Manager Methods ----------------------------------------->
def __enter__(self):
    # Context-manager entry: the child is already spawned by __init__,
    # so there is nothing to do but hand back the instance.
    return self
def __exit__(self, exc_type, exc_value, traceback):
    # Context-manager exit: force the child down -- terminate first,
    # escalate to SIGKILL if needed.
    self.close(terminate=True, kill=True)
    # Wait for the process to terminate, to avoid zombies.
    if self.isalive():
        self.wait()
# <---- Context Manager Methods ------------------------------------------
# ----- Platform Specific Methods ------------------------------------------->
if mswindows:
# ----- Windows Methods --------------------------------------------->
def _execute(self):
    # Windows VT support was never implemented.
    raise NotImplementedError
def _spawn(self):
    # Windows VT support was never implemented; __init__ will wrap this
    # in a TerminalException.
    raise NotImplementedError
def _recv(self, maxsize):
    # Windows VT support was never implemented.
    raise NotImplementedError
def _send(self, data):
    # Windows VT support was never implemented.
    raise NotImplementedError
def send_signal(self, sig):
    '''
    Send a signal to the process.

    Only ``SIGTERM``, ``CTRL_C_EVENT`` and ``CTRL_BREAK_EVENT`` are
    supported on Windows; anything else raises ``ValueError``.
    '''
    # pylint: disable=E1101
    if sig == signal.SIGTERM:
        self.terminate()
    elif sig == signal.CTRL_C_EVENT:
        os.kill(self.pid, signal.CTRL_C_EVENT)
    elif sig == signal.CTRL_BREAK_EVENT:
        os.kill(self.pid, signal.CTRL_BREAK_EVENT)
    else:
        raise ValueError('Unsupported signal: {0}'.format(sig))
    # pylint: enable=E1101
def terminate(self):
    '''
    Terminates the process.
    '''
    # NOTE(review): ``self._handle`` is never assigned anywhere in this
    # class -- confirm this Windows code path is actually functional.
    try:
        _subprocess.TerminateProcess(self._handle, 1)
    except OSError:
        # ERROR_ACCESS_DENIED (winerror 5) is received when the
        # process already died.
        ecode = _subprocess.GetExitCodeProcess(self._handle)
        if ecode == _subprocess.STILL_ACTIVE:
            raise
        self.exitstatus = ecode
# On Windows terminating and killing are the same operation.
kill = terminate
# <---- Windows Methods --------------------------------------------------
else:
# ----- Linux Methods ----------------------------------------------->
# ----- Internal API ------------------------------------------------>
def _spawn(self):
    '''
    Fork a child attached to two fresh PTYs (one for stdout, one for
    stderr) and exec the configured command inside it.
    '''
    self.pid, self.child_fd, self.child_fde = self.__fork_ptys()
    # Normalize self.args into an argv list.
    if isinstance(self.args, string_types):
        args = [self.args]
    elif self.args:
        args = list(self.args)
    else:
        args = []
    if self.shell and self.args:
        self.args = ['/bin/sh', '-c', ' '.join(args)]
    elif self.shell:
        self.args = ['/bin/sh']
    else:
        self.args = args
    if self.executable:
        # An explicit executable overrides argv[0].
        self.args[0] = self.executable
    if self.executable is None:
        self.executable = self.args[0]
    if self.pid == 0:
        # Child branch: runs between fork and exec; exec never returns
        # on success.
        self.stdin = sys.stdin.fileno()
        self.stdout = sys.stdout.fileno()
        self.stderr = sys.stderr.fileno()
        # Set the terminal size
        self.child_fd = self.stdin
        if os.isatty(self.child_fd):
            # Only try to set the window size if the parent IS a tty
            try:
                self.setwinsize(self.rows, self.cols)
            except IOError as err:
                log.warning(
                    'Failed to set the VT terminal size: {0}'.format(
                        err
                    ),
                    exc_info_on_loglevel=logging.DEBUG
                )
        # Do not allow child to inherit open file descriptors from
        # parent
        max_fd = resource.getrlimit(resource.RLIMIT_NOFILE)
        try:
            os.closerange(pty.STDERR_FILENO + 1, max_fd[0])
        except OSError:
            pass
        if self.cwd is not None:
            os.chdir(self.cwd)
        if self.preexec_fn:
            self.preexec_fn()
        # execvp inherits the environment, execvpe replaces it.
        if self.env is None:
            os.execvp(self.executable, self.args)
        else:
            os.execvpe(self.executable, self.args, self.env)
    # Parent branch: the child is now live.
    self.closed = False
    self.terminated = False
def __fork_ptys(self):
    '''
    Fork the PTY.

    The major difference from the python source is that we separate the
    stdout from stderr output.

    :returns: ``(pid, stdout_parent_fd, stderr_parent_fd)``; in the
        child ``pid`` is 0 and the descriptors are already dup2()'ed
        over stdin/stdout/stderr before this returns.
    '''
    stdout_parent_fd, stdout_child_fd = pty.openpty()
    if stdout_parent_fd < 0 or stdout_child_fd < 0:
        raise TerminalException('Failed to open a TTY for stdout')
    stderr_parent_fd, stderr_child_fd = pty.openpty()
    if stderr_parent_fd < 0 or stderr_child_fd < 0:
        raise TerminalException('Failed to open a TTY for stderr')
    pid = os.fork()
    if pid < pty.CHILD:
        raise TerminalException('Failed to fork')
    elif pid == pty.CHILD:
        # Child.
        # Close parent FDs
        os.close(stdout_parent_fd)
        os.close(stderr_parent_fd)
        # Re-initialize salt's crypto state after fork (done on both
        # sides of the fork).
        salt.utils.reinit_crypto()
        # ----- Make STDOUT the controlling PTY --------------------->
        child_name = os.ttyname(stdout_child_fd)
        # Disconnect from controlling tty. Harmless if not already
        # connected
        try:
            tty_fd = os.open('/dev/tty', os.O_RDWR | os.O_NOCTTY)
            if tty_fd >= 0:
                os.close(tty_fd)
        # which exception, shouldn't we catch explicitly .. ?
        except:  # pylint: disable=W0702
            # Already disconnected. This happens if running inside cron
            pass
        # New session!
        os.setsid()
        # Verify we are disconnected from controlling tty
        # by attempting to open it again.
        try:
            tty_fd = os.open('/dev/tty', os.O_RDWR | os.O_NOCTTY)
            if tty_fd >= 0:
                os.close(tty_fd)
                raise TerminalException(
                    'Failed to disconnect from controlling tty. It is '
                    'still possible to open /dev/tty.'
                )
        # which exception, shouldn't we catch explicitly .. ?
        except:  # pylint: disable=W0702
            # Good! We are disconnected from a controlling tty.
            pass
        # Verify we can open child pty.
        tty_fd = os.open(child_name, os.O_RDWR)
        if tty_fd < 0:
            raise TerminalException(
                'Could not open child pty, {0}'.format(child_name)
            )
        else:
            os.close(tty_fd)
        # Verify we now have a controlling tty.
        if os.name != 'posix':
            # Only do this check in not BSD-like operating systems. BSD-like operating systems breaks at this point
            # NOTE(review): ``os.name`` is 'posix' on every platform
            # this branch can execute on, so this verification is always
            # skipped -- confirm the intended condition.
            tty_fd = os.open('/dev/tty', os.O_WRONLY)
            if tty_fd < 0:
                raise TerminalException(
                    'Could not open controlling tty, /dev/tty'
                )
            else:
                os.close(tty_fd)
        # <---- Make STDOUT the controlling PTY ----------------------
        # ----- Duplicate Descriptors ------------------------------->
        os.dup2(stdout_child_fd, pty.STDIN_FILENO)
        os.dup2(stdout_child_fd, pty.STDOUT_FILENO)
        os.dup2(stderr_child_fd, pty.STDERR_FILENO)
        # <---- Duplicate Descriptors --------------------------------
    else:
        # Parent. Close Child PTY's
        salt.utils.reinit_crypto()
        os.close(stdout_child_fd)
        os.close(stderr_child_fd)
    return pid, stdout_parent_fd, stderr_parent_fd
def _send(self, data):
    '''
    Write ``data`` to the child's stdin PTY.

    Returns the number of bytes written, ``0`` when the descriptor is
    not currently writable, or ``None`` when stdin is (or becomes) gone.
    '''
    if self.child_fd is None:
        return None
    # Zero-timeout select: a non-writable fd means "try again later".
    if not select.select([], [self.child_fd], [], 0)[1]:
        return 0
    try:
        if self.stdin_logger:
            self.stdin_logger.log(self.stdin_logger_level, data)
        written = os.write(self.child_fd, data)
    except OSError as why:
        if why.errno == errno.EPIPE:  # broken pipe
            # The child hung up; drop the descriptor so future sends
            # return None instead of raising.
            os.close(self.child_fd)
            self.child_fd = None
            return
        raise
    return written
def _recv(self, maxsize):
    '''
    Read up to ``maxsize`` bytes from each PTY (stdout and stderr)
    without blocking.

    Returns a ``(stdout, stderr)`` tuple where either item is ``None``
    once that channel has hit EOF, and ``''`` when there was simply
    nothing to read this time.
    '''
    rfds = []
    if self.child_fd:
        rfds.append(self.child_fd)
    if self.child_fde:
        rfds.append(self.child_fde)
    if not self.isalive():
        if not rfds:
            return None, None
        rlist, _, _ = select.select(rfds, [], [], 0)
        if not rlist:
            self.flag_eof_stdout = self.flag_eof_stderr = True
            log.debug('End of file(EOL). Brain-dead platform.')
            return None, None
    elif self.__irix_hack:
        # Irix takes a long time before it realizes a child was
        # terminated.
        # FIXME So does this mean Irix systems are forced to always
        # have a 2 second delay when calling read_nonblocking?
        # That sucks.
        rlist, _, _ = select.select(rfds, [], [], 2)
        if not rlist:
            self.flag_eof_stdout = self.flag_eof_stderr = True
            log.debug('End of file(EOL). Slow platform.')
            return None, None
    stderr = ''
    stdout = ''
    # ----- Store FD Flags ------------------------------------------>
    # Remember the original fd flags so the finally blocks below can
    # restore blocking mode after the reads.
    if self.child_fd:
        fd_flags = fcntl.fcntl(self.child_fd, fcntl.F_GETFL)
    if self.child_fde:
        fde_flags = fcntl.fcntl(self.child_fde, fcntl.F_GETFL)
    # <---- Store FD Flags -------------------------------------------
    # ----- Non blocking Reads -------------------------------------->
    if self.child_fd:
        fcntl.fcntl(self.child_fd,
                    fcntl.F_SETFL, fd_flags | os.O_NONBLOCK)
    if self.child_fde:
        fcntl.fcntl(self.child_fde,
                    fcntl.F_SETFL, fde_flags | os.O_NONBLOCK)
    # <---- Non blocking Reads ---------------------------------------
    # ----- Check for any incoming data ----------------------------->
    rlist, _, _ = select.select(rfds, [], [], 0)
    # <---- Check for any incoming data ------------------------------
    # ----- Nothing to Process!? ------------------------------------>
    if not rlist:
        if not self.isalive():
            self.flag_eof_stdout = self.flag_eof_stderr = True
            log.debug('End of file(EOL). Very slow platform.')
            return None, None
    # <---- Nothing to Process!? -------------------------------------
    # ----- Process STDERR ------------------------------------------>
    if self.child_fde in rlist:
        try:
            stderr = self._translate_newlines(
                salt.utils.to_str(
                    os.read(self.child_fde, maxsize)
                )
            )
            if not stderr:
                # Zero-byte read: the child closed its stderr PTY.
                self.flag_eof_stderr = True
                stderr = None
            else:
                if self.stream_stderr:
                    self.stream_stderr.write(stderr)
                    self.stream_stderr.flush()
                if self.stderr_logger:
                    # Don't log pure-whitespace chunks.
                    stripped = stderr.rstrip()
                    if stripped.startswith(os.linesep):
                        stripped = stripped[len(os.linesep):]
                    if stripped:
                        self.stderr_logger.log(self.stderr_logger_level, stripped)
        except OSError:
            os.close(self.child_fde)
            self.child_fde = None
            self.flag_eof_stderr = True
            stderr = None
        finally:
            # Restore the original (blocking) flags.
            if self.child_fde is not None:
                fcntl.fcntl(self.child_fde, fcntl.F_SETFL, fde_flags)
    # <---- Process STDERR -------------------------------------------
    # ----- Process STDOUT ------------------------------------------>
    if self.child_fd in rlist:
        try:
            stdout = self._translate_newlines(
                salt.utils.to_str(
                    os.read(self.child_fd, maxsize)
                )
            )
            if not stdout:
                # Zero-byte read: the child closed its stdout PTY.
                self.flag_eof_stdout = True
                stdout = None
            else:
                if self.stream_stdout:
                    self.stream_stdout.write(stdout)
                    self.stream_stdout.flush()
                if self.stdout_logger:
                    stripped = stdout.rstrip()
                    if stripped.startswith(os.linesep):
                        stripped = stripped[len(os.linesep):]
                    if stripped:
                        self.stdout_logger.log(self.stdout_logger_level, stripped)
        except OSError:
            os.close(self.child_fd)
            self.child_fd = None
            self.flag_eof_stdout = True
            stdout = None
        finally:
            # Restore the original (blocking) flags.
            if self.child_fd is not None:
                fcntl.fcntl(self.child_fd, fcntl.F_SETFL, fd_flags)
    # <---- Process STDOUT -------------------------------------------
    return stdout, stderr
def __detect_parent_terminal_size(self):
    '''
    Return the parent terminal's ``(rows, cols)`` using the TIOCGWINSZ
    ioctl on stdin, falling back to 24x80 when stdin is not a tty.
    '''
    try:
        # 1074295912 is a fallback value for TIOCGWINSZ on platforms
        # whose termios module doesn't expose the constant.
        TIOCGWINSZ = getattr(termios, 'TIOCGWINSZ', 1074295912)
        packed = struct.pack('HHHH', 0, 0, 0, 0)
        ioctl = fcntl.ioctl(sys.stdin.fileno(), TIOCGWINSZ, packed)
        return struct.unpack('HHHH', ioctl)[0:2]
    except IOError:
        # Return a default value of 24x80
        return 24, 80
# <---- Internal API -------------------------------------------------
# ----- Public API -------------------------------------------------->
def getwinsize(self):
    '''
    This returns the terminal window size of the child tty. The return
    value is a tuple of (rows, cols).

    Thank you for the shortcut PEXPECT

    :raises TerminalException: when no child tty is attached
    '''
    if self.child_fd is None:
        raise TerminalException(
            'Can\'t check the size of the terminal since we\'re not '
            'connected to the child process.'
        )
    # 1074295912 is a fallback value for TIOCGWINSZ on platforms whose
    # termios module doesn't expose the constant.
    TIOCGWINSZ = getattr(termios, 'TIOCGWINSZ', 1074295912)
    packed = struct.pack('HHHH', 0, 0, 0, 0)
    ioctl = fcntl.ioctl(self.child_fd, TIOCGWINSZ, packed)
    return struct.unpack('HHHH', ioctl)[0:2]
def setwinsize(self, rows, cols):
    '''
    This sets the terminal window size of the child tty. This will
    cause a SIGWINCH signal to be sent to the child. This does not
    change the physical window size. It changes the size reported to
    TTY-aware applications like vi or curses -- applications that
    respond to the SIGWINCH signal.

    Thank you for the shortcut PEXPECT
    '''
    # Check for buggy platforms. Some Python versions on some platforms
    # (notably OSF1 Alpha and RedHat 7.1) truncate the value for
    # termios.TIOCSWINSZ. It is not clear why this happens.
    # These platforms don't seem to handle the signed int very well;
    # yet other platforms like OpenBSD have a large negative value for
    # TIOCSWINSZ and they don't have a truncate problem.
    # Newer versions of Linux have totally different values for
    # TIOCSWINSZ.
    # Note that this fix is a hack.
    TIOCSWINSZ = getattr(termios, 'TIOCSWINSZ', -2146929561)
    if TIOCSWINSZ == 2148037735:
        # Same bits, but with sign.
        TIOCSWINSZ = -2146929561
    # Note, assume ws_xpixel and ws_ypixel are zero.
    packed = struct.pack('HHHH', rows, cols, 0, 0)
    fcntl.ioctl(self.child_fd, TIOCSWINSZ, packed)
def isalive(self,
_waitpid=os.waitpid,
_wnohang=os.WNOHANG,
_wifexited=os.WIFEXITED,
_wexitstatus=os.WEXITSTATUS,
_wifsignaled=os.WIFSIGNALED,
_wifstopped=os.WIFSTOPPED,
_wtermsig=os.WTERMSIG,
_os_error=os.error,
_errno_echild=errno.ECHILD,
_terminal_exception=TerminalException):
'''
This tests if the child process is running or not. This is
non-blocking. If the child was terminated then this will read the
exitstatus or signalstatus of the child. This returns True if the
child process appears to be running or False if not. It can take
literally SECONDS for Solaris to return the right status.
'''
if self.terminated:
return False
if self.has_unread_data is False:
# This is for Linux, which requires the blocking form
# of waitpid to get status of a defunct process.
# This is super-lame. The flag_eof_* would have been set
# in recv(), so this should be safe.
waitpid_options = 0
else:
waitpid_options = _wnohang
try:
pid, status = _waitpid(self.pid, waitpid_options)
except _os_error:
err = sys.exc_info()[1]
# No child processes
if err.errno == _errno_echild:
raise _terminal_exception(
'isalive() encountered condition where "terminated" '
'is 0, but there was no child process. Did someone '
'else call waitpid() on our process?'
)
else:
raise err
# I have to do this twice for Solaris.
# I can't even believe that I figured this out...
# If waitpid() returns 0 it means that no child process
# wishes to report, and the value of status is undefined.
if pid == 0:
try:
### os.WNOHANG # Solaris!
pid, status = _waitpid(self.pid, waitpid_options)
except _os_error as exc:
# This should never happen...
if exc.errno == _errno_echild:
raise _terminal_exception(
'isalive() encountered condition that should '
'never happen. There was no child process. Did '
'someone else call waitpid() on our process?'
)
else:
raise
# If pid is still 0 after two calls to waitpid() then the
# process really is alive. This seems to work on all platforms,
# except for Irix which seems to require a blocking call on
# waitpid or select, so I let recv take care of this situation
# (unfortunately, this requires waiting through the timeout).
if pid == 0:
return True
if pid == 0:
return True
if _wifexited(status):
self.status = status
self.exitstatus = _wexitstatus(status)
self.signalstatus = None
self.terminated = True
elif _wifsignaled(status):
self.status = status
self.exitstatus = None
self.signalstatus = _wtermsig(status)
self.terminated = True
elif _wifstopped(status):
raise _terminal_exception(
'isalive() encountered condition where child process is '
'stopped. This is not supported. Is some other process '
'attempting job control with our child pid?'
)
return False
def terminate(self, force=False):
'''
This forces a child process to terminate. It starts nicely with
SIGHUP and SIGINT. If "force" is True then moves onto SIGKILL. This
returns True if the child was terminated. This returns False if the
child could not be terminated.
'''
if not self.closed:
self.close(terminate=False)
if not self.isalive():
return True
try:
self.send_signal(signal.SIGHUP)
time.sleep(0.1)
if not self.isalive():
return True
self.send_signal(signal.SIGCONT)
time.sleep(0.1)
if not self.isalive():
return True
self.send_signal(signal.SIGINT)
time.sleep(0.1)
if not self.isalive():
return True
if force:
self.send_signal(signal.SIGKILL)
time.sleep(0.1)
if not self.isalive():
return True
else:
return False
return False
except OSError:
# I think there are kernel timing issues that sometimes cause
# this to happen. I think isalive() reports True, but the
# process is dead to the kernel.
# Make one last attempt to see if the kernel is up to date.
time.sleep(0.1)
if not self.isalive():
return True
else:
return False
def wait(self):
    '''
    This waits until the child exits internally consuming any remaining
    output from the child, thus, no blocking forever because the child
    has unread data.

    :returns: the child's exit status (``None`` when it was signaled)
    :raises TerminalException: when the child is already dead
    '''
    if self.isalive():
        while self.isalive():
            # Drain both channels; isalive() records exitstatus /
            # signalstatus once the child is reaped.
            stdout, stderr = self.recv()
            if stdout is None:
                break
            if stderr is None:
                break
    else:
        raise TerminalException('Cannot wait for dead child process.')
    return self.exitstatus
def send_signal(self, sig):
    '''
    Send a signal to the process.
    '''
    os.kill(self.pid, sig)
def kill(self):
    '''
    Kill the process with SIGKILL.
    '''
    self.send_signal(signal.SIGKILL)
# <---- Public API ---------------------------------------------------
# <---- Linux Methods ----------------------------------------------------
# ----- Cleanup!!! ------------------------------------------------------>
def __del__(self, _maxsize=sys.maxsize, _active=_ACTIVE):  # pylint: disable=W0102
    '''
    Park not-yet-reaped children on the module-level ``_ACTIVE`` list so
    a later ``_cleanup()`` call can wait() on them.
    '''
    # I've disabled W0102 above which is regarding a dangerous default
    # value of [] for _ACTIVE, though, this is how Python itself handles
    # their subprocess clean up code.
    # XXX: Revisit this cleanup code to make it less dangerous.
    # The default arguments capture sys.maxsize/_ACTIVE at definition
    # time so they stay reachable during interpreter shutdown, when
    # module globals may already have been torn down.
    if self.pid is None:
        # We didn't get to successfully create a child process.
        return
    # In case the child hasn't been waited on, check if it's done.
    if self.isalive() and _active is not None:
        # Child is still running, keep us alive until we can wait on it.
        # BUGFIX: append to the captured ``_active`` binding; using the
        # module global ``_ACTIVE`` here defeated the whole point of
        # capturing it as a default argument.
        _active.append(self)
# <---- Cleanup!!! -------------------------------------------------------
# <---- Platform Specific Methods --------------------------------------------
| stephane-martin/salt-debian-packaging | salt-2016.3.2/salt/utils/vt.py | Python | apache-2.0 | 37,126 |
#***************************************************************************
#* *
#* Copyright (c) 2011 *
#* Yorik van Havre <yorik@uncreated.net> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
# WARNING ##################################################################
# #
# This module is deprecated and will be removed in a future version #
# #
############################################################################
import FreeCAD, Arch, Draft, os, sys, time, Part, DraftVecUtils, uuid, math, re
from DraftTools import translate
__title__="FreeCAD IFC importer"
__author__ = "Yorik van Havre"
__url__ = "http://www.freecadweb.org"
# config
subtractiveTypes = ["IfcOpeningElement"] # elements that must be subtracted from their parents
SCHEMA = "http://www.steptools.com/support/stdev_docs/ifcbim/ifc4.exp" # only for internal parser
MAKETEMPFILES = False # if True, shapes are passed from ifcopenshell to freecad through temp files
DEBUG = True # this is only for the python console, this value is overridden when importing through the GUI
SKIP = ["IfcBuildingElementProxy","IfcFlowTerminal","IfcFurnishingElement"] # default. overwritten by the GUI options
IFCLINE_RE = re.compile("#(\d+)[ ]?=[ ]?(.*?)\((.*)\);[\\r]?$")
PRECISION = 4 # rounding value, in number of digits
APPLYFIX = True # if true, the ifcopenshell bug-fixing function is applied when saving files
# end config
# supported ifc products (export only):
supportedIfcTypes = ["IfcSite", "IfcBuilding", "IfcBuildingStorey", "IfcBeam", "IfcBeamStandardCase",
"IfcChimney", "IfcColumn", "IfcColumnStandardCase", "IfcCovering", "IfcCurtainWall",
"IfcDoor", "IfcDoorStandardCase", "IfcMember", "IfcMemberStandardCase", "IfcPlate",
"IfcPlateStandardCase", "IfcRailing", "IfcRamp", "IfcRampFlight", "IfcRoof",
"IfcSlab", "IfcStair", "IfcStairFlight", "IfcWall","IfcSpace",
"IfcWallStandardCase", "IfcWindow", "IfcWindowStandardCase", "IfcBuildingElementProxy",
"IfcPile", "IfcFooting", "IfcReinforcingBar", "IfcTendon"]
# TODO : shading device not supported?
# Keep a reference to the builtin open() before this module shadows it
# with the FreeCAD importer entry point defined below.
if open.__module__ in ['__builtin__','io']:
    pyopen = open # because we'll redefine open below
def open(filename,skip=None):
    '''
    Called when FreeCAD opens a file.

    Creates a new document named after ``filename`` and imports the IFC
    contents into it.  ``skip`` is forwarded to :func:`read` (entity ids
    to ignore).
    '''
    docname = os.path.splitext(os.path.basename(filename))[0]
    doc = FreeCAD.newDocument(docname)
    # NOTE(review): ``decode`` is not defined in this chunk -- presumably
    # a module-level helper; confirm.
    doc.Label = decode(docname)
    FreeCAD.ActiveDocument = doc
    getConfig()
    read(filename,skip)
    return doc
def insert(filename,docname,skip=None):
    '''
    Called when FreeCAD wants to import a file into an existing document.

    Reuses the document named ``docname`` when it exists, otherwise
    creates it, then imports the IFC contents.
    '''
    try:
        doc = FreeCAD.getDocument(docname)
    except NameError:
        # FreeCAD raises NameError when no such document exists.
        doc = FreeCAD.newDocument(docname)
    FreeCAD.ActiveDocument = doc
    getConfig()
    read(filename,skip)
    return doc
def getConfig():
    '''
    Gets Arch IFC import preferences and stores them in this module's
    globals.
    '''
    # BUGFIX: DEBUG was assigned below without being listed in the
    # ``global`` statement, so the "ifcDebug" preference was silently
    # discarded -- even though the module header says the GUI overrides
    # the DEBUG value.
    global SKIP, CREATE_IFC_GROUPS, ASMESH, PREFIX_NUMBERS, FORCE_PYTHON_PARSER, SEPARATE_OPENINGS, SEPARATE_PLACEMENTS, JOINSOLIDS, AGGREGATE_WINDOWS, DEBUG
    # NOTE(review): this is a local and is never read -- confirm it can
    # be dropped.
    IMPORT_IFC_FURNITURE = False
    ASMESH = ["IfcFurnishingElement"]
    p = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch")
    CREATE_IFC_GROUPS = p.GetBool("createIfcGroups",False)
    FORCE_PYTHON_PARSER = p.GetBool("forceIfcPythonParser",False)
    DEBUG = p.GetBool("ifcDebug",False)
    SEPARATE_OPENINGS = p.GetBool("ifcSeparateOpenings",False)
    SEPARATE_PLACEMENTS = p.GetBool("ifcSeparatePlacements",False)
    PREFIX_NUMBERS = p.GetBool("ifcPrefixNumbers",False)
    JOINSOLIDS = p.GetBool("ifcJoinSolids",False)
    AGGREGATE_WINDOWS = p.GetBool("ifcAggregateWindows",False)
    skiplist = p.GetString("ifcSkip","")
    if skiplist:
        SKIP = skiplist.split(",")
    asmeshlist = p.GetString("ifcAsMesh","")
    if asmeshlist:
        ASMESH = asmeshlist.split(",")
def getIfcOpenShell():
    '''
    Locates and imports ifcopenshell.

    Tries the modern ``IfcImport`` module first, then the legacy
    ``ifc_wrapper`` name.  Sets the module global ``IFCOPENSHELL5`` when
    the 0.5+ API is detected.

    :returns: True when an importer module was found, False otherwise.
    '''
    global IFCOPENSHELL5
    global IfcImport
    IFCOPENSHELL5 = False
    try:
        import IfcImport
    except ImportError:
        try:
            import ifc_wrapper as IfcImport
        except ImportError:
            FreeCAD.Console.PrintMessage(translate("Arch","Couldn't locate IfcOpenShell\n"))
            return False
        else:
            # ifc_wrapper is only shipped by ifcopenshell >= 0.5
            IFCOPENSHELL5 = True
            return True
    else:
        if hasattr(IfcImport,"IfcFile"):
            IFCOPENSHELL5 = True
        # BUGFIX: older IfcImport builds (without ``IfcFile``) used to
        # fall through and return None, making callers believe that
        # IfcOpenShell was absent even though the import succeeded.
        return True
def read(filename,skip=None):
"Parses an IFC file"
# parsing the IFC file
t1 = time.time()
processedIds = []
skipIds = skip
if not skipIds:
skipIds = []
elif isinstance(skipIds,int):
skipIds = [skipIds]
if getIfcOpenShell() and not FORCE_PYTHON_PARSER:
# use the IfcOpenShell parser
# preparing IfcOpenShell
if DEBUG: global ifcObjects,ifcParents
ifcObjects = {} # a table to relate ifc id with freecad object
ifcParents = {} # a table to relate ifc id with parent id
if SEPARATE_OPENINGS:
if not IFCOPENSHELL5:
if hasattr(IfcImport,"DISABLE_OPENING_SUBTRACTIONS"):
IfcImport.Settings(IfcImport.DISABLE_OPENING_SUBTRACTIONS,True)
else:
SKIP.append("IfcOpeningElement")
useShapes = False
if IFCOPENSHELL5:
useShapes = True
if hasattr(IfcImport,"clean"):
IfcImport.clean()
elif hasattr(IfcImport,"USE_BREP_DATA"):
IfcImport.Settings(IfcImport.USE_BREP_DATA,True)
useShapes = True
else:
if DEBUG: print("Warning: IfcOpenShell version very old, unable to handle Brep data")
# opening file
if IFCOPENSHELL5:
global ifc
ifc = IfcImport.open(filename)
objects = ifc.by_type("IfcProduct")
num_lines = len(objects)
relations = ifc.by_type("IfcRelAggregates") + ifc.by_type("IfcRelContainedInSpatialStructure") + ifc.by_type("IfcRelVoidsElement")
if not objects:
print("Error opening IFC file")
return
else:
num_lines = sum(1 for line in pyopen(filename))
if not IfcImport.Init(filename):
print("Error opening IFC file")
return
# processing geometry
idx = 0
while True:
objparentid = []
if IFCOPENSHELL5:
obj = objects[idx]
idx += 1
objid = int(str(obj).split("=")[0].strip("#"))
objname = obj.get_argument(obj.get_argument_index("Name"))
objtype = str(obj).split("=")[1].split("(")[0]
for r in relations:
if r.is_a("IfcRelAggregates"):
for c in getAttr(r,"RelatedObjects"):
if str(obj) == str(c):
objparentid.append(int(str(getAttr(r,"RelatingObject")).split("=")[0].strip("#")))
elif r.is_a("IfcRelContainedInSpatialStructure"):
for c in getAttr(r,"RelatedElements"):
if str(obj) == str(c):
objparentid.append(int(str(getAttr(r,"RelatingStructure")).split("=")[0].strip("#")))
elif r.is_a("IfcRelVoidsElement"):
if str(obj) == str(getAttr(r,"RelatedOpeningElement")):
objparentid.append(int(str(getAttr(r,"RelatingBuildingElement")).split("=")[0].strip("#")))
else:
if hasattr(IfcImport, 'GetBrepData'):
obj = IfcImport.GetBrepData()
else:
obj = IfcImport.Get()
objid = obj.id
idx = objid
objname = obj.name
objtype = obj.type
objparentid.append(obj.parent_id)
if DEBUG: print("["+str(int((float(idx)/num_lines)*100))+"%] parsing ",objid,": ",objname," of type ",objtype)
# retrieving name
n = getCleanName(objname,objid,objtype)
# skip IDs
if objid in skipIds:
if DEBUG: print(" skipping because object ID is in skip list")
nobj = None
# skip types
elif objtype in SKIP:
if DEBUG: print(" skipping because type is in skip list")
nobj = None
# check if object was already processed, to workaround an ifcopenshell bug
elif objid in processedIds:
if DEBUG: print(" skipping because this object was already processed")
else:
# build shape
shape = None
if useShapes:
shape = getShape(obj,objid)
# walls
if objtype in ["IfcWallStandardCase","IfcWall"]:
nobj = makeWall(objid,shape,n)
# windows
elif objtype in ["IfcWindow","IfcDoor"]:
nobj = makeWindow(objid,shape,n)
# structs
elif objtype in ["IfcBeam","IfcColumn","IfcSlab","IfcFooting"]:
nobj = makeStructure(objid,shape,objtype,n)
# roofs
elif objtype in ["IfcRoof"]:
nobj = makeRoof(objid,shape,n)
# furniture
elif objtype in ["IfcFurnishingElement"]:
nobj = FreeCAD.ActiveDocument.addObject("Part::Feature",n)
nobj.Shape = shape
# sites
elif objtype in ["IfcSite"]:
nobj = makeSite(objid,shape,n)
# floors
elif objtype in ["IfcBuildingStorey"]:
nobj = Arch.makeFloor(name=n)
nobj.Label = n
# floors
elif objtype in ["IfcBuilding"]:
nobj = Arch.makeBuilding(name=n)
nobj.Label = n
# spaces
elif objtype in ["IfcSpace"]:
nobj = makeSpace(objid,shape,n)
elif shape:
# treat as dumb parts
if DEBUG: print("Fixme: Shape-containing object not handled: ",objid, " ", objtype)
nobj = FreeCAD.ActiveDocument.addObject("Part::Feature",n)
nobj.Label = n
nobj.Shape = shape
else:
# treat as meshes
if DEBUG: print("Warning: Object without shape: ",objid, " ", objtype)
if hasattr(obj,"mesh"):
if not hasattr(obj.mesh, 'verts'):
obj = IfcImport.Get() # Get triangulated rep of same product
me,pl = getMesh(obj)
nobj = FreeCAD.ActiveDocument.addObject("Mesh::Feature",n)
nobj.Label = n
nobj.Mesh = me
nobj.Placement = pl
else:
if DEBUG: print("Error: Skipping object without mesh: ",objid, " ", objtype)
# registering object number and parent
if objparentid:
ifcParents[objid] = []
for p in objparentid:
ifcParents[objid].append([p,not (objtype in subtractiveTypes)])
ifcObjects[objid] = nobj
processedIds.append(objid)
if IFCOPENSHELL5:
if idx >= len(objects):
break
else:
if not IfcImport.Next():
break
# processing non-geometry and relationships
parents_temp = dict(ifcParents)
import ArchCommands
#print(parents_temp)
while parents_temp:
id, comps = parents_temp.popitem()
for c in comps:
parent_id = c[0]
additive = c[1]
if (id <= 0) or (parent_id <= 0):
# root dummy object
parent = None
elif parent_id in ifcObjects:
parent = ifcObjects[parent_id]
# check if parent is a subtraction, if yes parent to grandparent
if parent_id in ifcParents:
for p in ifcParents[parent_id]:
if p[1] == False:
grandparent_id = p[0]
if grandparent_id in ifcObjects:
parent = ifcObjects[grandparent_id]
else:
# creating parent if needed
if IFCOPENSHELL5:
obj = ifc.by_id(parent_id)
parentid = int(str(obj).split("=")[0].strip("#"))
parentname = obj.get_argument(obj.get_argument_index("Name"))
parenttype = str(obj).split("=")[1].split("(")[0]
else:
obj = IfcImport.GetObject(parent_id)
parentid = obj.id
parentname = obj.name
parenttype = obj.type
#if DEBUG: print("["+str(int((float(idx)/num_lines)*100))+"%] parsing ",parentid,": ",parentname," of type ",parenttype)
n = getCleanName(parentname,parentid,parenttype)
if parentid <= 0:
parent = None
elif parenttype == "IfcBuildingStorey":
parent = Arch.makeFloor(name=n)
parent.Label = n
elif parenttype == "IfcBuilding":
parent = Arch.makeBuilding(name=n)
parent.Label = n
elif parenttype == "IfcSite":
parent = Arch.makeSite(name=n)
parent.Label = n
elif parenttype == "IfcWindow":
parent = Arch.makeWindow(name=n)
parent.Label = n
elif parenttype == "IfcProject":
parent = None
else:
if DEBUG: print("Fixme: skipping unhandled parent: ", parentid, " ", parenttype)
parent = None
# registering object number and parent
if not IFCOPENSHELL5:
if parent_ifcobj.parent_id > 0:
ifcParents[parentid] = [parent_ifcobj.parent_id,True]
parents_temp[parentid] = [parent_ifcobj.parent_id,True]
if parent and (not parentid in ifcObjects):
ifcObjects[parentid] = parent
# attributing parent
if parent and (id in ifcObjects):
if ifcObjects[id] and (ifcObjects[id].Name != parent.Name):
if additive:
if DEBUG: print("adding ",ifcObjects[id].Name, " to ",parent.Name)
ArchCommands.addComponents(ifcObjects[id],parent)
else:
if DEBUG: print("removing ",ifcObjects[id].Name, " from ",parent.Name)
ArchCommands.removeComponents(ifcObjects[id],parent)
if not IFCOPENSHELL5:
IfcImport.CleanUp()
else:
# use only the internal python parser
FreeCAD.Console.PrintWarning(translate("Arch","IfcOpenShell not found or disabled, falling back on internal parser.\n"))
schema=getSchema()
if schema:
if DEBUG: print("opening",filename,"...")
ifc = IfcDocument(filename,schema=schema)
else:
FreeCAD.Console.PrintWarning(translate("Arch","IFC Schema not found, IFC import disabled.\n"))
return None
t2 = time.time()
if DEBUG: print("Successfully loaded",ifc,"in %s s" % ((t2-t1)))
# getting walls
for w in ifc.getEnt("IfcWallStandardCase"):
nobj = makeWall(w)
# getting windows and doors
for w in (ifc.getEnt("IfcWindow") + ifc.getEnt("IfcDoor")):
nobj = makeWindow(w)
# getting structs
for w in (ifc.getEnt("IfcSlab") + ifc.getEnt("IfcBeam") + ifc.getEnt("IfcColumn") \
+ ifc.getEnt("IfcFooting")):
nobj = makeStructure(w)
# getting floors
for f in ifc.getEnt("IfcBuildingStorey"):
group(f,ifc,"Floor")
# getting buildings
for b in ifc.getEnt("IfcBuilding"):
group(b,ifc,"Building")
# getting sites
for s in ifc.getEnt("IfcSite"):
group(s,ifc,"Site")
if DEBUG: print("done parsing. Recomputing...")
FreeCAD.ActiveDocument.recompute()
t3 = time.time()
if DEBUG: print("done processing IFC file in %s s" % ((t3-t1)))
return None
def getCleanName(name,ifcid,ifctype):
    """Return a display name for an IFC object.

    Falls back to the IFC type when the entity has no name, and optionally
    prefixes the IFC entity id when PREFIX_NUMBERS is set.
    """
    #print("getCleanName called",name,ifcid,ifctype)
    cleaned = name if name else ifctype
    if PREFIX_NUMBERS:
        cleaned = "ID" + str(ifcid) + " " + cleaned
    return cleaned
def makeWall(entity,shape=None,name="Wall"):
    """Create an Arch Wall in the active document from an IFC entity.

    entity: the IFC entity (only logged when a shape is given; the internal
            parser path reads its placement, properties and representations).
    shape:  optional geometry from IfcOpenShell — a Part.Shape or, otherwise,
            assumed to be mesh data.
    name:   label for the new object.
    Returns the created wall, or None on failure. NOTE: the bare except
    swallows every error, so failures are silent unless DEBUG is set.
    """
    try:
        if shape:
            # use ifcopenshell
            if isinstance(shape,Part.Shape):
                body = FreeCAD.ActiveDocument.addObject("Part::Feature",name+"_body")
                body.Shape = shape
            else:
                # not a Part shape: treat as mesh data
                body = FreeCAD.ActiveDocument.addObject("Mesh::Feature",name+"_body")
                body.Mesh = shape
            wall = Arch.makeWall(body,name=name)
            wall.Label = name
            if DEBUG: print("  made wall object  ",entity,":",wall)
            return wall
        # use internal parser
        if DEBUG: print("=====> making wall",entity.id)
        placement = wall = wire = body = width = height = None
        placement = getPlacement(entity.ObjectPlacement)
        if DEBUG: print("  got wall placement",entity.id,":",placement)
        width = entity.getProperty("Width")
        height = entity.getProperty("Height")
        if width and height:
            # parametric wall: extrude the Axis representation
            if DEBUG: print("  got width, height ",entity.id,":",width,"/",height)
            for r in entity.Representation.Representations:
                if r.RepresentationIdentifier == "Axis":
                    wire = getWire(r.Items,placement)
                    wall = Arch.makeWall(wire,width,height,align="Center",name="Wall"+str(entity.id))
        else:
            # no Width/Height properties: fall back to the Body representation
            if DEBUG: print("  no height or width properties found...")
            for r in entity.Representation.Representations:
                if r.RepresentationIdentifier == "Body":
                    for b in r.Items:
                        if b.type == "IFCEXTRUDEDAREASOLID":
                            norm = getVector(b.ExtrudedDirection)
                            norm.normalize()
                            wire = getWire(b.SweptArea,placement)
                            wall = Arch.makeWall(wire,width=0,height=b.Depth,name="Wall"+str(entity.id))
                            wall.Normal = norm
        if wall:
            if DEBUG: print("  made wall object  ",entity.id,":",wall)
            return wall
        if DEBUG: print("  error: skipping wall",entity.id)
        return None
    except:
        if DEBUG: print("  error: skipping wall",entity)
        return None
def makeWindow(entity,shape=None,name="Window"):
    """Create an Arch Window in the active document from an IFC entity.

    entity: the IFC entity (used for logging and object naming).
    shape:  optional Part.Shape from IfcOpenShell; used directly when given.
    name:   label for the new object.
    Returns the created window, or None on failure. NOTE: the bare except
    swallows every error, so failures are silent unless DEBUG is set.
    """
    try:
        if shape:
            # use ifcopenshell
            if isinstance(shape,Part.Shape):
                window = Arch.makeWindow(name=name)
                window.Shape = shape
                window.Label = name
                if DEBUG: print("  made window object  ",entity,":",window)
                return window
        # use internal parser
        if DEBUG: print("=====> making window",entity.id)
        placement = window = wire = body = width = height = None
        placement = getPlacement(entity.ObjectPlacement)
        if DEBUG: print("got window placement",entity.id,":",placement)
        width = entity.getProperty("Width")
        height = entity.getProperty("Height")
        for r in entity.Representation.Representations:
            if r.RepresentationIdentifier == "Body":
                for b in r.Items:
                    if b.type == "IFCEXTRUDEDAREASOLID":
                        wire = getWire(b.SweptArea,placement)
                        # bug fix: this line referenced 'objtype', which is not
                        # defined in this scope — the resulting NameError was
                        # silently swallowed by the except below, so no window
                        # was ever created by the internal parser. Use the
                        # 'name' parameter instead.
                        window = Arch.makeWindow(wire,width=b.Depth,name=name+str(entity.id))
        if window:
            if DEBUG: print("  made window object  ",entity.id,":",window)
            return window
        if DEBUG: print("  error: skipping window",entity.id)
        return None
    except:
        if DEBUG: print("  error: skipping window",entity)
        return None
def makeStructure(entity,shape=None,ifctype=None,name="Structure"):
    """Create an Arch Structure in the active document from an IFC entity.

    entity:  the IFC entity (used for logging and object naming).
    shape:   optional geometry from IfcOpenShell — a Part.Shape or mesh data.
    ifctype: the IFC class name; maps to the structure's Role
             (Beam/Column/Slab/Foundation).
    name:    label for the new object.
    Returns the created structure, or None on failure. NOTE: the bare except
    swallows every error, so failures are silent unless DEBUG is set.
    """
    try:
        if shape:
            # use ifcopenshell
            if isinstance(shape,Part.Shape):
                body = FreeCAD.ActiveDocument.addObject("Part::Feature",name+"_body")
                body.Shape = shape
            else:
                # not a Part shape: treat as mesh data
                body = FreeCAD.ActiveDocument.addObject("Mesh::Feature",name+"_body")
                body.Mesh = shape
            structure = Arch.makeStructure(body,name=name)
            structure.Label = name
            # map the IFC class to the Arch structural role
            if ifctype == "IfcBeam":
                structure.Role = "Beam"
            elif ifctype == "IfcColumn":
                structure.Role = "Column"
            elif ifctype == "IfcSlab":
                structure.Role = "Slab"
            elif ifctype == "IfcFooting":
                structure.Role = "Foundation"
            if DEBUG: print("  made structure object  ",entity,":",structure," (type: ",ifctype,")")
            return structure
        # use internal parser
        if DEBUG: print("=====> making struct",entity.id)
        placement = structure = wire = body = width = height = None
        placement = getPlacement(entity.ObjectPlacement)
        if DEBUG: print("got window placement",entity.id,":",placement)
        width = entity.getProperty("Width")
        height = entity.getProperty("Height")
        for r in entity.Representation.Representations:
            if r.RepresentationIdentifier == "Body":
                for b in r.Items:
                    if b.type == "IFCEXTRUDEDAREASOLID":
                        wire = getWire(b.SweptArea,placement)
                        # bug fix: this line referenced 'objtype', which is not
                        # defined in this scope — the resulting NameError was
                        # silently swallowed by the except below. Use the
                        # 'name' parameter instead.
                        structure = Arch.makeStructure(wire,height=b.Depth,name=name+str(entity.id))
        if structure:
            if DEBUG: print("  made structure object  ",entity.id,":",structure)
            return structure
        if DEBUG: print("  error: skipping structure",entity.id)
        return None
    except:
        if DEBUG: print("  error: skipping structure",entity)
        return None
def makeSite(entity,shape=None,name="Site"):
    """Create an Arch Site in the active document from an IFC entity.

    When a shape is supplied (Part.Shape or mesh data), it becomes the
    site's Terrain. Returns the site, or None on any error.
    """
    try:
        terrain = None
        if shape:
            # use ifcopenshell
            if isinstance(shape,Part.Shape):
                terrain = FreeCAD.ActiveDocument.addObject("Part::Feature",name+"_body")
                terrain.Shape = shape
            else:
                terrain = FreeCAD.ActiveDocument.addObject("Mesh::Feature",name+"_body")
                terrain.Mesh = shape
        site = Arch.makeSite(name=name)
        site.Label = name
        if terrain:
            site.Terrain = terrain
        if DEBUG: print("  made site object  ",entity,":",site)
        return site
    except:
        return None
def makeSpace(entity,shape=None,name="Space"):
    """Create an Arch Space in the active document from an IfcOpenShell shape.

    Only acts when a Part.Shape is supplied; the shape becomes a hidden base
    body of the space. Returns the space, or None otherwise / on any error.
    """
    try:
        if shape and isinstance(shape,Part.Shape):
            # use ifcopenshell
            space = Arch.makeSpace(name=name)
            space.Label = name
            body = FreeCAD.ActiveDocument.addObject("Part::Feature",name+"_body")
            body.Shape = shape
            space.Base = body
            body.ViewObject.hide()
            if DEBUG: print("  made space object  ",entity,":",space)
            return space
    except:
        return None
def makeRoof(entity,shape=None,name="Roof"):
    """Create an Arch Roof in the active document from an IfcOpenShell shape.

    Only acts when a Part.Shape is supplied. Returns the roof, or None
    otherwise / on any error.
    """
    try:
        if shape and isinstance(shape,Part.Shape):
            # use ifcopenshell
            roof = Arch.makeRoof(name=name)
            roof.Label = name
            roof.Shape = shape
            if DEBUG: print("  made roof object  ",entity,":",roof)
            return roof
    except:
        return None
# geometry helpers ###################################################################
def getMesh(obj):
    """Return a (Mesh, Placement) pair from a legacy IfcOpenShell object.

    Not supported with IfcOpenShell 5: returns (None, None) in that case.
    obj.mesh.faces is a flat list of vertex indices (3 per triangle) and
    obj.mesh.verts a flat list of coordinates (3 per vertex).
    """
    if IFCOPENSHELL5:
        return None,None
        # NOTE(review): line below is unreachable (dead code after return)
        print("fixme: mesh data not yet supported") # TODO implement this with OCC tessellate
    import Mesh
    meshdata = []
    print(obj.mesh.faces)
    print(obj.mesh.verts)
    f = obj.mesh.faces
    v = obj.mesh.verts
    # rebuild triangles: every 3 indices in f describe one face
    for i in range(0, len(f), 3):
        face = []
        for j in range(3):
            vi = f[i+j]*3
            face.append([v[vi],v[vi+1],v[vi+2]])
        meshdata.append(face)
    print(meshdata)
    me = Mesh.Mesh(meshdata)
    # get transformation matrix
    # obj.matrix holds 12 values of a 3x4 transform; expanded here to a 4x4
    # FreeCAD matrix (indices suggest column-major source order — the same
    # expansion is used in getShape below)
    m = obj.matrix
    mat = FreeCAD.Matrix(m[0], m[3], m[6], m[9],
                         m[1], m[4], m[7], m[10],
                         m[2], m[5], m[8], m[11],
                         0, 0, 0, 1)
    pl = FreeCAD.Placement(mat)
    return me,pl
def getShape(obj,objid):
    """Return a Part shape from an IfcOpenShell object, or None.

    obj:   the IfcOpenShell product (API differs between legacy bindings
           and IfcOpenShell 5).
    objid: the IFC entity id, used for log messages only.
    The shape is rebuilt from serialized BREP data. Face-only shapes are
    upgraded to solids when possible, and placements are applied according
    to the SEPARATE_PLACEMENTS / legacy-matrix conventions.
    """
    #print("retrieving shape from obj ",objid)
    import Part
    sh=Part.Shape()
    brep_data = None
    if IFCOPENSHELL5:
        try:
            # build the creation flag mask from the configured import options,
            # checking each flag exists in this IfcImport build first
            if hasattr(IfcImport,"SEW_SHELLS"):
                ss = IfcImport.SEW_SHELLS
            else:
                ss = 0
            if SEPARATE_OPENINGS and hasattr(IfcImport,"DISABLE_OPENING_SUBTRACTIONS"):
                if SEPARATE_PLACEMENTS and hasattr(IfcImport,"DISABLE_OBJECT_PLACEMENT"):
                    brep_data = IfcImport.create_shape(obj,IfcImport.DISABLE_OPENING_SUBTRACTIONS | IfcImport.DISABLE_OBJECT_PLACEMENT | ss)
                else:
                    brep_data = IfcImport.create_shape(obj,IfcImport.DISABLE_OPENING_SUBTRACTIONS | ss)
            else:
                if SEPARATE_PLACEMENTS and hasattr(IfcImport,"DISABLE_OBJECT_PLACEMENT"):
                    brep_data = IfcImport.create_shape(obj,IfcImport.DISABLE_OBJECT_PLACEMENT | ss)
                else:
                    brep_data = IfcImport.create_shape(obj, ss)
        except:
            print("Unable to retrieve shape data")
    else:
        # legacy bindings expose the serialized brep on the mesh attribute
        brep_data = obj.mesh.brep_data
    if brep_data:
        try:
            if MAKETEMPFILES:
                # round-trip through a temp file instead of the in-memory
                # string importer
                import tempfile
                th,tf = tempfile.mkstemp(suffix=".brp")
                of = pyopen(tf,"wb")
                of.write(brep_data)
                of.close()
                os.close(th)
                sh = Part.read(tf)
                os.remove(tf)
            else:
                sh.importBrepFromString(brep_data)
        except:
            print("  error: malformed shape")
            return None
        else:
            # brep imported fine: apply the separate object placement if asked
            if IFCOPENSHELL5 and SEPARATE_PLACEMENTS:
                p = getPlacement(getAttr(obj,"ObjectPlacement"))
                if p:
                    sh.Placement = p
    if not sh.Solids:
        # try to extract a solid shape
        if sh.Faces:
            try:
                if DEBUG: print("  malformed solid. Attempting to fix...")
                shell = Part.makeShell(sh.Faces)
                if shell:
                    solid = Part.makeSolid(shell)
                    if solid:
                        sh = solid
            except:
                if DEBUG: print("  failed to retrieve solid from object ",objid)
        else:
            if DEBUG: print("  object ", objid, " doesn't contain any geometry")
    if not IFCOPENSHELL5:
        # legacy bindings: apply the 3x4 transform matrix ourselves
        m = obj.matrix
        mat = FreeCAD.Matrix(m[0], m[3], m[6], m[9],
                             m[1], m[4], m[7], m[10],
                             m[2], m[5], m[8], m[11],
                             0, 0, 0, 1)
        sh.Placement = FreeCAD.Placement(mat)
    # if DEBUG: print("getting Shape from ",obj)
    #print("getting shape: ",sh,sh.Solids,sh.Volume,sh.isValid(),sh.isNull())
    #for v in sh.Vertexes: print(v.Point)
    if sh:
        if not sh.isNull():
            return sh
    return None
def getPlacement(entity):
    """Return a FreeCAD Placement from the given IFC placement entity.

    Handles IfcAxis2Placement3D (axis system + location), IfcLocalPlacement
    (recursively composed with its parent placement) and bare
    IfcCartesianPoint (translation only). Returns None for empty input or
    an incomplete axis system.
    """
    if not entity:
        return None
    if DEBUG: print("  getting placement ",entity)
    if IFCOPENSHELL5:
        if isinstance(entity,int):
            entity = ifc.by_id(entity)
        # type and id are parsed from the entity's string form "#id=TYPE(...)"
        entitytype = str(entity).split("=")[1].split("(")[0].upper()
        entityid = int(str(entity).split("=")[0].strip("#"))
    else:
        entitytype = entity.type.upper()
        entityid = entity.id
    pl = None
    if entitytype == "IFCAXIS2PLACEMENT3D":
        x = getVector(getAttr(entity,"RefDirection"))
        z = getVector(getAttr(entity,"Axis"))
        if not(x) or not(z):
            return None
        # y completes the axis system from the given x and z directions
        y = z.cross(x)
        loc = getVector(getAttr(entity,"Location"))
        m = DraftVecUtils.getPlaneRotation(x,y,z)
        pl = FreeCAD.Placement(m)
        pl.move(loc)
    elif entitytype == "IFCLOCALPLACEMENT":
        # relative placement: compose with the (recursive) parent placement
        pl = getPlacement(getAttr(entity,"PlacementRelTo"))
        relpl = getPlacement(getAttr(entity,"RelativePlacement"))
        if pl and relpl:
            pl = relpl.multiply(pl)
        elif relpl:
            pl = relpl
    elif entitytype == "IFCCARTESIANPOINT":
        loc = getVector(entity)
        pl = FreeCAD.Placement()
        pl.move(loc)
    if DEBUG: print("  made placement for ",entityid,":",pl)
    return pl
def getAttr(entity,attr):
    """Return the named attribute of an IFC entity.

    With IfcOpenShell 5 the attribute is fetched by its positional index;
    legacy bindings expose attributes directly on the object.
    """
    if not IFCOPENSHELL5:
        return getattr(entity,attr)
    if isinstance(entity,int):
        entity = ifc.by_id(entity)
    idx = entity.get_argument_index(attr)
    return entity.get_argument(idx)
def getVector(entity):
    """Return a FreeCAD.Vector from an IfcDirection or IfcCartesianPoint.

    2D input is padded with a zero z component. Returns None for empty
    input or unsupported entity types.
    """
    if not entity:
        return None
    if DEBUG: print("  getting point from ",entity)
    if IFCOPENSHELL5:
        if isinstance(entity,int):
            entity = ifc.by_id(entity)
        entitytype = str(entity).split("=")[1].split("(")[0].upper()
    else:
        entitytype = entity.type.upper()
    if entitytype == "IFCDIRECTION":
        ratios = getAttr(entity,"DirectionRatios")
        if len(ratios) == 3:
            return FreeCAD.Vector(tuple(ratios))
        return FreeCAD.Vector(tuple(ratios+[0]))
    if entitytype == "IFCCARTESIANPOINT":
        coords = getAttr(entity,"Coordinates")
        if len(coords) == 3:
            return FreeCAD.Vector(tuple(coords))
        return FreeCAD.Vector(tuple(coords+[0]))
    return None
# below is only used by the internal parser #########################################
def decode(name):
    """Decode a byte string to text, trying utf8 then latin1.

    If neither encoding works, an error is printed and the input is
    returned unchanged. Already-decoded text (Python 3 str has no
    decode()) is also returned unchanged instead of raising
    AttributeError.
    """
    try:
        decodedName = (name.decode("utf8"))
    except UnicodeDecodeError:
        try:
            decodedName = (name.decode("latin1"))
        except UnicodeDecodeError:
            FreeCAD.Console.PrintError(translate("Arch", "Error: Couldn't determine character encoding\n"))
            decodedName = name
    except AttributeError:
        # bug fix: input already decoded (py3 str) — return it as-is
        decodedName = name
    return decodedName
def getSchema():
    """Locate the EXPRESS schema file used by the internal IFC parser.

    Tries, in order: the user-configured custom schema, a copy in the
    user's application data folder, and finally a download of SCHEMA.
    Returns a path, or None when nothing could be found.
    """
    prefs = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch")
    custom = prefs.GetString("CustomIfcSchema","")
    if custom and os.path.exists(custom):
        if DEBUG: print("Using custom schema: ",custom.split(os.sep)[-1])
        return custom
    local = os.path.join(FreeCAD.ConfigGet("UserAppData"),SCHEMA.split(os.sep)[-1])
    if os.path.exists(local):
        return local
    import ArchCommands
    downloaded = ArchCommands.download(SCHEMA)
    if downloaded:
        return downloaded
    return None
def group(entity,ifc,mode=None):
    """Gather the children of an IFC spatial entity into an Arch container.

    entity: the spatial structure element (storey, building or site).
    ifc:    the open IfcDocument, queried for containment/aggregation
            relationships.
    mode:   "Site", "Floor" or "Building" — selects the container to create.
    Only used by the internal parser. All errors are swallowed (logged only
    when DEBUG is set).
    """
    # only used by the internal parser
    try:
        if DEBUG: print("=====> making group",entity.id)
        placement = None
        placement = getPlacement(entity.ObjectPlacement)
        if DEBUG: print("got cell placement",entity.id,":",placement)
        # collect children through both containment and aggregation relations
        subelements = ifc.find("IFCRELCONTAINEDINSPATIALSTRUCTURE","RelatingStructure",entity)
        subelements.extend(ifc.find("IFCRELAGGREGATES","RelatingObject",entity))
        elts = []
        for s in subelements:
            if hasattr(s,"RelatedElements"):
                s = s.RelatedElements
                if not isinstance(s,list): s = [s]
                elts.extend(s)
            elif hasattr(s,"RelatedObjects"):
                s = s.RelatedObjects
                if not isinstance(s,list): s = [s]
                elts.extend(s)
            elif hasattr(s,"RelatedObject"):
                s = s.RelatedObject
                if not isinstance(s,list): s = [s]
                elts.extend(s)
        print("found dependent elements: ",elts)
        # bucket the already-created FreeCAD objects by category; document
        # object names follow the "<Category><ifc id>" convention
        groups = [['Wall',['IfcWallStandardCase'],[]],
                  ['Window',['IfcWindow','IfcDoor'],[]],
                  ['Structure',['IfcSlab','IfcFooting','IfcBeam','IfcColumn'],[]],
                  ['Floor',['IfcBuildingStorey'],[]],
                  ['Building',['IfcBuilding'],[]],
                  ['Furniture',['IfcFurnishingElement'],[]]]
        for e in elts:
            for g in groups:
                for t in g[1]:
                    if e.type.upper() == t.upper():
                        if hasattr(FreeCAD.ActiveDocument,g[0]+str(e.id)):
                            g[2].append(FreeCAD.ActiveDocument.getObject(g[0]+str(e.id)))
        print("groups:",groups)
        comps = []
        if CREATE_IFC_GROUPS:
            # bug fix: this line called 'wprint', a typo for 'print' — the
            # resulting NameError was silently swallowed by the except below,
            # aborting the whole grouping when DEBUG was on
            if DEBUG: print("creating subgroups")
            for g in groups:
                if g[2]:
                    if g[0] in ['Building','Floor']:
                        comps.extend(g[2])
                    else:
                        fcg = FreeCAD.ActiveDocument.addObject("App::DocumentObjectGroup",g[0]+"s")
                        for o in g[2]:
                            fcg.addObject(o)
                        comps.append(fcg)
        else:
            for g in groups:
                comps.extend(g[2])
        label = entity.Name
        name = mode + str(entity.id)
        cell = None
        if mode == "Site":
            cell = Arch.makeSite(comps,name=name)
        elif mode == "Floor":
            cell = Arch.makeFloor(comps,name=name)
        elif mode == "Building":
            cell = Arch.makeBuilding(comps,name=name)
        if label and cell:
            cell.Label = label
    except:
        if DEBUG: print("error: skipping group ",entity.id)
def getWire(entity,placement=None):
    """Create a Draft wire in the document from an IFC curve entity.

    Supports IfcPolyline (open) and IfcArbitraryClosedProfileDef (closed).
    Only used by the internal parser; returns None for other input.
    """
    if DEBUG: print("making Wire from :",entity)
    if not entity: return None
    etype = entity.type
    if etype == "IFCPOLYLINE":
        points = [getVector(p) for p in entity.Points]
        return Draft.getWire(points,placement=placement)
    if etype == "IFCARBITRARYCLOSEDPROFILEDEF":
        points = [getVector(p) for p in entity.OuterCurve.Points]
        return Draft.getWire(points,closed=True,placement=placement)
# EXPORT ##########################################################
def export(exportList,filename):
    """Called when FreeCAD exports a file: write the given objects to IFC.

    exportList: document objects to export (groups are expanded first).
    filename:   path of the IFC file to write.
    Requires an IfcOpenShell build with IFC writing support; aborts with a
    console error otherwise. Objects that could not be exported are kept in
    the module-level 'unprocessed' list for inspection.
    """
    global ifcw
    ifcw = None
    try:
        import IfcImport as ifcw
    except ImportError:
        try:
            import ifc_wrapper as ifcw
        except ImportError:
            FreeCAD.Console.PrintError(translate("Arch","Error: IfcOpenShell is not installed\n"))
            print("""importIFC: ifcOpenShell is not installed. IFC export is unavailable.
                    Note: IFC export currently requires an experimental version of IfcOpenShell
                    available from https://github.com/aothms/IfcOpenShell""")
            return
    if (not hasattr(ifcw,"IfcFile")) and (not hasattr(ifcw,"file")):
        FreeCAD.Console.PrintError(translate("Arch","Error: your IfcOpenShell version is too old\n"))
        print("""importIFC: The version of ifcOpenShell installed on this system doesn't
                 have IFC export capabilities. IFC export currently requires an experimental
                 version of IfcOpenShell available from https://github.com/aothms/IfcOpenShell""")
        return
    import Arch,Draft
    # creating base IFC project
    getConfig()
    PRECISION = Draft.precision()
    p = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch")
    scaling = p.GetFloat("IfcScalingFactor",1.0)
    exporttxt = p.GetBool("IfcExportList",False)
    forcebrep = p.GetBool("ifcExportAsBrep",False)
    application = "FreeCAD"
    ver = FreeCAD.Version()
    version = ver[0]+"."+ver[1]+" build"+ver[2]
    owner = FreeCAD.ActiveDocument.CreatedBy
    company = FreeCAD.ActiveDocument.Company
    project = FreeCAD.ActiveDocument.Name
    ifc = IfcWriter(filename,project,owner,company,application,version)
    txt = []
    # get all children and reorder list to get buildings and floors processed first
    objectslist = Draft.getGroupContents(exportList,walls=True,addgroups=True)
    objectslist = Arch.pruneIncluded(objectslist)
    sites = []
    buildings = []
    floors = []
    groups = {}
    others = []
    for obj in objectslist:
        otype = Draft.getType(obj)
        if otype == "Site":
            sites.append(obj)
        elif otype == "Building":
            buildings.append(obj)
        elif otype == "Floor":
            floors.append(obj)
        elif otype == "Group":
            groups[obj.Name] = []
        else:
            others.append(obj)
    objectslist = buildings + floors + others
    if DEBUG: print("adding ", len(objectslist), " objects")
    global unprocessed
    unprocessed = []
    # process objects
    for obj in objectslist:
        otype = Draft.getType(obj)
        name = str(obj.Label)
        parent = Arch.getHost(obj)
        gdata = None
        fdata = None
        placement = None
        color = None
        representation = None
        descr = None
        extra = None
        # setting the IFC type
        if hasattr(obj,"Role"):
            ifctype = obj.Role.replace(" ","")
        else:
            ifctype = otype
        if ifctype == "Foundation":
            ifctype = "Footing"
        elif ifctype == "Rebar":
            ifctype = "ReinforcingBar"
        elif ifctype in ["Part","Undefined"]:
            ifctype = "BuildingElementProxy"
        # getting the "Force BREP" flag
        brepflag = False
        if hasattr(obj,"IfcAttributes"):
            if "FlagForceBrep" in obj.IfcAttributes.keys():
                if obj.IfcAttributes["FlagForceBrep"] == "True":
                    brepflag = True
        if DEBUG: print("Adding " + obj.Label + " as Ifc" + ifctype)
        # writing IFC data
        if obj.isDerivedFrom("App::DocumentObjectGroup"):
            # getting parent building
            if parent:
                parent = ifc.findByName("IfcBuilding",str(parent.Label))
            if otype == "Site":
                print("   Skipping (not implemented yet)") # TODO manage sites
            elif otype == "Building":
                ifc.addBuilding( name=name )
            elif otype == "Floor":
                ifc.addStorey( building=parent, name=name )
        elif obj.isDerivedFrom("Part::Feature"):
            # get color
            if FreeCAD.GuiUp:
                color = obj.ViewObject.ShapeColor[:3]
            # get parent floor
            if parent:
                parent = ifc.findByName("IfcBuildingStorey",str(parent.Label))
            # get representation: prefer parametric extrusion, fall back to brep
            if (not forcebrep) and (not brepflag):
                gdata = getIfcExtrusionData(obj,scaling,SEPARATE_OPENINGS)
                #if DEBUG: print("   extrusion data for ",obj.Label," : ",gdata)
            if not gdata:
                fdata = getIfcBrepFacesData(obj,scaling)
                #if DEBUG: print("   brep data for ",obj.Label," : ",fdata)
                if not fdata:
                    if obj.isDerivedFrom("Part::Feature"):
                        print("   Error retrieving the shape of object ", obj.Label)
                        unprocessed.append(obj)
                        continue
                    else:
                        if DEBUG: print("   No geometry")
                else:
                    if DEBUG: print("   Brep")
            else:
                if DEBUG: print("   Extrusion")
            if gdata:
                # gdata = [ type, profile data, extrusion data, placement data ]
                placement = ifc.addPlacement(origin=gdata[3][0],xaxis=gdata[3][1],zaxis=gdata[3][2])
                if gdata[0] == "polyline":
                    representation = ifc.addExtrudedPolyline(gdata[1], gdata[2], color=color)
                elif gdata[0] == "circle":
                    representation = ifc.addExtrudedCircle(gdata[1], gdata[2], color=color)
                elif gdata[0] == "ellipse":
                    representation = ifc.addExtrudedEllipse(gdata[1], gdata[2], color=color)
                elif gdata[0] == "composite":
                    representation = ifc.addExtrudedCompositeCurve(gdata[1], gdata[2], color=color)
                else:
                    print("debug: unknow extrusion type")
            elif fdata:
                representation = [ifc.addFacetedBrep(f, color=color) for f in fdata]
            # create ifc object
            ifctype = "Ifc" + ifctype
            if hasattr(obj,"Description"):
                descr = obj.Description
            if otype == "Wall":
                if gdata:
                    if gdata[0] == "polyline":
                        ifctype = "IfcWallStandardCase"
            elif otype == "Structure":
                if ifctype in ["IfcSlab","IfcFooting"]:
                    extra = ["NOTDEFINED"]
            elif otype == "Window":
                extra = [obj.Width.Value*scaling, obj.Height.Value*scaling]
            elif otype == "Space":
                extra = ["ELEMENT","INTERNAL",getIfcElevation(obj)]
            elif otype == "Part":
                extra = ["ELEMENT"]
            if ifctype not in supportedIfcTypes:
                if DEBUG: print("   Type ",ifctype," is not supported yet. Exporting as IfcBuildingElementProxy instead")
                ifctype = "IfcBuildingElementProxy"
                extra = ["ELEMENT"]
            product = ifc.addProduct( ifctype, representation, storey=parent, placement=placement, name=name, description=descr, extra=extra )
            if product:
                # removing openings
                if SEPARATE_OPENINGS and gdata:
                    for o in obj.Subtractions:
                        print("Subtracting ",o.Label)
                        fdata = getIfcBrepFacesData(o,scaling,sub=True)
                        representation = [ifc.addFacetedBrep(f, color=color) for f in fdata]
                        p2 = ifc.addProduct( "IfcOpeningElement", representation, storey=product, placement=None, name=str(o.Label), description=None)
                # writing text log
                spacer = ""
                for i in range(36-len(obj.Label)):
                    spacer += " "
                txt.append(obj.Label + spacer + ifctype)
                # adding object to group, if any
                for g in groups.keys():
                    group = FreeCAD.ActiveDocument.getObject(g)
                    if group:
                        for o in group.Group:
                            if o.Name == obj.Name:
                                groups[g].append(product)
            else:
                unprocessed.append(obj)
        else:
            if DEBUG: print("Object type ", otype, " is not supported yet.")
    # processing groups
    # bug fix: dict.iteritems() is Python-2-only and raises AttributeError
    # on Python 3; items() behaves the same in both versions here
    for name,entities in groups.items():
        if entities:
            o = FreeCAD.ActiveDocument.getObject(name)
            if o:
                if DEBUG: print("Adding group ", o.Label, " with ",len(entities)," elements")
                grp = ifc.addGroup( entities, o.Label )
    ifc.write()
    if exporttxt:
        import time, os
        txtstring = "List of objects exported by FreeCAD in file\n"
        txtstring += filename + "\n"
        txtstring += "On " + time.ctime() + "\n"
        txtstring += "\n"
        txtstring += str(len(txt)) + " objects exported:\n"
        txtstring += "\n"
        txtstring += "Nr      Name                          Type\n"
        txtstring += "\n"
        for i in range(len(txt)):
            idx = str(i+1)
            sp = ""
            for j in range(8-len(idx)):
                sp += " "
            txtstring += idx + sp + txt[i] + "\n"
        txtfile = os.path.splitext(filename)[0]+".txt"
        # bug fix: the log is a text string — "wb" raises TypeError on
        # Python 3 when writing str; open in text mode instead
        f = pyopen(txtfile,"w")
        f.write(txtstring)
        f.close()
    FreeCAD.ActiveDocument.recompute()
    if unprocessed:
        print("\nWARNING: " + str(len(unprocessed)) + " objects were not exported (stored in importIFC.unprocessed):")
        for o in unprocessed:
            print("   " + o.Label)
def getTuples(data,scale=1,placement=None,normal=None,close=True):
    """getTuples(data,[scale,placement,normal,close]): returns a tuple or a list of tuples from a vector
    or from the vertices of a shape. Scale can indicate a scale factor.

    data:      a FreeCAD.Vector (returns one tuple) or a single-wire
               Part.Shape (returns a list of tuples, one per vertex).
    placement: optional Placement applied to each point first.
    normal:    reference direction used to check/fix the loop's winding.
    close:     when True, the first point is repeated at the end (IFC
               profiles must be closed; face loops must not be).
    """
    rnd = False # NOTE(review): rounding is permanently disabled here
    import Part
    if isinstance(data,FreeCAD.Vector):
        if placement:
            data = placement.multVec(data)
        if rnd:
            data = DraftVecUtils.rounded(data)
        return (data.x*scale,data.y*scale,data.z*scale)
    elif isinstance(data,Part.Shape):
        t = []
        if len(data.Wires) == 1:
            import Part,DraftGeomUtils
            data = Part.Wire(Part.__sortEdges__(data.Wires[0].Edges))
            verts = data.Vertexes
            try:
                c = data.CenterOfMass
                v1 = verts[0].Point.sub(c)
                v2 = verts[1].Point.sub(c)
                if DraftVecUtils.angle(v2,v1,normal) >= 0:
                    # inverting verts order if the direction is couterclockwise
                    verts.reverse()
            except:
                pass
            for v in verts:
                pt = v.Point
                if placement:
                    if not placement.isNull():
                        pt = placement.multVec(pt)
                if rnd:
                    pt = DraftVecUtils.rounded(pt)
                t.append((pt.x*scale,pt.y*scale,pt.z*scale))
            if close: # faceloops must not be closed, but ifc profiles must.
                t.append(t[0])
        else:
            print("Arch.getTuples(): Wrong profile data")
        return t
def getIfcExtrusionData(obj,scale=1,nosubs=False):
    """getIfcExtrusionData(obj,[scale,nosubs]): returns a closed path (a list of tuples), a tuple expressing an extrusion
    vector, and a list of 3 tuples for base position, x axis and z axis. Or returns None, if a base loop and
    an extrusion direction cannot be extracted. Scale can indicate a scale factor.

    The first element of the returned tuple is a tag — "circle", "ellipse",
    "composite" or "polyline" — telling the caller how to interpret the
    profile data. Objects with Additions (or Subtractions, unless nosubs)
    are rejected so they get exported as breps instead.
    """
    CURVEMODE = "PARAMETER" # For trimmed curves. CARTESIAN or PARAMETER
    if hasattr(obj,"Additions"):
        if obj.Additions:
            # TODO provisorily treat objs with additions as breps
            return None
    if hasattr(obj,"Subtractions") and not nosubs:
        if obj.Subtractions:
            return None
    if hasattr(obj,"Proxy"):
        if hasattr(obj.Proxy,"getProfiles"):
            p = obj.Proxy.getProfiles(obj,noplacement=True)
            v = obj.Proxy.getExtrusionVector(obj,noplacement=True)
            if (len(p) == 1) and v:
                # a single profile and an extrusion vector: exportable
                # parametrically
                p = p[0]
                r = FreeCAD.Placement()
                #b = p.CenterOfMass
                r = obj.Proxy.getPlacement(obj)
                #b = obj.Placement.multVec(FreeCAD.Vector())
                #r.Rotation = DraftVecUtils.getRotation(v,FreeCAD.Vector(0,0,1))
                # d = [origin, x axis, z axis] of the placement
                d = [r.Base,DraftVecUtils.rounded(r.Rotation.multVec(FreeCAD.Vector(1,0,0))),DraftVecUtils.rounded(r.Rotation.multVec(FreeCAD.Vector(0,0,1)))]
                #r = r.inverse()
                #print("getExtrusionData: computed placement:",r)
                import Part
                if len(p.Edges) == 1:
                    if isinstance(p.Edges[0].Curve,Part.Circle):
                        # Circle profile
                        r1 = p.Edges[0].Curve.Radius*scale
                        return "circle", [getTuples(p.Edges[0].Curve.Center,scale), r1], getTuples(v,scale), d
                    elif isinstance(p.Edges[0].Curve,Part.Ellipse):
                        # Ellipse profile
                        r1 = p.Edges[0].Curve.MajorRadius*scale
                        r2 = p.Edges[0].Curve.MinorRadius*scale
                        return "ellipse", [getTuples(p.Edges[0].Curve.Center,scale), r1, r2], getTuples(v,scale), d
                curves = False
                for e in p.Edges:
                    if isinstance(e.Curve,Part.Circle):
                        curves = True
                    elif not isinstance(e.Curve,Part.LineSegment):
                        print("Arch.getIfcExtrusionData: Warning: unsupported edge type in profile")
                if curves:
                    # Composite profile
                    ecurves = []
                    last = None
                    import DraftGeomUtils
                    edges = Part.__sortEdges__(p.Edges)
                    for e in edges:
                        if isinstance(e.Curve,Part.Circle):
                            import math
                            # 'follow' tracks whether this arc continues from
                            # the previous edge's end point or must be reversed
                            follow = True
                            if last:
                                if not DraftVecUtils.equals(last,e.Vertexes[0].Point):
                                    follow = False
                                    last = e.Vertexes[0].Point
                                else:
                                    last = e.Vertexes[-1].Point
                            else:
                                last = e.Vertexes[-1].Point
                            p1 = math.degrees(-DraftVecUtils.angle(e.Vertexes[0].Point.sub(e.Curve.Center)))
                            p2 = math.degrees(-DraftVecUtils.angle(e.Vertexes[-1].Point.sub(e.Curve.Center)))
                            da = DraftVecUtils.angle(e.valueAt(e.FirstParameter+0.1).sub(e.Curve.Center),e.Vertexes[0].Point.sub(e.Curve.Center))
                            # normalize angles to [0, 360)
                            if p1 < 0:
                                p1 = 360 + p1
                            if p2 < 0:
                                p2 = 360 + p2
                            if da > 0:
                                follow = not(follow)
                            if CURVEMODE == "CARTESIAN":
                                # BUGGY
                                p1 = getTuples(e.Vertexes[0].Point,scale)
                                p2 = getTuples(e.Vertexes[-1].Point,scale)
                            ecurves.append(["arc",getTuples(e.Curve.Center,scale),e.Curve.Radius*scale,[p1,p2],follow,CURVEMODE])
                        else:
                            verts = [vertex.Point for vertex in e.Vertexes]
                            if last:
                                if not DraftVecUtils.equals(last,verts[0]):
                                    verts.reverse()
                                    last = e.Vertexes[0].Point
                                else:
                                    last = e.Vertexes[-1].Point
                            else:
                                last = e.Vertexes[-1].Point
                            ecurves.append(["line",[getTuples(vert,scale) for vert in verts]])
                    return "composite", ecurves, getTuples(v,scale), d
                else:
                    # Polyline profile
                    return "polyline", getTuples(p,scale), getTuples(v,scale), d
    return None
def getIfcBrepFacesData(obj,scale=1,sub=False,tessellation=1):
    """getIfcBrepFacesData(obj,[scale,tesselation]): returns a list(0) of lists(1) of lists(2) of lists(3),
    list(3) being a list of vertices defining a loop, list(2) describing a face from one or
    more loops, list(1) being the whole solid made of several faces, list(0) being the list
    of solids inside the object. Scale can indicate a scaling factor. Tesselation is the tesselation
    factor to apply on curved faces."""
    shape = None
    if sub:
        # subtraction volumes (e.g. window openings) can come from the proxy
        if hasattr(obj,"Proxy"):
            if hasattr(obj.Proxy,"getSubVolume"):
                shape = obj.Proxy.getSubVolume(obj)
    if not shape:
        if hasattr(obj,"Shape"):
            if obj.Shape:
                if not obj.Shape.isNull():
                    #if obj.Shape.isValid():
                    shape = obj.Shape
        elif hasattr(obj,"Terrain"):
            if obj.Terrain:
                if hasattr(obj.Terrain,"Shape"):
                    if obj.Terrain.Shape:
                        if not obj.Terrain.Shape.isNull():
                            if obj.Terrain.Shape.isValid():
                                shape = obj.Terrain.Shape
    if shape:
        import Part
        sols = []
        if shape.Solids:
            dataset = shape.Solids
        else:
            dataset = shape.Shells
            print("Warning! object contains no solids")
        # bug fix: iterate over 'dataset' (which falls back to shells), not
        # over 'shape.Solids', which is empty exactly when the fallback is
        # needed — solid-less shapes previously exported nothing
        for sol in dataset:
            s = []
            curves = False
            for face in sol.Faces:
                for e in face.Edges:
                    if not isinstance(e.Curve,Part.LineSegment):
                        curves = True
            if curves:
                # curved geometry: export a triangulation instead of exact loops
                tris = sol.tessellate(tessellation)
                for tri in tris[1]:
                    f = []
                    for i in tri:
                        f.append(getTuples(tris[0][i],scale))
                    s.append([f])
            else:
                for face in sol.Faces:
                    f = []
                    f.append(getTuples(face.OuterWire,scale,normal=face.normalAt(0,0),close=False))
                    for wire in face.Wires:
                        if wire.hashCode() != face.OuterWire.hashCode():
                            # inner wires are holes: wind them opposite to
                            # the outer loop
                            f.append(getTuples(wire,scale,normal=DraftVecUtils.neg(face.normalAt(0,0)),close=False))
                    s.append(f)
            sols.append(s)
        return sols
    return None
def getIfcElevation(obj):
    """getIfcElevation(obj): Returns the lowest height (Z coordinate) of this object"""
    if not obj.isDerivedFrom("Part::Feature"):
        return 0
    return obj.Shape.BoundBox.ZMin
def explore(filename=None):
    """Open the contents of an IFC file in a Qt explorer dialog.

    When no filename is given, a file-open dialog is shown first.
    Returns the dialog object, or None when no file was chosen.
    """
    if not filename:
        from PySide import QtGui
        selection = QtGui.QFileDialog.getOpenFileName(QtGui.qApp.activeWindow(),'IFC files','*.ifc')
        # the dialog returns a (path, filter) tuple
        filename = selection[0] if selection else selection
    if not filename:
        return
    getConfig()
    schema = getSchema()
    dialog = explorer(filename,schema)
    dialog.show()
    return dialog
# IfcReader #############################################
class IfcSchema:
    """Parses an IFC EXPRESS schema file (.exp) and exposes its TYPE and
    ENTITY definitions as dictionaries."""
    SIMPLETYPES = ["INTEGER", "REAL", "STRING", "NUMBER", "LOGICAL", "BOOLEAN"]
    # section keywords that terminate the attribute list of an entity
    NO_ATTR = ["WHERE", "INVERSE","WR2","WR3", "WR4", "WR5", "UNIQUE", "DERIVE"]
    def __init__(self, filename):
        """:param filename: path of the schema file; if it does not exist as
        given, it is also searched for in the FreeCAD macro directory"""
        if not os.path.exists(filename):
            # fall back to the user's macro directory
            p = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Macro")
            p = p.GetString("MacroPath","")
            filename = p + os.sep + filename
            if not os.path.exists(filename):
                raise ImportError("no IFCSchema file found!")
        # BUGFIX: store the *resolved* path before opening it. The original
        # code assigned self.filename before the MacroPath fallback and then
        # opened self.filename, which failed whenever the schema was only
        # found in the macro directory.
        self.filename = filename
        self.file = open(self.filename)
        self.data = self.file.read()
        self.types = self.readTypes()
        self.entities = self.readEntities()
        if DEBUG: print("Parsed from schema %s: %s entities and %s types" % (self.filename, len(self.entities), len(self.types)))
    def readTypes(self):
        """
        Parse all the possible types from the schema,
        returns a dictionary Name -> Type
        """
        types = {}
        for m in re.finditer("TYPE (.*) = (.*);", self.data):
            typename, typetype = m.groups()
            if typetype in self.SIMPLETYPES:
                types[typename] = typetype
            else:
                # non-simple types are marked with a leading '#'
                types[typename] = "#" + typetype
        return types
    def readEntities(self):
        """
        Parse all the possible entities from the schema,
        returns a dictionary of the form:
        { name: {
            "supertype": supertype,
            "attributes": [{ key: value }, ..]
        }}
        """
        entities = {}
        # Regexes must be greedy to prevent matching outer entity and end_entity strings
        # Regexes have re.DOTALL to match newlines
        for m in re.finditer("ENTITY (.*?)END_ENTITY;", self.data, re.DOTALL):
            entity = {}
            raw_entity_str = m.groups()[0]
            entity["name"] = re.search("(.*?)[;|\s]", raw_entity_str).groups()[0].upper()
            subtypeofmatch = re.search(".*SUBTYPE OF \((.*?)\);", raw_entity_str)
            entity["supertype"] = subtypeofmatch.groups()[0].upper() if subtypeofmatch else None
            # find the shortest string matched from the end of the entity type header to the
            # first occurrence of a NO_ATTR string (when it occurs on a new line)
            inner_str = re.search(";(.*?)$", raw_entity_str, re.DOTALL).groups()[0]
            attrs_str = min([inner_str.partition("\r\n "+a)[0] for a in self.NO_ATTR])
            attrs = []
            for am in re.finditer("(.*?) : (.*?);", attrs_str, re.DOTALL):
                name, attr_type = [s.replace("\r\n\t","") for s in am.groups()]
                attrs.append((name, attr_type))
            entity["attributes"] = attrs
            entities[entity["name"]] = entity
        return entities
    def getAttributes(self, name):
        """
        Get all attributes af an entity, including supertypes
        """
        # walk up the supertype chain, keeping declaration order:
        # supertype attributes come first in the returned list
        ent = self.entities[name]
        attrs = []
        while ent != None:
            this_ent_attrs = copy.copy(ent["attributes"])
            this_ent_attrs.reverse()
            attrs.extend(this_ent_attrs)
            ent = self.entities.get(ent["supertype"], None)
        attrs.reverse()
        return attrs
    def capitalize(self, name):
        "returns a capitalized version of a type"
        # recover the schema's original mixed-case spelling of the name
        if name.upper() in self.data.upper():
            i1 = self.data.upper().index(name.upper())
            i2 = i1 + len(name)
            name = self.data[i1:i2]
        return name
class IfcFile:
    """
    Parses an ifc file given by filename, entities can be retrieved by name and id
    The whole file is stored in a dictionary (in memory)
    """
    # NOTE(review): legacy class-level dicts; they are shadowed by the
    # instance attributes created in __init__ (note the different spelling
    # entById vs entsById) and appear unused -- kept for compatibility
    entsById = {}
    entsByName = {}
    def __init__(self, filename,schema):
        """:param filename: path of the IFC file to parse
        :param schema: path of the EXPRESS schema file describing it"""
        self.filename = filename
        self.schema = IfcSchema(schema)
        self.file = open(self.filename)
        self.entById, self.entsByName, self.header = self.read()
        self.file.close()
        if DEBUG: print("Parsed from file %s: %s entities" % (self.filename, len(self.entById)))
    def getEntityById(self, id):
        "returns the raw entity dict with the given numeric id, or None"
        return self.entById.get(id, None)
    def getEntitiesByName(self, name):
        "returns the list of entity ids of the given type name, or None"
        return self.entsByName.get(name, None)
    def read(self):
        """
        Returns 2 dictionaries, entById and entsByName
        """
        entById = {}
        entsByName = {}
        header = 'HEADER '
        readheader = False
        for line in self.file:
            e = self.parseLine(line)
            if e:
                entById[int(e["id"])] = e
                # BUGFIX: accumulate ids per type name. The original looked
                # the name up in the entity record itself (e.get(e["name"]))
                # which always returned the empty default, so entsByName
                # kept only the last id of each type.
                ids = entsByName.get(e["name"],[])
                ids.append(e["id"])
                entsByName[e["name"]] = list(set(ids))
            elif 'HEADER' in line:
                # non-entity lines between HEADER and ENDSEC are collected verbatim
                readheader = True
            elif readheader:
                if 'ENDSEC' in line:
                    readheader = False
                else:
                    header += line
        return [entById, entsByName, header]
    def parseLine(self, line):
        """
        Parse a line
        """
        m = IFCLINE_RE.search(line)  # id,name,attrs
        if m:
            id, name, attrs = m.groups()
            id = id.strip()
            name = name.strip()
            attrs = attrs.strip()
        else:
            # not an entity line (header, comment, blank...)
            return False
        return {"id": id, "name": name, "attributes": self.parseAttributes(name, attrs)}
    def parseAttributes(self, ent_name, attrs_str):
        """
        Parse the attributes of a line
        """
        parts = []
        lastpos = 0
        while lastpos < len(attrs_str):
            newpos = self.nextString(attrs_str, lastpos)
            parts.extend(self.parseAttribute(attrs_str[lastpos:newpos-1]))
            lastpos = newpos
        # pair the parsed values with the attribute names from the schema
        schema_attributes = self.schema.getAttributes(ent_name)
        assert len(schema_attributes) == len(parts), \
            "Expected %s attributes, got %s (entity: %s" % \
            (len(schema_attributes), len(parts), ent_name)
        attribute_names = [a[0] for a in schema_attributes]
        return dict(zip(attribute_names, parts))
    def parseAttribute(self, attr_str):
        """
        Map a single attribute to a python type (recursively)
        """
        parts = []
        lastpos = 0
        while lastpos < len(attr_str):
            newpos = self.nextString(attr_str, lastpos)
            s = attr_str[lastpos:newpos-1]
            if (s[0] == "(" and s[-1] == ")"): # list, recurse
                parts.append(self.parseAttribute(s[1:-1]))
            else:
                try:
                    parts.append(float(s)) # number, any kind
                except ValueError:
                    if s[0] == "'" and s[-1] == "'": # string
                        parts.append(s[1:-1])
                    elif s == "$":
                        # '$' is IFC's "unset" marker
                        parts.append(None)
                    else:
                        parts.append(s) # ref, enum or other
            lastpos = newpos
        return parts
    def nextString(self, s, start):
        """
        Parse the data part of a line
        """
        # returns the position just past the next top-level comma
        # (i.e. a comma outside any parentheses or quotes), or len(s)+1
        parens = 0
        quotes = 0
        for pos in range(start,len(s)):
            c = s[pos]
            if c == "," and parens == 0 and quotes == 0:
                return pos+1
            elif c == "(" and quotes == 0:
                parens += 1
            elif c == ")" and quotes == 0:
                parens -= 1
            elif c == "\'" and quotes == 0:
                quotes = 1
            elif c =="\'" and quotes == 1:
                quotes = 0
        return len(s)+1
class IfcEntity:
    "a container for an IFC entity"
    def __init__(self,ent,doc=None):
        """:param ent: a raw entity dict with 'id', 'name' and 'attributes' keys
        :param doc: the owning IfcDocument, used to resolve property sets"""
        self.data = ent
        self.id = int(ent['id'])
        # normalize the type name: uppercase, stripped of stray punctuation
        self.type = ent['name'].upper().strip(",[]()")
        self.attributes = ent['attributes']
        self.doc = doc
    def __repr__(self):
        return str(self.id) + ' : ' + self.type + ' ' + str(self.attributes)
    def getProperties(self):
        "returns the IfcRelDefinesByProperties entities relating to this object"
        return self.doc.find('IFCRELDEFINESBYPROPERTIES','RelatedObjects',self)
    def getProperty(self,propName):
        "finds the value of the given property or quantity in this object, if exists"
        propsets = self.doc.find('IFCRELDEFINESBYPROPERTIES','RelatedObjects',self)
        if not propsets: return None
        propset = []
        for p in propsets:
            # property sets and quantity sets store their members differently
            if hasattr(p.RelatingPropertyDefinition,"HasProperties"):
                propset.extend(p.RelatingPropertyDefinition.HasProperties)
            elif hasattr(p.RelatingPropertyDefinition,"Quantities"):
                propset.extend(p.RelatingPropertyDefinition.Quantities)
        for prop in propset:
            if prop.Name == propName:
                print("found valid",prop)
                # return whichever value attribute this property carries
                if hasattr(prop,"LengthValue"):
                    return prop.LengthValue
                elif hasattr(prop,"AreaValue"):
                    return prop.AreaValue
                elif hasattr(prop,"VolumeValue"):
                    return prop.VolumeValue
                elif hasattr(prop,"NominalValue"):
                    return prop.NominalValue
        return None
    def getAttribute(self,attr):
        "returns the value of the given attribute, if exists"
        # BUGFIX: use getattr instead of self.__dict__[attr]. hasattr() also
        # matches class-level attributes (e.g. methods), for which the
        # instance __dict__ lookup raised KeyError.
        return getattr(self, attr, None)
class IfcDocument:
    "an object representing an IFC document"
    # NOTE(review): this class uses dict.iteritems(), i.e. it is Python-2
    # only, consistent with the legacy nature of this importer
    def __init__(self,filename,schema="IFC2X3_TC1.exp"):
        # first pass: parse the file and index IfcEntity wrappers by
        # numeric id; key 0 holds the raw HEADER string of the file
        f = IfcFile(filename,schema)
        self.filename = filename
        self.data = f.entById
        self.Entities = {0:f.header}
        for k,e in self.data.iteritems():
            eid = int(e['id'])
            self.Entities[eid] = IfcEntity(e,self)
        if DEBUG: print(len(self.Entities),"entities created. Creating attributes...")
        # second pass: convert each raw attribute value into a usable python
        # value (string, number, entity reference, or list thereof) and
        # attach it to the entity as a regular python attribute
        for k,ent in self.Entities.iteritems():
            if DEBUG: print("attributing entity ",ent)
            if hasattr(ent,"attributes"):
                for k,v in ent.attributes.iteritems():
                    if DEBUG: print("parsing attribute: ",k," value ",v)
                    if isinstance(v,str):
                        val = self.__clean__(v)
                    elif isinstance(v,list):
                        # clean string items in place, keep others as-is
                        val = []
                        for item in v:
                            if isinstance(item,str):
                                val.append(self.__clean__(item))
                            else:
                                val.append(item)
                    else:
                        val = v
                    setattr(ent,k.strip(),val)
        if DEBUG: print("Document successfully created")
    def __clean__(self,value):
        "turns an attribute value into something usable"
        # Best-effort conversion of a raw IFC attribute string:
        # IFCTEXT('x') -> 'x', IFCBOOLEAN(.T.) -> True/False,
        # IFCREAL(1.5) -> 1.5, '#123' -> referenced entity,
        # '#1,#2' -> list of referenced entities. On any parsing
        # problem the original string is returned unchanged.
        try:
            val = value.strip(" ()'")
            if val[:3].upper() == "IFC":
                if "IFCTEXT" in val.upper():
                    l = val.split("'")
                    if len(l) == 3: val = l[1]
                elif "IFCBOOLEAN" in value.upper():
                    l = val.split(".")
                    if len(l) == 3: val = l[1]
                    if val.upper() == "F": val = False
                    elif val.upper() == "T": val = True
                elif "IFCREAL" in val.upper():
                    l = val.split("(")
                    if len(l) == 2: val = float(l[1].strip(")"))
            else:
                if '#' in val:
                    if "," in val:
                        # list of entity references
                        val = val.split(",")
                        l = []
                        for subval in val:
                            if '#' in subval:
                                s = subval.strip(" #")
                                if DEBUG: print("referencing ",s," : ",self.getEnt(int(s)))
                                l.append(self.getEnt(int(s)))
                        val = l
                    else:
                        # single entity reference
                        val = val.strip()
                        val = val.replace("#","")
                        if DEBUG: print("referencing ",val," : ",self.getEnt(int(val)))
                        val = self.getEnt(int(val))
            if not val:
                # fall back to the raw value rather than an empty result
                val = value
        except:
            if DEBUG: print("error parsing attribute",value)
            val = value
        return val
    def __repr__(self):
        return "IFC Document: " + self.filename + ', ' + str(len(self.Entities)) + " entities "
    def getEnt(self,ref):
        "gets an entity by id number, or a list of entities by type"
        # int -> single entity (or None); str -> list of all entities
        # whose type matches exactly (case-insensitive)
        if isinstance(ref,int):
            if ref in self.Entities:
                return self.Entities[ref]
        elif isinstance(ref,str):
            l = []
            ref = ref.upper()
            for k,ob in self.Entities.iteritems():
                if hasattr(ob,"type"):
                    if ob.type == ref:
                        l.append(ob)
            return l
        return None
    def search(self,pat):
        "searches entities types for partial match"
        # returns the list of distinct type names containing the pattern
        l = []
        pat = pat.upper()
        for k,ob in self.Entities.iteritems():
            if hasattr(ob,"type"):
                if pat in ob.type:
                    if not ob.type in l:
                        l.append(ob.type)
        return l
    def find(self,pat1,pat2=None,pat3=None):
        '''finds objects in the current IFC document.
        arguments can be of the following form:
        - (pattern): returns object types matching the given pattern (same as search)
        - (type,property,value): finds, in all objects of type "type", those whose
          property "property" has the given value
        '''
        if pat3:
            # three-argument form: filter entities of type pat1 on attribute
            bobs = self.getEnt(pat1)
            obs = []
            for bob in bobs:
                if hasattr(bob,pat2):
                    if bob.getAttribute(pat2) == pat3:
                        obs.append(bob)
            return obs
        elif pat1:
            # one-argument form: all entities of all matching types
            ll = self.search(pat1)
            obs = []
            for l in ll:
                obs.extend(self.getEnt(l))
            return obs
        return None
def explorer(filename,schema="IFC2X3_TC1.exp"):
    "returns a PySide dialog showing the contents of an IFC file"
    from PySide import QtCore,QtGui
    ifc = IfcDocument(filename,schema)
    schema = IfcSchema(schema)
    # three columns: entity id, type icon, description text
    tree = QtGui.QTreeWidget()
    tree.setColumnCount(3)
    tree.setWordWrap(True)
    tree.header().setDefaultSectionSize(60)
    tree.header().resizeSection(0,60)
    tree.header().resizeSection(1,30)
    tree.header().setStretchLastSection(True)
    tree.headerItem().setText(0, "ID")
    tree.headerItem().setText(1, "")
    tree.headerItem().setText(2, "Item and Properties")
    bold = QtGui.QFont()
    bold.setWeight(75)
    bold.setBold(True)
    #print(ifc.Entities)
    for i in ifc.Entities.keys():
        e = ifc.Entities[i]
        # BUGFIX: entry 0 of IfcDocument.Entities holds the raw file header
        # string, not an IfcEntity; accessing e.type/e.attributes on it
        # raised AttributeError. Show it as plain text and move on.
        if not hasattr(e,"type"):
            item = QtGui.QTreeWidgetItem(tree)
            item.setText(2,str(e))
            continue
        item = QtGui.QTreeWidgetItem(tree)
        if hasattr(e,"id"):
            item.setText(0,str(e.id))
            # pick an icon matching the entity type, when we have one
            if e.type in ["IFCWALL","IFCWALLSTANDARDCASE"]:
                item.setIcon(1,QtGui.QIcon(":icons/Arch_Wall_Tree.svg"))
            elif e.type in ["IFCCOLUMN","IFCBEAM","IFCSLAB","IFCFOOTING"]:
                item.setIcon(1,QtGui.QIcon(":icons/Arch_Structure_Tree.svg"))
            elif e.type in ["IFCSITE"]:
                item.setIcon(1,QtGui.QIcon(":icons/Arch_Site_Tree.svg"))
            elif e.type in ["IFCBUILDING"]:
                item.setIcon(1,QtGui.QIcon(":icons/Arch_Building_Tree.svg"))
            elif e.type in ["IFCSTOREY"]:
                item.setIcon(1,QtGui.QIcon(":icons/Arch_Floor_Tree.svg"))
            elif e.type in ["IFCWINDOW"]:
                item.setIcon(1,QtGui.QIcon(":icons/Arch_Window_Tree.svg"))
            elif e.type in ["IFCROOF"]:
                item.setIcon(1,QtGui.QIcon(":icons/Arch_Roof_Tree.svg"))
            elif e.type in ["IFCEXTRUDEDAREASOLID","IFCCLOSEDSHELL"]:
                item.setIcon(1,QtGui.QIcon(":icons/Tree_Part.svg"))
            elif e.type in ["IFCFACE"]:
                item.setIcon(1,QtGui.QIcon(":icons/Draft_SwitchMode.svg"))
            elif e.type in ["IFCARBITRARYCLOSEDPROFILEDEF","IFCPOLYLOOP"]:
                item.setIcon(1,QtGui.QIcon(":icons/Draft_Draft.svg"))
        item.setText(2,str(schema.capitalize(e.type)))
        item.setFont(2,bold);
        # one child row per attribute (ids are hidden), plus one row per
        # element when the attribute value is a list
        for a in e.attributes.keys():
            if hasattr(e,a):
                if not a.upper() in ["ID", "GLOBALID"]:
                    v = getattr(e,a)
                    if isinstance(v,IfcEntity):
                        t = "Entity #" + str(v.id) + ": " + str(v.type)
                    elif isinstance(v,list):
                        t = ""
                    else:
                        t = str(v)
                    t = "    " + str(a) + " : " + str(t)
                    item = QtGui.QTreeWidgetItem(tree)
                    item.setText(2,str(t))
                    if isinstance(v,list):
                        for vi in v:
                            if isinstance(vi,IfcEntity):
                                t = "Entity #" + str(vi.id) + ": " + str(vi.type)
                            else:
                                t = vi
                            t = "        " + str(t)
                            item = QtGui.QTreeWidgetItem(tree)
                            item.setText(2,str(t))
    d = QtGui.QDialog()
    d.setObjectName("IfcExplorer")
    d.setWindowTitle("Ifc Explorer")
    d.resize(640, 480)
    layout = QtGui.QVBoxLayout(d)
    layout.addWidget(tree)
    return d
# IfcWriter ########################################
class _tempEntityHolder:
    """Temporary keep-alive store for created IFC entities.
    To be made into something nicer later..."""
    def __init__(self):
        # entities appended here stay referenced for the holder's lifetime
        self.refs = []

# module-level singleton used by create() to keep entities alive
holder = _tempEntityHolder()
def uid():
    """returns a suitable GlobalID"""
    # take the first 22 characters of a random UUID and replace the
    # hyphen separators with underscores
    return str(uuid.uuid4())[:22].replace("-", "_")
def now(string=False):
    "returns a suitable Ifc Time"
    # ISO-formatted UTC timestamp when a string is requested,
    # integer epoch seconds otherwise
    if not string:
        return int(time.time())
    return time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime())
def getPropertyNames(entity):
    """getPropertyNames(entity): Returns a dictionary with
    the numbers and names of the pythonproperties available for
    this entity"""
    # older ifcopenshell builds expose get_argument_count(); newer
    # ones support len() on the entity directly
    if hasattr(entity, "get_argument_count"):
        count = entity.get_argument_count()
    else:
        count = len(entity)
    return {i: entity.get_argument_name(i) for i in range(count)}
def getTuple(vec):
    """getTuple(vec): returns a tuple from other coordinate
    structures: tuple, list, 3d vector, or occ vertex"""
    def _round(component):
        # normalize to float and clamp to the configured precision
        return round(float(component), PRECISION)
    if isinstance(vec, (tuple, list)):
        return tuple(_round(v) for v in vec)
    if hasattr(vec, "x") and hasattr(vec, "y") and hasattr(vec, "z"):
        # FreeCAD-style vector
        return (_round(vec.x), _round(vec.y), _round(vec.z))
    if hasattr(vec, "X") and hasattr(vec, "Y") and hasattr(vec, "Z"):
        # OCC-style vertex
        return (_round(vec.X), _round(vec.Y), _round(vec.Z))
    # unknown structure: fall through, returning None like the original
def getValueAndDirection(vec):
    """getValueAndDirection(vec): returns a length and a tuple
    representing a normalized vector from a tuple"""
    t = getTuple(vec)
    # Euclidean length, rounded to the configured precision
    length = round(math.sqrt(t[0]**2 + t[1]**2 + t[2]**2), PRECISION)
    ratio = 1/length
    # normalize component-wise, rounding each coordinate
    normal = tuple(round(c*ratio, PRECISION) for c in (t[0], t[1], t[2]))
    return length, normal
def create(ifcdoc=None,ifcname=None,arguments=None):
    """create(ifcdoc,ifcname,[arguments]): creates an entity
    of the given name in the given document and optionally
    gives it an ordered list of arguments"""
    # IDIOM FIX: the original used a mutable default argument
    # (arguments=[]). Use None as the default and normalize here;
    # a single non-list value is still wrapped in a list as before.
    if arguments is None:
        arguments = []
    elif not isinstance(arguments,list):
        arguments = [arguments]
    # ifcopenshell API differs by version
    if hasattr(ifcw,"Entity"):
        entity = ifcw.Entity(ifcname)
    else:
        entity = ifcw.entity_instance(ifcname)
    if ifcdoc:
        ifcdoc.add(entity)
    # this is a temporary hack while ifcopenshell has no ref counting
    holder.refs.append(entity)
    for i in range(len(arguments)):
        arg = arguments[i]
        if isinstance(arg,tuple):
            # 2- and 3-tuples are coordinate lists for ifcopenshell
            if len(arg) in [2,3]:
                if hasattr(ifcw,"Doubles"):
                    arg = ifcw.Doubles(arg)
                else:
                    arg = ifcw.doubles(arg)
        entity.set_argument(i,arg)
    return entity
class IfcWriter(object):
    """IfcWriter([filepath,name,owner,organization,application,version])
    Creates an empty IFC document."""
    def __init__(self,filepath="",name="",owner="",organization="",application="Python IFC exporter",version="0.0"):
        # underlying ifcopenshell file object (API name differs by version)
        if hasattr(ifcw,"IfcFile"):
            self._fileobject = ifcw.IfcFile()
        else:
            self._fileobject = ifcw.file()
        # owner history: person + organization + application
        self._person = create(self._fileobject,"IfcPerson",[None,None,"",None,None,None,None,None])
        self._org = create(self._fileobject,"IfcOrganization",[None,"",None,None,None])
        pno = create(self._fileobject,"IfcPersonAndOrganization",[self._person,self._org,None])
        app = create(self._fileobject,"IfcApplication",[self._org,version,application,uid()])
        self._owner = create(self._fileobject,"IfcOwnerHistory",[pno,app,None,"ADDED",None,pno,app,now()])
        # global 3D geometric representation context
        axp = self.addPlacement(local=False)
        dim0 = create(self._fileobject,"IfcDirection",getTuple((0,1,0)))
        self._repcontext = create(self._fileobject,"IfcGeometricRepresentationContext",['Plan','Model',3,1.E-05,axp,dim0])
        # unit assignment: millimetre lengths, SI areas/volumes, degree angles
        dim1 = create(self._fileobject,"IfcDimensionalExponents",[0,0,0,0,0,0,0])
        dim2 = create(self._fileobject,"IfcSIUnit",[dim1,"LENGTHUNIT","MILLI","METRE"])
        dim3 = create(self._fileobject,"IfcSIUnit",[dim1,"AREAUNIT",None,"SQUARE_METRE"])
        dim4 = create(self._fileobject,"IfcSIUnit",[dim1,"VOLUMEUNIT",None,"CUBIC_METRE"])
        dim6 = create(self._fileobject,"IfcSIUnit",[dim1,"PLANEANGLEUNIT",None,"RADIAN"])
        # NOTE(review): 1.745E-2 is presumably radians-per-degree
        # (pi/180 ~ 1.7453E-2) -- confirm against the IFC spec
        dim7 = create(None,"IfcPlaneAngleMeasure",[1.745E-2])
        dim8 = create(self._fileobject,"IfcMeasureWithUnit",[dim7,dim6])
        dim9 = create(self._fileobject,"IfcConversionBasedUnit",[dim1,"PLANEANGLEUNIT","DEGREE",dim8])
        units = create(self._fileobject,"IfcUnitAssignment",[[dim2,dim3,dim4,dim9]])
        self.Project = create(self._fileobject,"IfcProject",[uid(),self._owner,None,None,None,None,None,[self._repcontext],units])
        # document-level bookkeeping of created spatial elements
        self.Site = None
        self._storeyRelations = {}
        self.BuildingProducts = []
        self.Storeys = []
        self.Buildings = []
        self.FilePath = filepath
        # these three assignments go through __setattr__ below, which also
        # pushes the values into the underlying IFC entities
        self.Owner = owner
        self.Organization = organization
        self.Name = name
    def __repr__(self):
        return "IFC document " + self.Name #+ " containing " + str(len(holder)) + " entities"
    def __setattr__(self,key,value):
        # mirror Owner/Organization/Name into the corresponding IFC
        # entities; note that falsy values are only stored on the instance
        if value:
            if key == "Owner":
                self._person.set_argument(2,str(value))
            elif key == "Organization":
                self._org.set_argument(1,str(value))
            elif key == "Name":
                self.Project.set_argument(2,str(value))
        self.__dict__.__setitem__(key,value)
    def findByName(self,ifctype,name):
        "finds an entity of a given ifctype by name"
        objs = self._fileobject.by_type(ifctype)
        for obj in objs:
            # ifcopenshell API differs by version
            if hasattr(obj,"get_argument_count"):
                l = obj.get_argument_count()
            else:
                l = len(obj)
            for i in range(l):
                if obj.get_argument_name(i) == "Name":
                    if obj.get_argument(i) == name:
                        return obj
        return None
    def write(self,fp=None):
        "writes the document to its file"
        # :param fp: optional path overriding self.FilePath
        if fp:
            path = fp
        else:
            path = self.FilePath
        if path:
            # NOTE(review): bare except hides the real error class/message
            try:
                self._fileobject.write(path)
                if APPLYFIX:
                    print("IfcWriter: Applying fix...")
                    self._fix(path)
            except:
                print("IfcWriter: Error writing to "+path)
            else:
                print("IfcWriter: Successfully written to "+path)
        else:
            print("IfcWriter: Error: File path is not defined, unable to save")
    def _fix(self,path):
        "hack to fix early bugs in ifcopenshell"
        # post-processes the written file line by line, rewriting
        # constructs that old ifcopenshell versions emitted wrongly
        import os
        if os.path.exists(path):
            f = pyopen(path,"rb")
            lines = []
            for l in f.readlines():
                if "(=IFC" in l:
                    # adding an ifc entity without ID adds an unwanted = sign
                    l = l.replace("(=IFC","(IFC")
                elif "IFCSIUNIT" in l:
                    # no way to insert * character
                    l = l.replace("IFCSIUNIT(#12,","IFCSIUNIT(*,")
                lines.append(l)
            f.close()
            f = pyopen(path,"wb")
            for l in lines:
                f.write(l)
            f.close()
    def union(self,solids):
        """union(solids): creates a boolean union between all the solids of the list"""
        # NOTE(review): this pops from the given list, i.e. it mutates the
        # caller's argument -- callers must not reuse the list afterwards
        if len(solids) == 1:
            return solids[0]
        else:
            s1 = solids.pop(0)
            s2 = solids.pop(0)
            base = create(self._fileobject,"IfcBooleanResult",["UNION",s1,s2])
            for s in solids:
                # chain the remaining solids onto the running union
                base = create(self._fileobject,"IfcBooleanResult",["UNION",base,s])
            return base
    def addPlacement(self,reference=None,origin=(0,0,0),xaxis=(1,0,0),zaxis=(0,0,1),local=True,flat=False):
        """addPlacement([reference,origin,xaxis,zaxis,local]): adds a placement. origin,
        xaxis and zaxis can be either tuples or 3d vectors. If local is False, a global
        placement is returned, otherwise a local one. If flat is True, a 2D
        placement is built from origin and xaxis only."""
        if flat:
            # 2D placement: only origin and x axis, truncated to 2 coords
            xvc = create(self._fileobject,"IfcDirection",getTuple(xaxis)[:2])
            ovc = create(self._fileobject,"IfcCartesianPoint",getTuple(origin)[:2])
            gpl = create(self._fileobject,"IfcAxis2Placement2D",[ovc,xvc])
        else:
            xvc = create(self._fileobject,"IfcDirection",getTuple(xaxis))
            zvc = create(self._fileobject,"IfcDirection",getTuple(zaxis))
            ovc = create(self._fileobject,"IfcCartesianPoint",getTuple(origin))
            gpl = create(self._fileobject,"IfcAxis2Placement3D",[ovc,zvc,xvc])
        if local:
            # wrap in a local placement relative to the given reference
            lpl = create(self._fileobject,"IfcLocalPlacement",[reference,gpl])
            return lpl
        else:
            return gpl
    def addSite(self,placement=None,name="Site",description=None,latitude=None,longitude=None,elevation=None,landtitlenumber=None,address=None):
        """makeSite(ifcdoc,project,owner,[placement,name,description]): creates a site
        in the given ifc document"""
        # only one site per document: no-op if one already exists
        if self.Site:
            return
        if not placement:
            placement = self.addPlacement()
        self.Site = create(self._fileobject,"IfcSite",[uid(),self._owner,str(name),description,None,placement,None,None,"ELEMENT",latitude,longitude,elevation,landtitlenumber,address])
        self._relate(self.Project,self.Site)
    def addBuilding(self,placement=None,name="Default building",description=None):
        """addBuilding([placement,name,description]): adds a building"""
        if not placement:
            placement = self.addPlacement()
        # a building must live under a site; create a default one if needed
        if not self.Site:
            self.addSite()
        bdg = create(self._fileobject,"IfcBuilding",[uid(),self._owner,str(name),description,None,placement,None,None,"ELEMENT",None,None,None])
        self._relate(self.Site,bdg)
        self.Buildings.append(bdg)
        return bdg
    def addStorey(self,building=None,placement=None,name="Default storey",description=None):
        """addStorey([building,placement,name,description]): adds a storey"""
        if not placement:
            placement = self.addPlacement()
        sto = create(self._fileobject,"IfcBuildingStorey",[uid(),self._owner,str(name),description,None,placement,None,None,"ELEMENT",None])
        # attach to the given building, the first existing one, or a new one
        if not building:
            if self.Buildings:
                building = self.Buildings[0]
            else:
                building = self.addBuilding()
        self._relate(building,sto)
        self.Storeys.append(sto)
        return sto
    def addGroup(self,entities,name="Default group",description=None):
        """addGroup(entities,[name,description]): adds a group with the given entities"""
        if not isinstance(entities,list):
            entities = [entities]
        gro = create(self._fileobject,"IfcGroup",[uid(),self._owner,str(name),description,None])
        # the relation entity links the members to the group in the file
        rel = create(self._fileobject,"IfcRelAssignsToGroup",[uid(),self._owner,str(name)+"-relation",None,entities,"PRODUCT",gro])
        return gro
    def _relate(self,container,entities):
        """relate(container,entities): relates the given entities to the given
        container"""
        if not isinstance(entities,list):
            entities = [entities]
        if container.is_a("IfcBuildingStorey"):
            # one containment relation per storey, extended as products come in
            sid = container.get_argument(0)
            if sid in self._storeyRelations:
                prods = self._storeyRelations[sid].get_argument(4)
                self._storeyRelations[sid].set_argument(4,prods+entities)
            else:
                rel = create(self._fileobject,"IfcRelContainedInSpatialStructure",[uid(),self._owner,'StoreyLink','',entities,container])
                self._storeyRelations[sid] = rel
        else:
            if entities[0].is_a("IfcOpeningElement"):
                # openings void their host element instead of aggregating
                create(self._fileobject,"IfcRelVoidsElement",[uid(),self._owner,'Opening','',container,entities[0]])
            else:
                create(self._fileobject,"IfcRelAggregates",[uid(),self._owner,'Relationship','',container,entities])
    def addProduct(self,elttype,shapes,storey=None,placement=None,name="Unnamed element",description=None,extra=None):
        """addProduct(elttype,representations,[storey,placement,name,description,extra]): creates an element of the given type
        (IfcWall, IfcBeam, etc...) with the given attributes, plus the given extra attributes.
        Returns the created element, or None if creation failed."""
        elttype = str(elttype)
        if not extra:
            extra = []
        if not description:
            description = None
        if not placement:
            placement = self.addPlacement()
        representations = self.addRepresentations(shapes)
        prd = create(self._fileobject,"IfcProductDefinitionShape",[None,None,representations])
        try:
            elt = create(self._fileobject,elttype,[uid(),self._owner,name,description,None,placement,prd,None]+extra)
        except:
            # creation failed (e.g. wrong attribute count): print what the
            # entity would accept, then skip the object instead of aborting
            print("unable to create an ",elttype, " with attributes: ",[uid(),self._owner,str(name),description,None,placement,prd,None]+extra)
            try:
                if hasattr(ifcw,"Entity"):
                    o = ifcw.Entity(elttype)
                else:
                    o = ifcw.entity_instance(elttype)
                print("supported attributes are: ")
                print(getPropertyNames(o))
            except:
                print("unable to create an element of type '"+elttype+"'")
            print("WARNING: skipping object '"+name+"' of type "+elttype)
            return None
        self.BuildingProducts.append(elt)
        # attach to the given storey, the first existing one, or a new one
        if not storey:
            if self.Storeys:
                storey = self.Storeys[0]
            else:
                storey = self.addStorey()
        self._relate(storey,elt)
        return elt
    def addRepresentations(self,shapes):
        """addRepresentations(shapes,[solidType]): creates a representation from the given shape"""
        # a single swept solid is tagged "SweptSolid", everything else "Brep"
        solidType = "Brep"
        if not isinstance(shapes,list):
            if shapes.is_a("IfcExtrudedAreaSolid"):
                solidType = "SweptSolid"
            shapes = [shapes]
        reps = [create(self._fileobject,"IfcShapeRepresentation",[self._repcontext,'Body',solidType,[shape for shape in shapes]])]
        return reps
    def addColor(self,rgb,rep):
        """addColor(rgb,rep): adds a RGB color definition tuple (float,float,float) to a given representation"""
        # build the full IFC styling chain down to the styled item
        col = create(self._fileobject,"IfcColourRgb",[None]+list(rgb))
        ssr = create(self._fileobject,"IfcSurfaceStyleRendering",[col,None,None,None,None,None,None,None,"FLAT"])
        iss = create(self._fileobject,"IfcSurfaceStyle",[None,"BOTH",[ssr]])
        psa = create(self._fileobject,"IfcPresentationStyleAssignment",[[iss]])
        isi = create(self._fileobject,"IfcStyledItem",[rep,[psa],None])
        return isi
    def addProfile(self,ifctype,data,curvetype="AREA"):
        """addProfile(ifctype,data): creates a 2D profile of the given type, with the given
        data as arguments, which must be formatted correctly according to the type."""
        # Expected ifctype and corresponding data formatting:
        # IfcPolyLine: [ (0,0,0), (2,1,0), (3,3,0) ] # list of points
        # IfcCompositeCurve: [ ["line",[ (0,0,0), (2,1,0) ] ], # list of points
        #                      ["arc", (0,0,0), 15, [0.76, 3.1416], True, "PARAMETER"] # center, radius, [trim1, trim2], SameSense, trimtype
        #                      ... ]
        # IfcCircleProfileDef: [ (0,0,0), 15 ] # center, radius
        # IfcEllipseProfileDef: [ (0,0,0), 15, 7 ] # center, radiusX, radiusY
        if ifctype == "IfcPolyline":
            # closed polyline profile: points are truncated to 2D
            pts = [create(self._fileobject,"IfcCartesianPoint",getTuple(p)[:2]) for p in data]
            pol = create(self._fileobject,"IfcPolyline",[pts])
            profile = create(self._fileobject,"IfcArbitraryClosedProfileDef",[curvetype,None,pol])
        elif ifctype == "IfcCompositeCurve":
            # mixed line/arc profile built from composite curve segments
            curves = []
            for curve in data:
                cur = None
                if curve[0] == "line":
                    pts = [create(self._fileobject,"IfcCartesianPoint",getTuple(p)[:2]) for p in curve[1]]
                    cur = create(self._fileobject,"IfcPolyline",[pts])
                elif curve[0] == "arc":
                    pla = self.addPlacement(origin=curve[1],local=False,flat=True)
                    cir = create(self._fileobject,"IfcCircle",[pla,curve[2]])
                    if curve[5] == "CARTESIAN":
                        # BUGGY! Impossible to add cartesian points as "embedded" entity
                        trim1 = create(None,"IfcCartesianPoint",getTuple(curve[3][0])[:2])
                        trim2 = create(None,"IfcCartesianPoint",getTuple(curve[3][1])[:2])
                    else:
                        trim1 = create(None,"IfcParameterValue",[curve[3][0]])
                        trim2 = create(None,"IfcParameterValue",[curve[3][1]])
                    cur = create(self._fileobject,"IfcTrimmedCurve",[cir,[trim1],[trim2],curve[4],curve[5]])
                if cur:
                    seg = create(self._fileobject,"IfcCompositeCurveSegment",["CONTINUOUS",True,cur])
                    curves.append(seg)
            ccu = create(self._fileobject,"IfcCompositeCurve",[curves,False])
            profile = create(self._fileobject,"IfcArbitraryClosedProfileDef",[curvetype,None,ccu])
        else:
            # parametric profiles (circle, ellipse...): pass data through
            if not isinstance(data,list):
                data = [data]
            p = self.addPlacement(local=False,flat=True)
            profile = create(self._fileobject,ifctype,[curvetype,None,p]+data)
        return profile
    def addExtrusion(self,profile,extrusion,placement=None):
        """addExtrusion(profile,extrusion,[placement]): makes an
        extrusion of the given polyline with the given extrusion vector"""
        if not placement:
            placement = self.addPlacement(local=False)
        # split the extrusion vector into a depth and a unit direction
        value,norm = getValueAndDirection(extrusion)
        edir = create(self._fileobject,"IfcDirection",[norm])
        solid = create(self._fileobject,"IfcExtrudedAreaSolid",[profile,placement,edir,value])
        return solid
    def addExtrudedPolyline(self,points,extrusion,placement=None,color=None):
        """addExtrudedPolyline(points,extrusion,[placement,color]): makes an extruded polyline
        from the given points and the given extrusion vector"""
        pol = self.addProfile("IfcPolyline",points)
        if not placement:
            placement = self.addPlacement(local=False)
        exp = self.addExtrusion(pol,extrusion,placement)
        if color:
            self.addColor(color,exp)
        return exp
    def addExtrudedCircle(self,data,extrusion,placement=None,color=None):
        """addExtrudedCircle(data,extrusion,[placement,color]): makes an extruded circle
        from the given data (center,radius) and the given extrusion vector"""
        cir = self.addProfile("IfcCircleProfileDef",data[1])
        if not placement:
            # the circle center becomes the placement origin
            placement = self.addPlacement(origin=data[0],local=False)
        exp = self.addExtrusion(cir,extrusion,placement)
        if color:
            self.addColor(color,exp)
        return exp
    def addExtrudedEllipse(self,data,extrusion,placement=None,color=None):
        """addExtrudedEllipse(data,extrusion,[placement,color]): makes an extruded ellipse
        from the given data (center,radiusx,radiusy) and the given extrusion vector"""
        cir = self.addProfile("IfcEllipseProfileDef",[data[1],data[2]])
        if not placement:
            # the ellipse center becomes the placement origin
            placement = self.addPlacement(origin=data[0],local=False)
        exp = self.addExtrusion(cir,extrusion,placement)
        if color:
            self.addColor(color,exp)
        return exp
    def addExtrudedCompositeCurve(self,curves,extrusion,placement=None,color=None):
        """addExtrudedCompositeCurve(curves,extrusion,[placement,color]): makes an extruded polyline
        from the given curves and the given extrusion vector"""
        if not placement:
            placement = self.addPlacement(local=False)
        ccu = self.addProfile("IfcCompositeCurve",curves)
        exp = self.addExtrusion(ccu,extrusion,placement)
        if color:
            self.addColor(color,exp)
        return exp
    def addFace(self,face):
        """addFace(face): creates a face from the given face data (a list of lists of points).
        The first is the outer wire, the next are optional inner wires. They must be reversed in order"""
        # points are deduplicated across faces via self.fpoints/self.frefs,
        # which addFacetedBrep resets before each brep
        ifb = []
        idx = 0
        for f in face:
            pts = []
            for p in f:
                #print(p)
                if p in self.fpoints:
                    # reuse the already-created IfcCartesianPoint
                    #print(self.fpoints.index(p))
                    #print(self.frefs)
                    pts.append(self.frefs[self.fpoints.index(p)])
                else:
                    pt = create(self._fileobject,"IfcCartesianPoint",getTuple(p))
                    pts.append(pt)
                    self.fpoints.append(p)
                    self.frefs.append(pt)
            #print(pts)
            loop = create(self._fileobject,"IfcPolyLoop",[pts])
            if idx == 0:
                # first wire is the outer boundary, the rest are holes
                fb = create(self._fileobject,"IfcFaceOuterBound",[loop,True])
            else:
                fb = create(self._fileobject,"IfcFaceBound",[loop,True])
            ifb.append(fb)
            idx += 1
        iface = create(self._fileobject,"IfcFace",[ifb])
        return iface
    def addFacetedBrep(self,faces,color=None):
        """addFacetedBrep(self,faces,[color]): creates a faceted brep object from the given list
        of faces (each face is a list of lists of points, inner wires are reversed)"""
        # reset the point-deduplication caches used by addFace
        self.fpoints = []
        self.frefs = []
        #print("adding ",len(faces)," faces")
        #print(faces)
        ifaces = [self.addFace(face) for face in faces]
        sh = create(self._fileobject,"IfcClosedShell",[ifaces])
        brp = create(self._fileobject,"IfcFacetedBrep",[sh])
        if color:
            self.addColor(color,brp)
        return brp
| bblacey/FreeCAD-MacOS-CI | src/Mod/Arch/importIFClegacy.py | Python | lgpl-2.1 | 98,719 |
"""Base classes to configure an OSPF daemon"""
from ipaddress import ip_interface, IPv4Network
from typing import Sequence, List
from ipmininet.link import IPIntf
from ipmininet.overlay import Overlay
from ipmininet.utils import L3Router
from .utils import ConfigDict
from .zebra import QuaggaDaemon, Zebra
class OSPFArea(Overlay):
    """An overlay to group OSPF links and routers by area"""
    def __init__(self, area: str, routers: Sequence[str] = (),
                 links: Sequence[str] = (), **props):
        """:param area: the area for this overlay
        :param routers: the set of routers for which all their interfaces
                        belong to that area
        :param links: individual links belonging to this area"""
        super().__init__(nodes=routers, links=links, nprops=props)
        self.area = area
    @property
    def area(self) -> str:
        return self.links_properties['igp_area']
    @area.setter
    def area(self, x: str):
        self.links_properties['igp_area'] = x
        # Also set node property so we can use it for the loopback interface
        self.nodes_properties['igp_area'] = x
    def apply(self, topo):
        # All interfaces of a listed router belong to the area: register
        # every link adjacent to it before the generic overlay logic runs.
        for router in self.nodes:
            self.add_link(*[(router, neighbor) for neighbor in topo.g[router]])
        super().apply(topo)
    def __str__(self):
        return '<OSPF area %s>' % self.area
class OSPF(QuaggaDaemon):
    """This class provides a simple configuration for an OSPF daemon.
    It advertizes one network per interface (the primary one), and set
    interfaces not facing another L3Router to passive"""
    NAME = 'ospfd'
    DEPENDS = (Zebra,)
    KILL_PATTERNS = (NAME,)
    def __init__(self, node, *args, **kwargs):
        super().__init__(node=node, *args, **kwargs)
    def build(self):
        cfg = super().build()
        cfg.redistribute = self.options.redistribute
        itfs = self._node.intfList()
        cfg.interfaces = self._build_interfaces(itfs)
        cfg.networks = self._build_networks(itfs)
        return cfg
    @staticmethod
    def _build_networks(interfaces: List[IPIntf]) -> List['OSPFNetwork']:
        """Return the list of OSPF networks to advertize from the list of
        active OSPF interfaces"""
        # Check that we have at least one IPv4 network on that interface ...
        networks = []
        for itf in interfaces:
            if not itf.ip:
                continue
            prefix = ip_interface('%s/%s' % (itf.ip, itf.prefixLen))
            networks.append(OSPFNetwork(domain=prefix, area=itf.igp_area))
        return networks
    def _build_interfaces(self, interfaces: List[IPIntf]) -> List[ConfigDict]:
        """Return the list of OSPF interface properties from the list of
        active interfaces"""
        props = []
        for itf in interfaces:
            props.append(ConfigDict(
                description=itf.describe,
                name=itf.name,
                # Is the interface between two routers?
                active=self.is_active_interface(itf),
                priority=itf.get('ospf_priority', self.options.priority),
                dead_int=itf.get('ospf_dead_int', self.options.dead_int),
                hello_int=itf.get('ospf_hello_int', self.options.hello_int),
                cost=itf.igp_metric,
                # Is the interface forcefully disabled?
                passive=itf.get('igp_passive', False)))
        return props
    def set_defaults(self, defaults):
        """:param debug: the set of debug events that should be logged
        :param dead_int: Dead interval timer
        :param hello_int: Hello interval timer
        :param priority: priority for the interface, used for DR election
        :param redistribute: set of OSPFRedistributedRoute sources"""
        defaults.dead_int = 'minimal hello-multiplier 5'
        defaults.hello_int = 1
        defaults.priority = 10
        defaults.redistribute = []
        super().set_defaults(defaults)
    @staticmethod
    def is_active_interface(itf) -> bool:
        """Return whether an interface is active or not for the OSPF daemon"""
        domain = itf.broadcast_domain
        if domain is None:
            return False
        # active iff at least one *other* interface in the broadcast
        # domain belongs to an L3 router
        return any(L3Router.is_l3router_intf(other)
                   for other in domain if other != itf)
class OSPFNetwork:
    """A class holding an OSPF network properties"""

    def __init__(self, domain: IPv4Network, area: str):
        # The prefix to advertize, and the area it belongs to.
        self.area = area
        self.domain = domain
class OSPFRedistributedRoute:
    """A class representing a redistributed route type in OSPF"""

    def __init__(self, subtype: str, metric_type=1, metric=1000):
        # Route source (e.g. connected/static) plus its OSPF metric data.
        self.subtype = subtype
        self.metric = metric
        self.metric_type = metric_type
| oliviertilmans/ipmininet | ipmininet/router/config/ospf.py | Python | gpl-2.0 | 4,813 |
def test():
    """Print a fixed message.

    Returns:
        None.
    """
    # The original body ended with a redundant bare `return` followed by
    # commented-out dead code (`##print('This is not')`); both removed.
    print('This is printed')
# Module-level call: runs test() once at import time and keeps its result.
x = test()
def func2():
    """Return the integer constant 1."""
    return 1
| quran1987/Test | test.py | Python | gpl-2.0 | 127 |
"""
Module rendering
"""
import hashlib
import json
import logging
from collections import OrderedDict
from functools import partial
import newrelic.agent
from capa.xqueue_interface import XQueueInterface
from django.conf import settings
from django.contrib.auth.models import User
from django.core.cache import cache
from django.core.context_processors import csrf
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.http import Http404, HttpResponse
from django.views.decorators.csrf import csrf_exempt
from edx_proctoring.services import ProctoringService
from eventtracking import tracker
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import UsageKey, CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from requests.auth import HTTPBasicAuth
from xblock.core import XBlock
from xblock.django.request import django_to_webob_request, webob_to_django_response
from xblock.exceptions import NoSuchHandlerError, NoSuchViewError
from xblock.reference.plugins import FSService
import static_replace
from courseware.access import has_access, get_user_role
from courseware.entrance_exams import (
user_must_complete_entrance_exam,
user_has_passed_entrance_exam
)
from courseware.masquerade import (
MasqueradingKeyValueStore,
filter_displayed_blocks,
is_masquerading_as_specific_student,
setup_masquerade,
)
from courseware.model_data import DjangoKeyValueStore, FieldDataCache
from edxmako.shortcuts import render_to_string
from lms.djangoapps.grades.signals.signals import SCORE_PUBLISHED
from lms.djangoapps.lms_xblock.field_data import LmsFieldData
from lms.djangoapps.lms_xblock.models import XBlockAsidesConfig
from lms.djangoapps.lms_xblock.runtime import LmsModuleSystem
from lms.djangoapps.verify_student.services import VerificationService, ReverificationService
from openedx.core.djangoapps.bookmarks.services import BookmarksService
from openedx.core.djangoapps.crawlers.models import CrawlersConfig
from openedx.core.djangoapps.credit.services import CreditService
from openedx.core.djangoapps.util.user_utils import SystemUser
from openedx.core.lib.xblock_utils import (
replace_course_urls,
replace_jump_to_id_urls,
replace_static_urls,
add_staff_markup,
wrap_xblock,
request_token as xblock_request_token,
)
from openedx.core.lib.url_utils import unquote_slashes, quote_slashes
from student.models import anonymous_id_for_user, user_by_anonymous_id
from student.roles import CourseBetaTesterRole
from util import milestones_helpers
from util.json_request import JsonResponse
from util.model_utils import slugify
from util.sandboxing import can_execute_unsafe_code, get_python_lib_zip
from xblock.runtime import KvsFieldData
from xblock_django.user_service import DjangoXBlockUserService
from xmodule.contentstore.django import contentstore
from xmodule.error_module import ErrorDescriptor, NonStaffErrorDescriptor
from xmodule.exceptions import NotFoundError, ProcessingError
from xmodule.lti_module import LTIModule
from xmodule.mixin import wrap_with_license
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.x_module import XModuleDescriptor
from .field_overrides import OverrideFieldData
# Module-level logger for this file.
log = logging.getLogger(__name__)
# Optional HTTP basic-auth credentials for the external grading queue,
# taken from settings.XQUEUE_INTERFACE['basic_auth'] when configured.
if settings.XQUEUE_INTERFACE.get('basic_auth') is not None:
    REQUESTS_AUTH = HTTPBasicAuth(*settings.XQUEUE_INTERFACE['basic_auth'])
else:
    REQUESTS_AUTH = None
# Shared, module-level client for the xqueue grading service; reused by
# every module system built in this file.
XQUEUE_INTERFACE = XQueueInterface(
    settings.XQUEUE_INTERFACE['url'],
    settings.XQUEUE_INTERFACE['django_auth'],
    REQUESTS_AUTH,
)
# TODO: course_id and course_key are used interchangeably in this file, which is wrong.
# Some brave person should make the variable names consistently someday, but the code's
# coupled enough that it's kind of tricky--you've been warned!
class LmsModuleRenderError(Exception):
    """
    An exception class for exceptions thrown by module_render that don't fit well elsewhere
    """
def make_track_function(request):
    '''
    Make a tracking function that logs what happened.
    For use in ModuleSystem.
    '''
    # Imported lazily to avoid a circular import at module load time.
    import track.views

    def tracker_fn(event_type, event):
        # Forward the event to the standard server-side event tracker.
        return track.views.server_track(request, event_type, event, page='x_module')

    return tracker_fn
def toc_for_course(user, request, course, active_chapter, active_section, field_data_cache):
    '''
    Create a table of contents from the module store
    Return format:
    { 'chapters': [
            {'display_name': name, 'url_name': url_name, 'sections': SECTIONS, 'active': bool},
        ],
        'previous_of_active_section': {..},
        'next_of_active_section': {..}
    }
    where SECTIONS is a list
    [ {'display_name': name, 'url_name': url_name,
        'format': format, 'due': due, 'active' : bool, 'graded': bool}, ...]
    where previous_of_active_section and next_of_active_section have information on the
    next/previous sections of the active section.
    active is set for the section and chapter corresponding to the passed
    parameters, which are expected to be url_names of the chapter+section.
    Everything else comes from the xml, or defaults to "".
    chapters with name 'hidden' are skipped.
    NOTE: assumes that if we got this far, user has access to course. Returns
    None if this is not the case.
    field_data_cache must include data from the course module and 2 levels of its descendants
    '''
    with modulestore().bulk_operations(course.id):
        course_module = get_module_for_descriptor(
            user, request, course, field_data_cache, course.id, course=course
        )
        if course_module is None:
            # NOTE(review): this returns a 3-tuple of Nones while the success
            # path returns a single dict — callers appear to unpack on None;
            # confirm before changing either shape.
            return None, None, None
        toc_chapters = list()
        chapters = course_module.get_display_items()
        # Check for content which needs to be completed
        # before the rest of the content is made available
        required_content = milestones_helpers.get_required_content(course, user)
        # The user may not actually have to complete the entrance exam, if one is required
        if not user_must_complete_entrance_exam(request, user, course):
            required_content = [content for content in required_content if not content == course.entrance_exam_id]
        # Track the previously-seen section/chapter so that once the active
        # section is found, its neighbours can be reported back.
        previous_of_active_section, next_of_active_section = None, None
        last_processed_section, last_processed_chapter = None, None
        found_active_section = False
        for chapter in chapters:
            # Only show required content, if there is required content
            # chapter.hide_from_toc is read-only (bool)
            display_id = slugify(chapter.display_name_with_default_escaped)
            local_hide_from_toc = False
            if required_content:
                # Python 2 `unicode` builtin: compare the chapter's usage key
                # against the required-content id strings.
                if unicode(chapter.location) not in required_content:
                    local_hide_from_toc = True
            # Skip the current chapter if a hide flag is tripped
            if chapter.hide_from_toc or local_hide_from_toc:
                continue
            sections = list()
            for section in chapter.get_display_items():
                # skip the section if it is hidden from the user
                if section.hide_from_toc:
                    continue
                is_section_active = (chapter.url_name == active_chapter and section.url_name == active_section)
                if is_section_active:
                    found_active_section = True
                section_context = {
                    'display_name': section.display_name_with_default_escaped,
                    'url_name': section.url_name,
                    'format': section.format if section.format is not None else '',
                    'due': section.due,
                    'active': is_section_active,
                    'graded': section.graded,
                }
                # Adds a 'proctoring' key when the section is a timed exam.
                _add_timed_exam_info(user, course, section, section_context)
                # update next and previous of active section, if applicable
                if is_section_active:
                    if last_processed_section:
                        previous_of_active_section = last_processed_section.copy()
                        previous_of_active_section['chapter_url_name'] = last_processed_chapter.url_name
                elif found_active_section and not next_of_active_section:
                    next_of_active_section = section_context.copy()
                    next_of_active_section['chapter_url_name'] = chapter.url_name
                sections.append(section_context)
                last_processed_section = section_context
                last_processed_chapter = chapter
            toc_chapters.append({
                'display_name': chapter.display_name_with_default_escaped,
                'display_id': display_id,
                'url_name': chapter.url_name,
                'sections': sections,
                'active': chapter.url_name == active_chapter
            })
        return {
            'chapters': toc_chapters,
            'previous_of_active_section': previous_of_active_section,
            'next_of_active_section': next_of_active_section,
        }
def _add_timed_exam_info(user, course, section, section_context):
    """
    Add in rendering context if exam is a timed exam (which includes proctored)

    Mutates `section_context` in place: adds a 'proctoring' key with the
    attempt-status summary when this section is a time-limited exam and the
    ENABLE_SPECIAL_EXAMS feature flag is on; otherwise leaves it untouched.
    """
    section_is_time_limited = (
        getattr(section, 'is_time_limited', False) and
        settings.FEATURES.get('ENABLE_SPECIAL_EXAMS', False)
    )
    if section_is_time_limited:
        # We need to import this here otherwise Lettuce test
        # harness fails. When running in 'harvest' mode, the
        # test service appears to get into trouble with
        # circular references (not sure which as edx_proctoring.api
        # doesn't import anything from edx-platform). Odd thing
        # is that running: manage.py lms runserver --settings=acceptance
        # works just fine, it's really a combination of Lettuce and the
        # 'harvest' management command
        #
        # One idea is that there is some coupling between
        # lettuce and the 'terrain' Djangoapps projects in /common
        # This would need more investigation
        from edx_proctoring.api import get_attempt_status_summary
        #
        # call into edx_proctoring subsystem
        # to get relevant proctoring information regarding this
        # level of the courseware
        #
        # This will return None, if (user, course_id, content_id)
        # is not applicable
        #
        timed_exam_attempt_context = None
        try:
            timed_exam_attempt_context = get_attempt_status_summary(
                user.id,
                unicode(course.id),
                unicode(section.location)
            )
        except Exception, ex:  # pylint: disable=broad-except
            # safety net in case something blows up in edx_proctoring
            # as this is just informational descriptions, it is better
            # to log and continue (which is safe) than to have it be an
            # unhandled exception
            log.exception(ex)
        if timed_exam_attempt_context:
            # yes, user has proctoring context about
            # this level of the courseware
            # so add to the accordion data context
            section_context.update({
                'proctoring': timed_exam_attempt_context,
            })
def get_module(user, request, usage_key, field_data_cache,
               position=None, log_if_not_found=True, wrap_xmodule_display=True,
               grade_bucket_type=None, depth=0,
               static_asset_path='', course=None):
    """
    Get an instance of the xmodule class identified by location,
    setting the state based on an existing StudentModule, or creating one if none
    exists.
    Arguments:
      - user                  : User for whom we're getting the module
      - request               : current django HTTPrequest. Note: request.user isn't used for anything--all auth
                                and such works based on user.
      - usage_key             : A UsageKey object identifying the module to load
      - field_data_cache      : a FieldDataCache
      - position              : extra information from URL for user-specified
                                position within module
      - log_if_not_found      : If this is True, we log a debug message if we cannot find the requested xmodule.
      - wrap_xmodule_display  : If this is True, wrap the output display in a single div to allow for the
                                XModule javascript to be bound correctly
      - depth                 : number of levels of descendents to cache when loading this module.
                                None means cache all descendents
      - static_asset_path     : static asset path to use (overrides descriptor's value); needed
                                by get_course_info_section, because info section modules
                                do not have a course as the parent module, and thus do not
                                inherit this lms key value.
    Returns: xmodule instance, or None if the user does not have access to the
    module.  If there's an error, will try to return an instance of ErrorModule
    if possible.  If not possible, return None.
    """
    try:
        descriptor = modulestore().get_item(usage_key, depth=depth)
        return get_module_for_descriptor(user, request, descriptor, field_data_cache, usage_key.course_key,
                                         position=position,
                                         wrap_xmodule_display=wrap_xmodule_display,
                                         grade_bucket_type=grade_bucket_type,
                                         static_asset_path=static_asset_path,
                                         course=course)
    except ItemNotFoundError:
        if log_if_not_found:
            log.debug("Error in get_module: ItemNotFoundError")
        return None
    except Exception:  # pylint: disable=broad-except
        # Something has gone terribly wrong, but still not letting it turn into a 500.
        # (Previously a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception is broad enough for the "no 500" goal.)
        log.exception("Error in get_module")
        return None
def get_xqueue_callback_url_prefix(request):
    """
    Calculates default prefix based on request, but allows override via settings
    This is separated from get_module_for_descriptor so that it can be called
    by the LMS before submitting background tasks to run. The xqueue callbacks
    should go back to the LMS, not to the worker.
    """
    # Respect a proxy-supplied scheme first, then fall back to the request's.
    scheme = request.META.get(
        'HTTP_X_FORWARDED_PROTO',
        'https' if request.is_secure() else 'http'
    )
    default_prefix = '{proto}://{host}'.format(proto=scheme, host=request.get_host())
    # A configured callback_url always wins over the computed prefix.
    return settings.XQUEUE_INTERFACE.get('callback_url', default_prefix)
def get_module_for_descriptor(user, request, descriptor, field_data_cache, course_key,
                              position=None, wrap_xmodule_display=True, grade_bucket_type=None,
                              static_asset_path='', disable_staff_debug_info=False,
                              course=None):
    """
    Implements get_module, extracting out the request-specific functionality.
    disable_staff_debug_info : If this is True, exclude staff debug information in the rendering of the module.
    See get_module() docstring for further details.
    """
    # Per-request context pulled off the django request object.
    user_location = getattr(request, 'session', {}).get('country_code')

    kvs = DjangoKeyValueStore(field_data_cache)
    if is_masquerading_as_specific_student(user, course_key):
        # Keep masqueraded student state in the session instead of the DB.
        kvs = MasqueradingKeyValueStore(kvs, request.session)

    return get_module_for_descriptor_internal(
        user=user,
        descriptor=descriptor,
        student_data=KvsFieldData(kvs),
        course_id=course_key,
        track_function=make_track_function(request),
        xqueue_callback_url_prefix=get_xqueue_callback_url_prefix(request),
        position=position,
        wrap_xmodule_display=wrap_xmodule_display,
        grade_bucket_type=grade_bucket_type,
        static_asset_path=static_asset_path,
        user_location=user_location,
        request_token=xblock_request_token(request),
        disable_staff_debug_info=disable_staff_debug_info,
        course=course
    )
def get_module_system_for_user(user, student_data, # TODO # pylint: disable=too-many-statements
                               # Arguments preceding this comment have user binding, those following don't
                               descriptor, course_id, track_function, xqueue_callback_url_prefix,
                               request_token, position=None, wrap_xmodule_display=True, grade_bucket_type=None,
                               static_asset_path='', user_location=None, disable_staff_debug_info=False,
                               course=None):
    """
    Helper function that returns a module system and student_data bound to a user and a descriptor.
    The purpose of this function is to factor out everywhere a user is implicitly bound when creating a module,
    to allow an existing module to be re-bound to a user. Most of the user bindings happen when creating the
    closures that feed the instantiation of ModuleSystem.
    The arguments fall into two categories: those that have explicit or implicit user binding, which are user
    and student_data, and those don't and are just present so that ModuleSystem can be instantiated, which
    are all the other arguments. Ultimately, this isn't too different than how get_module_for_descriptor_internal
    was before refactoring.
    Arguments:
        see arguments for get_module()
        request_token (str): A token unique to the request use by xblock initialization
    Returns:
        (LmsModuleSystem, KvsFieldData): (module system, student_data) bound to, primarily, the user and descriptor
    """
    def make_xqueue_callback(dispatch='score_update'):
        """
        Returns fully qualified callback URL for external queueing system
        """
        # Closure over user/descriptor/course_id: the URL identifies exactly
        # which module instance the graded result should be routed back to.
        relative_xqueue_callback_url = reverse(
            'xqueue_callback',
            kwargs=dict(
                course_id=course_id.to_deprecated_string(),
                userid=str(user.id),
                mod_id=descriptor.location.to_deprecated_string(),
                dispatch=dispatch
            ),
        )
        return xqueue_callback_url_prefix + relative_xqueue_callback_url
    # Default queuename is course-specific and is derived from the course that
    # contains the current module.
    # TODO: Queuename should be derived from 'course_settings.json' of each course
    xqueue_default_queuename = descriptor.location.org + '-' + descriptor.location.course
    xqueue = {
        'interface': XQUEUE_INTERFACE,
        'construct_callback': make_xqueue_callback,
        'default_queuename': xqueue_default_queuename.replace(' ', '_'),
        'waittime': settings.XQUEUE_WAITTIME_BETWEEN_REQUESTS
    }
    def inner_get_module(descriptor):
        """
        Delegate to get_module_for_descriptor_internal() with all values except `descriptor` set.
        Because it does an access check, it may return None.
        """
        # TODO: fix this so that make_xqueue_callback uses the descriptor passed into
        # inner_get_module, not the parent's callback.  Add it as an argument....
        return get_module_for_descriptor_internal(
            user=user,
            descriptor=descriptor,
            student_data=student_data,
            course_id=course_id,
            track_function=track_function,
            xqueue_callback_url_prefix=xqueue_callback_url_prefix,
            position=position,
            wrap_xmodule_display=wrap_xmodule_display,
            grade_bucket_type=grade_bucket_type,
            static_asset_path=static_asset_path,
            user_location=user_location,
            request_token=request_token,
            course=course
        )
    def publish(block, event_type, event):
        """A function that allows XModules to publish events."""
        # Grade events go through the SCORE_PUBLISHED signal (unless the user
        # is masquerading); everything else is forwarded to event tracking
        # with any aside-provided context attached.
        if event_type == 'grade' and not is_masquerading_as_specific_student(user, course_id):
            SCORE_PUBLISHED.send(
                sender=None,
                block=block,
                user=user,
                raw_earned=event['value'],
                raw_possible=event['max_value'],
                only_if_higher=event.get('only_if_higher'),
            )
        else:
            aside_context = {}
            for aside in block.runtime.get_asides(block):
                if hasattr(aside, 'get_event_context'):
                    aside_event_info = aside.get_event_context(event_type, event)
                    if aside_event_info is not None:
                        aside_context[aside.scope_ids.block_type] = aside_event_info
            with tracker.get_tracker().context('asides', {'asides': aside_context}):
                track_function(event_type, event)
    def rebind_noauth_module_to_user(module, real_user):
        """
        A function that allows a module to get re-bound to a real user if it was previously bound to an AnonymousUser.
        Will only work within a module bound to an AnonymousUser, e.g. one that's instantiated by the noauth_handler.
        Arguments:
            module (any xblock type):  the module to rebind
            real_user (django.contrib.auth.models.User):  the user to bind to
        Returns:
            nothing (but the side effect is that module is re-bound to real_user)
        """
        if user.is_authenticated():
            err_msg = ("rebind_noauth_module_to_user can only be called from a module bound to "
                       "an anonymous user")
            log.error(err_msg)
            raise LmsModuleRenderError(err_msg)
        field_data_cache_real_user = FieldDataCache.cache_for_descriptor_descendents(
            course_id,
            real_user,
            module.descriptor,
            asides=XBlockAsidesConfig.possible_asides(),
        )
        student_data_real_user = KvsFieldData(DjangoKeyValueStore(field_data_cache_real_user))
        # Recursively build a fresh module system bound to the real user.
        (inner_system, inner_student_data) = get_module_system_for_user(
            user=real_user,
            student_data=student_data_real_user,  # These have implicit user bindings, rest of args considered not to
            descriptor=module.descriptor,
            course_id=course_id,
            track_function=track_function,
            xqueue_callback_url_prefix=xqueue_callback_url_prefix,
            position=position,
            wrap_xmodule_display=wrap_xmodule_display,
            grade_bucket_type=grade_bucket_type,
            static_asset_path=static_asset_path,
            user_location=user_location,
            request_token=request_token,
            course=course
        )
        module.descriptor.bind_for_student(
            inner_system,
            real_user.id,
            [
                partial(OverrideFieldData.wrap, real_user, course),
                partial(LmsFieldData, student_data=inner_student_data),
            ],
        )
        module.descriptor.scope_ids = (
            module.descriptor.scope_ids._replace(user_id=real_user.id)
        )
        module.scope_ids = module.descriptor.scope_ids  # this is needed b/c NamedTuples are immutable
        # now bind the module to the new ModuleSystem instance and vice-versa
        module.runtime = inner_system
        inner_system.xmodule_instance = module
    # Build a list of wrapping functions that will be applied in order
    # to the Fragment content coming out of the xblocks that are about to be rendered.
    block_wrappers = []
    if is_masquerading_as_specific_student(user, course_id):
        block_wrappers.append(filter_displayed_blocks)
    if settings.FEATURES.get("LICENSING", False):
        block_wrappers.append(wrap_with_license)
    # Wrap the output display in a single div to allow for the XModule
    # javascript to be bound correctly
    if wrap_xmodule_display is True:
        block_wrappers.append(partial(
            wrap_xblock,
            'LmsRuntime',
            extra_data={'course-id': course_id.to_deprecated_string()},
            usage_id_serializer=lambda usage_id: quote_slashes(usage_id.to_deprecated_string()),
            request_token=request_token,
        ))
    # TODO (cpennington): When modules are shared between courses, the static
    # prefix is going to have to be specific to the module, not the directory
    # that the xml was loaded from
    # Rewrite urls beginning in /static to point to course-specific content
    block_wrappers.append(partial(
        replace_static_urls,
        getattr(descriptor, 'data_dir', None),
        course_id=course_id,
        static_asset_path=static_asset_path or descriptor.static_asset_path
    ))
    # Allow URLs of the form '/course/' refer to the root of multicourse directory
    # hierarchy of this course
    block_wrappers.append(partial(replace_course_urls, course_id))
    # this will rewrite intra-courseware links (/jump_to_id/<id>). This format
    # is an improvement over the /course/... format for studio authored courses,
    # because it is agnostic to course-hierarchy.
    # NOTE: module_id is empty string here. The 'module_id' will get assigned in the replacement
    # function, we just need to specify something to get the reverse() to work.
    block_wrappers.append(partial(
        replace_jump_to_id_urls,
        course_id,
        reverse('jump_to_id', kwargs={'course_id': course_id.to_deprecated_string(), 'module_id': ''}),
    ))
    if settings.FEATURES.get('DISPLAY_DEBUG_INFO_TO_STAFF'):
        if is_masquerading_as_specific_student(user, course_id):
            # When masquerading as a specific student, we want to show the debug button
            # unconditionally to enable resetting the state of the student we are masquerading as.
            # We already know the user has staff access when masquerading is active.
            staff_access = True
            # To figure out whether the user has instructor access, we temporarily remove the
            # masquerade_settings from the real_user.  With the masquerading settings in place,
            # the result would always be "False".
            masquerade_settings = user.real_user.masquerade_settings
            del user.real_user.masquerade_settings
            instructor_access = bool(has_access(user.real_user, 'instructor', descriptor, course_id))
            user.real_user.masquerade_settings = masquerade_settings
        else:
            staff_access = has_access(user, 'staff', descriptor, course_id)
            instructor_access = bool(has_access(user, 'instructor', descriptor, course_id))
        if staff_access:
            block_wrappers.append(partial(add_staff_markup, user, instructor_access, disable_staff_debug_info))
    # These modules store data using the anonymous_student_id as a key.
    # To prevent loss of data, we will continue to provide old modules with
    # the per-student anonymized id (as we have in the past),
    # while giving selected modules a per-course anonymized id.
    # As we have the time to manually test more modules, we can add to the list
    # of modules that get the per-course anonymized id.
    is_pure_xblock = isinstance(descriptor, XBlock) and not isinstance(descriptor, XModuleDescriptor)
    module_class = getattr(descriptor, 'module_class', None)
    is_lti_module = not is_pure_xblock and issubclass(module_class, LTIModule)
    if is_pure_xblock or is_lti_module:
        anonymous_student_id = anonymous_id_for_user(user, course_id)
    else:
        anonymous_student_id = anonymous_id_for_user(user, None)
    field_data = LmsFieldData(descriptor._field_data, student_data)  # pylint: disable=protected-access
    user_is_staff = bool(has_access(user, u'staff', descriptor.location, course_id))
    # Assemble the runtime that every closure above feeds into.
    system = LmsModuleSystem(
        track_function=track_function,
        render_template=render_to_string,
        static_url=settings.STATIC_URL,
        xqueue=xqueue,
        # TODO (cpennington): Figure out how to share info between systems
        filestore=descriptor.runtime.resources_fs,
        get_module=inner_get_module,
        user=user,
        debug=settings.DEBUG,
        hostname=settings.SITE_NAME,
        # TODO (cpennington): This should be removed when all html from
        # a module is coming through get_html and is therefore covered
        # by the replace_static_urls code below
        replace_urls=partial(
            static_replace.replace_static_urls,
            data_directory=getattr(descriptor, 'data_dir', None),
            course_id=course_id,
            static_asset_path=static_asset_path or descriptor.static_asset_path,
        ),
        replace_course_urls=partial(
            static_replace.replace_course_urls,
            course_key=course_id
        ),
        replace_jump_to_id_urls=partial(
            static_replace.replace_jump_to_id_urls,
            course_id=course_id,
            jump_to_id_base_url=reverse('jump_to_id', kwargs={'course_id': course_id.to_deprecated_string(), 'module_id': ''})
        ),
        node_path=settings.NODE_PATH,
        publish=publish,
        anonymous_student_id=anonymous_student_id,
        course_id=course_id,
        cache=cache,
        can_execute_unsafe_code=(lambda: can_execute_unsafe_code(course_id)),
        get_python_lib_zip=(lambda: get_python_lib_zip(contentstore, course_id)),
        # TODO: When we merge the descriptor and module systems, we can stop reaching into the mixologist (cpennington)
        mixins=descriptor.runtime.mixologist._mixins,  # pylint: disable=protected-access
        wrappers=block_wrappers,
        get_real_user=user_by_anonymous_id,
        services={
            'fs': FSService(),
            'field-data': field_data,
            'user': DjangoXBlockUserService(user, user_is_staff=user_is_staff),
            'verification': VerificationService(),
            'reverification': ReverificationService(),
            'proctoring': ProctoringService(),
            'milestones': milestones_helpers.get_service(),
            'credit': CreditService(),
            'bookmarks': BookmarksService(user=user),
        },
        get_user_role=lambda: get_user_role(user, course_id),
        descriptor_runtime=descriptor._runtime,  # pylint: disable=protected-access
        rebind_noauth_module_to_user=rebind_noauth_module_to_user,
        user_location=user_location,
        request_token=request_token,
    )
    # pass position specified in URL to module through ModuleSystem
    if position is not None:
        try:
            position = int(position)
        except (ValueError, TypeError):
            log.exception('Non-integer %r passed as position.', position)
            position = None
    system.set('position', position)
    system.set(u'user_is_staff', user_is_staff)
    system.set(u'user_is_admin', bool(has_access(user, u'staff', 'global')))
    system.set(u'user_is_beta_tester', CourseBetaTesterRole(course_id).has_user(user))
    system.set(u'days_early_for_beta', descriptor.days_early_for_beta)
    # make an ErrorDescriptor -- assuming that the descriptor's system is ok
    if has_access(user, u'staff', descriptor.location, course_id):
        system.error_descriptor_class = ErrorDescriptor
    else:
        system.error_descriptor_class = NonStaffErrorDescriptor
    return system, field_data
# TODO: Find all the places that this method is called and figure out how to
# get a loaded course passed into it
def get_module_for_descriptor_internal(user, descriptor, student_data, course_id,  # pylint: disable=invalid-name
                                       track_function, xqueue_callback_url_prefix, request_token,
                                       position=None, wrap_xmodule_display=True, grade_bucket_type=None,
                                       static_asset_path='', user_location=None, disable_staff_debug_info=False,
                                       course=None):
    """
    Actually implement get_module, without requiring a request.
    See get_module() docstring for further details.
    Arguments:
        request_token (str): A unique token for this request, used to isolate xblock rendering
    """
    (system, student_data) = get_module_system_for_user(
        user=user,
        student_data=student_data,  # These have implicit user bindings, the rest of args are considered not to
        descriptor=descriptor,
        course_id=course_id,
        track_function=track_function,
        xqueue_callback_url_prefix=xqueue_callback_url_prefix,
        position=position,
        wrap_xmodule_display=wrap_xmodule_display,
        grade_bucket_type=grade_bucket_type,
        static_asset_path=static_asset_path,
        user_location=user_location,
        request_token=request_token,
        disable_staff_debug_info=disable_staff_debug_info,
        course=course
    )
    # Bind the descriptor to this student: layer field overrides and the
    # student's own data on top of the descriptor's field data.
    descriptor.bind_for_student(
        system,
        user.id,
        [
            partial(OverrideFieldData.wrap, user, course),
            partial(LmsFieldData, student_data=student_data),
        ],
    )
    descriptor.scope_ids = descriptor.scope_ids._replace(user_id=user.id)
    # Do not check access when it's a noauth request.
    # Note that the access check needs to happen after the descriptor is bound
    # for the student, since there may be field override data for the student
    # that affects xblock visibility.
    user_needs_access_check = getattr(user, 'known', True) and not isinstance(user, SystemUser)
    if user_needs_access_check:
        if not has_access(user, 'load', descriptor, course_id):
            return None
    return descriptor
def load_single_xblock(request, user_id, course_id, usage_key_string, course=None):
    """
    Load a single XBlock identified by usage_key_string.
    """
    course_key = CourseKey.from_string(course_id)
    # Map the usage key into the course so run/branch information matches.
    usage_key = UsageKey.from_string(usage_key_string).map_into_course(course_key)
    user = User.objects.get(id=user_id)
    field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
        course_key,
        user,
        modulestore().get_item(usage_key),
        depth=0,
    )
    instance = get_module(user, request, usage_key, field_data_cache,
                          grade_bucket_type='xqueue', course=course)
    if instance is None:
        # Either the block does not exist or this user may not load it.
        msg = "No module {0} for user {1}--access denied?".format(usage_key_string, user)
        log.debug(msg)
        raise Http404
    return instance
@csrf_exempt
def xqueue_callback(request, course_id, userid, mod_id, dispatch):
    '''
    Entry point for graded results from the queueing system.

    Expects a POST whose body matches the xqueue package format below;
    returns an empty 200 response on success, 404 on a malformed package.
    '''
    data = request.POST.copy()
    # Test xqueue package, which we expect to be:
    #   xpackage = {'xqueue_header': json.dumps({'lms_key':'secretkey',...}),
    #               'xqueue_body'  : 'Message from grader'}
    for key in ['xqueue_header', 'xqueue_body']:
        if key not in data:
            raise Http404
    header = json.loads(data['xqueue_header'])
    if not isinstance(header, dict) or 'lms_key' not in header:
        raise Http404
    course_key = CourseKey.from_string(course_id)
    with modulestore().bulk_operations(course_key):
        course = modulestore().get_course(course_key, depth=0)
        instance = load_single_xblock(request, userid, course_id, mod_id, course=course)
        # Transfer 'queuekey' from xqueue response header to the data.
        # This is required to use the interface defined by 'handle_ajax'
        data.update({'queuekey': header['lms_key']})
        # We go through the "AJAX" path
        # So far, the only dispatch from xqueue will be 'score_update'
        try:
            # Can ignore the return value--not used for xqueue_callback
            instance.handle_ajax(dispatch, data)
            # Save any state that has changed to the underlying KeyValueStore
            instance.save()
        except:
            # Deliberate log-and-reraise: the bare except only records the
            # failure before propagating it unchanged to the caller.
            log.exception("error processing ajax call")
            raise
        return HttpResponse("")
@csrf_exempt
def handle_xblock_callback_noauth(request, course_id, usage_id, handler, suffix=None):
    """
    Dispatch an XBlock handler without requiring authentication.

    Flags the request's user as unknown so downstream access checks treat
    the caller as unverified, then delegates to ``_invoke_xblock_handler``.
    """
    # Downstream code inspects this flag to treat the user as anonymous.
    request.user.known = False
    course_key = CourseKey.from_string(course_id)
    with modulestore().bulk_operations(course_key):
        containing_course = modulestore().get_course(course_key, depth=0)
        return _invoke_xblock_handler(
            request, course_id, usage_id, handler, suffix, course=containing_course
        )
def handle_xblock_callback(request, course_id, usage_id, handler, suffix=None):
    """
    Generic view for extensions. This is where AJAX calls go.

    Arguments:
        request: the django request.
        course_id: defines the course context for this request.
        usage_id: identifies the XBlock instance (module location).
        handler: the handler name to invoke on the block.
        suffix: optional extra path passed through to the handler.

    Returns a 403 response if the user is not logged in. Raises Http404 if
    the location and course_id do not identify a valid module, the module
    is not accessible by the user, or the module raises NotFoundError. Any
    other error raised by the module escapes this function.
    """
    if not request.user.is_authenticated():
        return HttpResponse('Unauthenticated', status=403)
    try:
        course_key = CourseKey.from_string(course_id)
    except InvalidKeyError:
        raise Http404("Invalid location")
    with modulestore().bulk_operations(course_key):
        try:
            containing_course = modulestore().get_course(course_key)
        except ItemNotFoundError:
            raise Http404("invalid location")
        return _invoke_xblock_handler(
            request, course_id, usage_id, handler, suffix, course=containing_course
        )
def get_module_by_usage_id(request, course_id, usage_id, disable_staff_debug_info=False, course=None):
    """
    Gets a module instance based on its `usage_id` in a course, for a given request/user

    Returns (instance, tracking_context)

    Raises Http404 when the ids fail to parse, the descriptor cannot be
    found, or the resulting module is not accessible to the user.
    """
    user = request.user
    try:
        # Deprecated-string parsing keeps old slash-separated ids working.
        course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
        usage_key = course_id.make_usage_key_from_deprecated_string(unquote_slashes(usage_id))
    except InvalidKeyError:
        raise Http404("Invalid location")
    try:
        descriptor = modulestore().get_item(usage_key)
        # For library-sourced blocks this yields the original usage key/version.
        descriptor_orig_usage_key, descriptor_orig_version = modulestore().get_block_original_usage(usage_key)
    except ItemNotFoundError:
        log.warn(
            "Invalid location for course id %s: %s",
            usage_key.course_key,
            usage_key
        )
        raise Http404
    # Context attached to tracking events emitted while handling this module.
    tracking_context = {
        'module': {
            'display_name': descriptor.display_name_with_default_escaped,
            'usage_key': unicode(descriptor.location),
        }
    }
    # For blocks that are inherited from a content library, we add some additional metadata:
    if descriptor_orig_usage_key is not None:
        tracking_context['module']['original_usage_key'] = unicode(descriptor_orig_usage_key)
        tracking_context['module']['original_usage_version'] = unicode(descriptor_orig_version)
    # Masquerade may replace `user` with the effective (masqueraded) user;
    # this happens before the field data cache is built so the cache loads
    # state for that user.
    unused_masquerade, user = setup_masquerade(request, course_id, has_access(user, 'staff', descriptor, course_id))
    field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
        course_id,
        user,
        descriptor,
        read_only=CrawlersConfig.is_crawler(request),
    )
    instance = get_module_for_descriptor(
        user,
        request,
        descriptor,
        field_data_cache,
        usage_key.course_key,
        disable_staff_debug_info=disable_staff_debug_info,
        course=course
    )
    if instance is None:
        # Either permissions just changed, or someone is trying to be clever
        # and load something they shouldn't have access to.
        log.debug("No module %s for user %s -- access denied?", usage_key, user)
        raise Http404
    return (instance, tracking_context)
def _invoke_xblock_handler(request, course_id, usage_id, handler, suffix, course=None):
    """
    Invoke an XBlock handler, either authenticated or not.

    Arguments:
        request (HttpRequest): the current request
        course_id (str): A string of the form org/course/run
        usage_id (str): A string of the form i4x://org/course/category/name@revision
        handler (str): The name of the handler to invoke
        suffix (str): The suffix to pass to the handler when invoked

    Returns a django response converted from the handler's webob response.
    Raises Http404 for an unparseable course id, a missing handler, or a
    module reporting NotFoundError; any other handler error propagates.
    """
    # Check submitted files
    files = request.FILES or {}
    error_msg = _check_files_limits(files)
    if error_msg:
        # 413 = Request Entity Too Large
        return JsonResponse({'success': error_msg}, status=413)
    # Make a CourseKey from the course_id, raising a 404 upon parse error.
    try:
        course_key = CourseKey.from_string(course_id)
    except InvalidKeyError:
        raise Http404
    # Gather metrics for New Relic so we can slice data in New Relic Insights
    newrelic.agent.add_custom_parameter('course_id', unicode(course_key))
    newrelic.agent.add_custom_parameter('org', unicode(course_key.org))
    with modulestore().bulk_operations(course_key):
        instance, tracking_context = get_module_by_usage_id(request, course_id, usage_id, course=course)
        # Name the transaction so that we can view XBlock handlers separately in
        # New Relic. The suffix is necessary for XModule handlers because the
        # "handler" in those cases is always just "xmodule_handler".
        nr_tx_name = "{}.{}".format(instance.__class__.__name__, handler)
        nr_tx_name += "/{}".format(suffix) if (suffix and handler == "xmodule_handler") else ""
        newrelic.agent.set_transaction_name(nr_tx_name, group="Python/XBlock/Handler")
        tracking_context_name = 'module_callback_handler'
        req = django_to_webob_request(request)
        try:
            with tracker.get_tracker().context(tracking_context_name, tracking_context):
                resp = instance.handle(handler, req, suffix)
                # Entrance-exam problem checks additionally report whether the
                # exam is now passed, so the client can update its UI.
                if suffix == 'problem_check' \
                        and course \
                        and getattr(course, 'entrance_exam_enabled', False) \
                        and getattr(instance, 'in_entrance_exam', False):
                    ee_data = {'entrance_exam_passed': user_has_passed_entrance_exam(request, course)}
                    resp = append_data_to_webob_response(resp, ee_data)
        except NoSuchHandlerError:
            log.exception("XBlock %s attempted to access missing handler %r", instance, handler)
            raise Http404
        # If we can't find the module, respond with a 404
        except NotFoundError:
            log.exception("Module indicating to user that request doesn't exist")
            raise Http404
        # For XModule-specific errors, we log the error and respond with an error message
        except ProcessingError as err:
            log.warning("Module encountered an error while processing AJAX call",
                        exc_info=True)
            return JsonResponse({'success': err.args[0]}, status=200)
        # If any other error occurred, re-raise it to trigger a 500 response
        except Exception:
            log.exception("error executing xblock handler")
            raise
    return webob_to_django_response(resp)
def hash_resource(resource):
    """
    Hash a :class:`xblock.fragment.FragmentResource`.

    Returns the hex MD5 digest of the ``repr`` of every element of the
    resource tuple, giving a stable identifier suitable for deduplication.
    """
    md5 = hashlib.md5()
    for data in resource:
        # Encode explicitly: md5.update() requires bytes on Python 3, and
        # an ASCII repr round-trips identically on Python 2.
        md5.update(repr(data).encode('utf-8'))
    return md5.hexdigest()
def xblock_view(request, course_id, usage_id, view_name):
    """
    Return the rendered view of a given XBlock, with related resources.

    The JSON payload contains:
        html: the rendered html of the view
        resources: list of (resource hash, resource description) pairs
        csrf_token: token usable for follow-up handler calls
    """
    if not settings.FEATURES.get('ENABLE_XBLOCK_VIEW_ENDPOINT', False):
        log.warn("Attempt to use deactivated XBlock view endpoint -"
                 " see FEATURES['ENABLE_XBLOCK_VIEW_ENDPOINT']")
        raise Http404
    if not request.user.is_authenticated():
        raise PermissionDenied
    try:
        course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    except InvalidKeyError:
        raise Http404("Invalid location")
    with modulestore().bulk_operations(course_key):
        course = modulestore().get_course(course_key)
        instance, _ = get_module_by_usage_id(request, course_id, usage_id, course=course)
        try:
            rendered = instance.render(view_name, context=request.GET)
        except NoSuchViewError:
            log.exception("Attempt to render missing view on %s: %s", instance, view_name)
            raise Http404
        # Key each resource by its content hash so clients can deduplicate.
        hashed = OrderedDict(
            (hash_resource(resource), resource) for resource in rendered.resources
        )
        return JsonResponse({
            'html': rendered.content,
            'resources': hashed.items(),
            'csrf_token': unicode(csrf(request)['csrf_token']),
        })
def _check_files_limits(files):
    """
    Check if the files in a request are under the limits defined by
    `settings.MAX_FILEUPLOADS_PER_INPUT` and
    `settings.STUDENT_FILEUPLOAD_MAX_SIZE`.

    Returns None when every file is acceptable, otherwise an error message.
    """
    for fileinput_id in files.keys():
        submitted = files.getlist(fileinput_id)
        # Reject over-long submissions before inspecting individual sizes.
        if len(submitted) > settings.MAX_FILEUPLOADS_PER_INPUT:
            return ('Submission aborted! Maximum %d files may be submitted at once'
                    % settings.MAX_FILEUPLOADS_PER_INPUT)
        # Report the first file exceeding the per-file size limit (bytes).
        oversized = [f for f in submitted if f.size > settings.STUDENT_FILEUPLOAD_MAX_SIZE]
        if oversized:
            return ('Submission aborted! Your file "%s" is too large (max size: %d MB)'
                    % (oversized[0].name, settings.STUDENT_FILEUPLOAD_MAX_SIZE / (1000 ** 2)))
    return None
def append_data_to_webob_response(response, data):
    """
    Merge extra keys into the body of a JSON webob response.

    Arguments:
        response (webob response object): response to modify in place
        data (dict): keys/values to merge into the JSON body

    Returns:
        (webob response object): the same response, with its body updated
        when the response carried JSON; otherwise untouched.
    """
    # Non-JSON responses pass through unmodified.
    if getattr(response, 'content_type', None) != 'application/json':
        return response
    body = json.loads(response.body)
    body.update(data)
    response.body = json.dumps(body)
    return response
| prarthitm/edxplatform | lms/djangoapps/courseware/module_render.py | Python | agpl-3.0 | 47,090 |
from datetime import datetime
from django.http import (
HttpResponse,
HttpResponseBadRequest,
HttpResponseForbidden,
)
from django.urls import reverse
from memoized import memoized
from tastypie import fields
from tastypie.authentication import Authentication
from tastypie.bundle import Bundle
from tastypie.exceptions import BadRequest
from casexml.apps.case.xform import get_case_updates
from corehq.apps.api.query_adapters import GroupQuerySetAdapter
from corehq.apps.api.resources.pagination import DoesNothingPaginatorCompat
from corehq.apps.api.es import ElasticAPIQuerySet, FormESView, es_query_from_get_params
from corehq.apps.api.fields import (
ToManyDictField,
ToManyDocumentsField,
ToManyListDictField,
UseIfRequested,
)
from corehq.apps.api.models import ESCase, ESXFormInstance
from corehq.apps.api.resources import (
CouchResourceMixin,
DomainSpecificResourceMixin,
HqBaseResource,
SimpleSortableResourceMixin,
v0_1,
v0_3,
)
from corehq.apps.api.resources.auth import (
DomainAdminAuthentication,
LoginAndDomainAuthentication,
RequirePermissionAuthentication,
)
from corehq.apps.api.resources.meta import CustomResourceMeta
from corehq.apps.api.resources.v0_1 import _safe_bool
from corehq.apps.api.serializers import (
CommCareCaseSerializer,
XFormInstanceSerializer,
)
from corehq.apps.api.util import get_obj, get_object_or_not_exist
from corehq.apps.app_manager.app_schemas.case_properties import (
get_all_case_properties,
)
from corehq.apps.app_manager.dbaccessors import (
get_all_built_app_results,
get_apps_in_domain,
)
from corehq.apps.app_manager.models import Application, RemoteApp, LinkedApplication
from corehq.apps.groups.models import Group
from corehq.apps.users.models import CouchUser, Permissions
from corehq.apps.users.util import format_username
from corehq.motech.repeaters.models import CommCareCase, Repeater, get_all_repeater_types
from corehq.util.view_utils import absolute_reverse
from no_exceptions.exceptions import Http400
# By the time a test case is running, the resource is already instantiated,
# so as a hack until this can be remedied, there is a global that
# can be set to provide a mock.
# Read by XFormInstanceResource.xform_es(); when truthy it replaces FormESView.
MOCK_XFORM_ES = None
class XFormInstanceResource(SimpleSortableResourceMixin, HqBaseResource, DomainSpecificResourceMixin):
    """This version of the form resource is built of Elasticsearch data
    which gets wrapped by ``ESXFormInstance``.
    No type conversion is done e.g. dates and some fields are named differently than in the
    Python models.
    """
    # Fields mapped straight from the ES document onto the API payload.
    id = fields.CharField(attribute='_id', readonly=True, unique=True)
    domain = fields.CharField(attribute='domain')
    form = fields.DictField(attribute='form_data')
    type = fields.CharField(attribute='type')
    version = fields.CharField(attribute='version')
    uiversion = fields.CharField(attribute='uiversion', blank=True, null=True)
    metadata = fields.DictField(attribute='metadata', blank=True, null=True)
    received_on = fields.CharField(attribute="received_on")
    edited_on = fields.CharField(attribute="edited_on", null=True)
    server_modified_on = fields.CharField(attribute="server_modified_on")
    indexed_on = fields.CharField(attribute='inserted_at')
    app_id = fields.CharField(attribute='app_id', null=True)
    build_id = fields.CharField(attribute='build_id', null=True)
    initial_processing_complete = fields.BooleanField(
        attribute='initial_processing_complete', null=True)
    problem = fields.CharField(attribute='problem', null=True)
    archived = fields.CharField(readonly=True)
    def dehydrate_archived(self, bundle):
        # Computed field: reflects the wrapped document's archived state.
        return bundle.obj.is_archived
    # Related cases are expensive, so only serialized when explicitly requested.
    cases = UseIfRequested(
        ToManyDocumentsField(
            'corehq.apps.api.resources.v0_4.CommCareCaseResource',
            attribute=lambda xform: _cases_referenced_by_xform(xform)
        )
    )
    attachments = fields.DictField(readonly=True, null=True)
    def dehydrate_attachments(self, bundle):
        # Expose each attachment's metadata plus an absolute download URL.
        attachments_dict = getattr(bundle.obj, 'blobs', None)
        if not attachments_dict:
            return {}
        domain = bundle.obj.domain
        form_id = bundle.obj._id
        def _normalize_meta(name, meta):
            # One attachment entry: content type, byte length, download URL.
            return {
                'content_type': meta.content_type,
                'length': meta.content_length,
                'url': absolute_reverse('api_form_attachment', args=(domain, form_id, name))
            }
        return {
            name: _normalize_meta(name, meta) for name, meta in attachments_dict.items()
        }
    is_phone_submission = fields.BooleanField(readonly=True)
    def dehydrate_is_phone_submission(self, bundle):
        # Treats the presence of an OpenRosa version header as evidence the
        # submission came from a phone.
        headers = getattr(bundle.obj, 'openrosa_headers', None)
        if not headers:
            return False
        return headers.get('HTTP_X_OPENROSA_VERSION') is not None
    edited_by_user_id = fields.CharField(readonly=True, null=True)
    def dehydrate_edited_by_user_id(self, bundle):
        # Only populated for edited forms; implicitly None otherwise.
        if bundle.obj.edited_on:
            return (getattr(bundle.obj, 'auth_context') or {}).get('user_id', None)
    def obj_get(self, bundle, **kwargs):
        instance_id = kwargs['pk']
        domain = kwargs['domain']
        return self.xform_es(domain).get_document(instance_id)
    def xform_es(self, domain):
        # Tests may set the module-level MOCK_XFORM_ES to bypass Elasticsearch.
        return MOCK_XFORM_ES or FormESView(domain)
    def obj_get_list(self, bundle, domain, **kwargs):
        try:
            es_query = es_query_from_get_params(bundle.request.GET, domain, ['include_archived'])
        except Http400 as e:
            # Surface bad query parameters as a tastypie 400.
            raise BadRequest(str(e))
        # Note that FormESView is used only as an ES client, for `run_query` against the proper index
        return ElasticAPIQuerySet(
            payload=es_query,
            model=ESXFormInstance,
            es_client=self.xform_es(domain)
        ).order_by('-received_on')
    def detail_uri_kwargs(self, bundle_or_obj):
        return {
            'pk': get_obj(bundle_or_obj).form_id
        }
    class Meta(CustomResourceMeta):
        authentication = RequirePermissionAuthentication(Permissions.edit_data)
        object_class = ESXFormInstance
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        resource_name = 'form'
        ordering = ['received_on', 'server_modified_on', 'indexed_on']
        serializer = XFormInstanceSerializer(formats=['json'])
def _cases_referenced_by_xform(esxform):
    """Get a list of cases referenced by an ESXFormInstance.

    Note: this does not load cases referenced in stock transactions
    because ESXFormInstance does not have access to form XML, which
    is needed to find stock transactions.
    """
    assert esxform.domain, esxform.form_id
    case_ids = {case_update.id for case_update in get_case_updates(esxform)}
    return CommCareCase.objects.get_cases(list(case_ids), esxform.domain)
class RepeaterResource(CouchResourceMixin, HqBaseResource, DomainSpecificResourceMixin):
    # CRUD resource over Repeater documents; `type` exposes the concrete
    # doc_type so clients can tell repeater subclasses apart.
    id = fields.CharField(attribute='_id', readonly=True, unique=True)
    type = fields.CharField(attribute='doc_type')
    domain = fields.CharField(attribute='domain')
    url = fields.CharField(attribute='url')
    version = fields.CharField(attribute='version', null=True)
    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_list'):
        # Built by hand because the detail route needs the domain as well as
        # the primary key.
        if isinstance(bundle_or_obj, Bundle):
            obj = bundle_or_obj.obj
        elif bundle_or_obj is None:
            return None
        else:
            obj = bundle_or_obj
        return reverse('api_dispatch_detail', kwargs=dict(resource_name=self._meta.resource_name,
                                                          domain=obj.domain,
                                                          api_name=self._meta.api_name,
                                                          pk=obj._id))
    def obj_get_list(self, bundle, domain, **kwargs):
        repeaters = Repeater.by_domain(domain)
        return list(repeaters)
    def obj_get(self, bundle, **kwargs):
        # Accept any registered repeater subclass for the detail view.
        return get_object_or_not_exist(Repeater, kwargs['pk'], kwargs['domain'],
                                       additional_doc_types=list(get_all_repeater_types()))
    def obj_create(self, bundle, request=None, **kwargs):
        # The domain comes from the URL, never from the posted payload.
        bundle.obj.domain = kwargs['domain']
        bundle = self._update(bundle)
        bundle.obj.save()
        return bundle
    def obj_update(self, bundle, **kwargs):
        bundle.obj = Repeater.get(kwargs['pk'])
        assert bundle.obj.domain == kwargs['domain']
        bundle = self._update(bundle)
        # Re-checked after applying client data so a PUT cannot move the
        # repeater into another domain.
        assert bundle.obj.domain == kwargs['domain']
        bundle.obj.save()
        return bundle
    def _update(self, bundle):
        # Copy every submitted field onto the document, then run the normal
        # tastypie hydration cycle.
        for key, value in bundle.data.items():
            setattr(bundle.obj, key, value)
        bundle = self.full_hydrate(bundle)
        return bundle
    class Meta(CustomResourceMeta):
        authentication = DomainAdminAuthentication()
        object_class = Repeater
        resource_name = 'data-forwarding'
        detail_allowed_methods = ['get', 'put', 'delete']
        list_allowed_methods = ['get', 'post']
class CommCareCaseResource(SimpleSortableResourceMixin, v0_3.CommCareCaseResource, DomainSpecificResourceMixin):
    # Related documents are expensive to fetch, so each of these is only
    # serialized when the client explicitly asks for it (UseIfRequested).
    xforms_by_name = UseIfRequested(ToManyListDictField(
        'corehq.apps.api.resources.v0_4.XFormInstanceResource',
        attribute='xforms_by_name'
    ))
    xforms_by_xmlns = UseIfRequested(ToManyListDictField(
        'corehq.apps.api.resources.v0_4.XFormInstanceResource',
        attribute='xforms_by_xmlns'
    ))
    child_cases = UseIfRequested(
        ToManyDictField(
            'corehq.apps.api.resources.v0_4.CommCareCaseResource',
            attribute='child_cases'
        )
    )
    parent_cases = UseIfRequested(
        ToManyDictField(
            'corehq.apps.api.resources.v0_4.CommCareCaseResource',
            attribute='parent_cases'
        )
    )
    domain = fields.CharField(attribute='domain')
    # Date fields default to a sentinel far in the past when missing.
    date_modified = fields.CharField(attribute='modified_on', default="1900-01-01")
    indexed_on = fields.CharField(attribute='inserted_at', default="1900-01-01")
    server_date_modified = fields.CharField(attribute='server_modified_on', default="1900-01-01")
    server_date_opened = fields.CharField(attribute='server_opened_on', default="1900-01-01")
    opened_by = fields.CharField(attribute='opened_by', null=True)
    closed_by = fields.CharField(attribute='closed_by', null=True)
    def obj_get(self, bundle, **kwargs):
        # Detail lookup goes through the inherited case Elasticsearch client.
        case_id = kwargs['pk']
        domain = kwargs['domain']
        return self.case_es(domain).get_document(case_id)
    class Meta(v0_3.CommCareCaseResource.Meta):
        max_limit = 5000
        serializer = CommCareCaseSerializer()
        ordering = ['server_date_modified', 'date_modified', 'indexed_on']
        object_class = ESCase
class GroupResource(CouchResourceMixin, HqBaseResource, DomainSpecificResourceMixin):
    # Read-only resource over mobile-worker groups.
    id = fields.CharField(attribute='get_id', unique=True, readonly=True)
    domain = fields.CharField(attribute='domain')
    name = fields.CharField(attribute='name')
    users = fields.ListField(attribute='get_user_ids')
    case_sharing = fields.BooleanField(attribute='case_sharing', default=False)
    reporting = fields.BooleanField(default=True, attribute='reporting')
    metadata = fields.DictField(attribute='metadata', null=True, blank=True)
    def obj_get(self, bundle, **kwargs):
        return get_object_or_not_exist(Group, kwargs['pk'], kwargs['domain'])
    def obj_get_list(self, bundle, domain, **kwargs):
        # Queryset-like adapter over the domain's groups.
        return GroupQuerySetAdapter(domain)
    class Meta(CustomResourceMeta):
        authentication = RequirePermissionAuthentication(Permissions.edit_commcare_users)
        object_class = Group
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        resource_name = 'group'
class SingleSignOnResource(HqBaseResource, DomainSpecificResourceMixin):
    """
    This resource does not require "authorization" per se, but
    rather allows a POST of username and password and returns
    just the authenticated user, if the credentials and domain
    are correct.
    """
    def post_list(self, request, **kwargs):
        domain = kwargs.get('domain')
        request.domain = domain
        username = request.POST.get('username')
        password = request.POST.get('password')
        # Both credentials must be present before touching the user store.
        if username is None:
            return HttpResponseBadRequest('Missing required parameter: username')
        if password is None:
            return HttpResponseBadRequest('Missing required parameter: password')
        # Bare usernames are qualified with the domain to form the full address.
        if '@' not in username:
            username = format_username(username, domain)
        couch_user = CouchUser.get_by_username(username)
        credentials_ok = (
            couch_user is not None
            and couch_user.is_member_of(domain)
            and couch_user.check_password(password)
        )
        if not credentials_ok:
            return HttpResponseForbidden()
        # Serialize with the resource matching the concrete user type.
        if couch_user.is_commcare_user():
            user_resource = v0_1.CommCareUserResource()
        elif couch_user.is_web_user():
            user_resource = v0_1.WebUserResource()
        else:
            return HttpResponseForbidden()
        bundle = user_resource.full_dehydrate(
            user_resource.build_bundle(obj=couch_user, request=request)
        )
        return user_resource.create_response(request, bundle, response_class=HttpResponse)
    def get_list(self, bundle, **kwargs):
        # GETs are rejected: this endpoint only accepts credential POSTs.
        return HttpResponseForbidden()
    def get_detail(self, bundle, **kwargs):
        return HttpResponseForbidden()
    class Meta(CustomResourceMeta):
        authentication = Authentication()
        resource_name = 'sso'
        detail_allowed_methods = []
        list_allowed_methods = ['post']
class BaseApplicationResource(CouchResourceMixin, HqBaseResource, DomainSpecificResourceMixin):
    def obj_get_list(self, bundle, domain, **kwargs):
        # Oldest first; apps lacking a creation date sort to the front.
        return sorted(get_apps_in_domain(domain, include_remote=False),
                      key=lambda app: app.date_created or datetime.min)
    def obj_get(self, bundle, **kwargs):
        # support returning linked applications upon receiving an application request
        return get_object_or_not_exist(Application, kwargs['pk'], kwargs['domain'],
                                       additional_doc_types=[LinkedApplication._doc_type])
    class Meta(CustomResourceMeta):
        authentication = LoginAndDomainAuthentication(allow_session_auth=True)
        object_class = Application
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        resource_name = 'application'
        paginator_class = DoesNothingPaginatorCompat
class ApplicationResource(BaseApplicationResource):
    id = fields.CharField(attribute='_id')
    name = fields.CharField(attribute='name')
    version = fields.IntegerField(attribute='version')
    is_released = fields.BooleanField(attribute='is_released', null=True)
    built_on = fields.DateTimeField(attribute='built_on', null=True)
    build_comment = fields.CharField(attribute='build_comment', null=True)
    built_from_app_id = fields.CharField(attribute='copy_of', null=True)
    modules = fields.ListField()
    versions = fields.ListField()
    @staticmethod
    def dehydrate_versions(bundle):
        # Built copies (copy_of set) carry no version history of their own.
        app = bundle.obj
        if app.copy_of:
            return []
        results = get_all_built_app_results(app.domain, app.get_id)
        return [
            {
                'id': result['value']['_id'],
                'built_on': result['value']['built_on'],
                'build_comment': result['value']['build_comment'],
                'is_released': result['value']['is_released'],
                'version': result['value']['version'],
            }
            for result in results
        ]
    @memoized
    def get_all_case_properties_local(self, app):
        # Memoized so repeated modules of the same app reuse one property map.
        return get_all_case_properties(app, exclude_invalid_properties=False)
    def dehydrate_module(self, app, module, langs):
        """
        Convert a Module object to a JValue representation
        with just the good parts.
        NOTE: This is not a tastypie "magic"-name method to
        dehydrate the "module" field; there is no such field.
        """
        try:
            dehydrated = {}
            dehydrated['case_type'] = module.case_type
            all_case_properties = self.get_all_case_properties_local(app)
            dehydrated['case_properties'] = all_case_properties[module.case_type]
            dehydrated['unique_id'] = module.unique_id
            dehydrated['forms'] = []
            for form in module.forms:
                form_unique_id = form.unique_id
                form_jvalue = {
                    'xmlns': form.xmlns,
                    'name': form.name,
                    'questions': form.get_questions(
                        langs,
                        include_triggers=True,
                        include_groups=True,
                        include_translations=True,
                        include_fixtures=True,
                    ),
                    'unique_id': form_unique_id,
                }
                dehydrated['forms'].append(form_jvalue)
            return dehydrated
        except Exception as e:
            # Deliberate best-effort: one broken module must not take down
            # the whole application payload.
            return {
                'error': str(e)
            }
    def dehydrate_modules(self, bundle):
        app = bundle.obj
        # support returning linked applications upon receiving an application list request
        if app.doc_type in [Application._doc_type, LinkedApplication._doc_type]:
            return [self.dehydrate_module(app, module, app.langs) for module in bundle.obj.modules]
        elif app.doc_type == RemoteApp._doc_type:
            return []
    def dehydrate(self, bundle):
        # The "extras" flag switches the payload to the raw couch document
        # merged with the standard serialized fields.
        if not _safe_bool(bundle, "extras"):
            return super(ApplicationResource, self).dehydrate(bundle)
        else:
            app_data = {}
            app_data.update(bundle.obj._doc)
            app_data.update(bundle.data)
            return app_data
| dimagi/commcare-hq | corehq/apps/api/resources/v0_4.py | Python | bsd-3-clause | 18,133 |
def calculate_firewall_cost(firewall, delay=0):
    """
    Compute the severity of crossing a scanner firewall (AoC 2017, day 13).

    Arguments:
        firewall: dict mapping layer depth -> scanner range.
        delay: picoseconds to wait before entering layer 0.

    Returns:
        (total_severity, hit_count) when the trip completes, where severity
        is the sum of depth * range over every layer that caught the packet.
        When ``delay`` is non-zero, returns False as soon as any layer
        catches the packet (early exit used by the safe-delay search).
    """
    layers = max(firewall.keys()) + 1
    hits = []
    for depth in range(layers):
        if depth not in firewall:
            continue
        scanner_range = firewall[depth]
        # A scanner bounces over 2*(range-1) positions and sits at the top
        # whenever the arrival time is a multiple of that period. A range-1
        # scanner never moves, so it always catches the packet; the original
        # code divided by zero in that case.
        period = (scanner_range - 1) * 2
        caught = period == 0 or (depth + delay) % period == 0
        if caught:
            if delay:
                return False
            hits.append(depth * scanner_range)
    return sum(hits), len(hits)
def find_first_passable_iteration(firewall):
    """
    Return the smallest delay in [0, 10000) for which the packet crosses the
    firewall without being caught, or None when no such delay exists in
    that window.

    calculate_firewall_cost returns False (not a tuple) for a non-zero delay
    that gets caught; the original code subscripted that False value and
    raised TypeError for every caught delay >= 1.
    """
    for delay in range(10000):
        result = calculate_firewall_cost(firewall, delay=delay)
        if result and result[1] == 0:
            return delay
    return None
def test_calculate_firewall_cost():
    """The worked example from the puzzle text has severity 24."""
    severity, _ = calculate_firewall_cost({0: 3, 1: 2, 4: 4, 6: 4})
    assert severity == 24
def test_calculate_firewall_delay():
    """Delaying 10 picoseconds lets the example packet through untouched."""
    assert find_first_passable_iteration({0: 3, 1: 2, 4: 4, 6: 4}) == 10
if __name__ == "__main__":
    # Parse "depth: range" lines into {depth: range}.
    firewall = {}
    # Context manager guarantees the handle is closed (original leaked it).
    with open("input/dec13") as input_file:
        for line in input_file:
            line = line.strip()
            # Skip blank lines: int('') would raise ValueError before the
            # original's `if vals:` guard could ever run.
            if not line:
                continue
            depth, scanner_range = (int(part) for part in line.split(': '))
            firewall[depth] = scanner_range
    # Part 1: severity of crossing with no delay.
    print(calculate_firewall_cost(firewall)[0])
    # Part 2: smallest delay that crosses without being caught.
    for delay in range(0, 10000000):
        result = calculate_firewall_cost(firewall, delay=delay)
        if delay % 10000 == 0:
            print(" .. " + str(delay))
        if result and result[1] == 0:
            print(delay)
            break
| matslindh/codingchallenges | adventofcode2017/13.py | Python | mit | 1,380 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import IntegrityError, models, transaction
from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
class Migration(DataMigration):
    def forwards(self, orm):
        """
        Merge duplicate Environment rows (same name within an organization)
        and duplicate ReleaseEnvironment rows, repointing references to the
        oldest surviving row.
        """
        # This migration commits in batches via transaction.atomic, so close
        # the transaction South opened before doing any work; it is reopened
        # at the end.
        db.commit_transaction()
        # Pass 1: find (name, organization) pairs with more than one
        # Environment row.
        dupe_envs = orm.Environment.objects.values('name', 'organization_id')\
            .annotate(ecount=models.Count('id'))\
            .filter(ecount__gt=1)
        for env in RangeQuerySetWrapperWithProgressBar(dupe_envs):
            name = env['name']
            organization_id = env['organization_id']
            envs = list(orm.Environment.objects.filter(
                name=name,
                organization_id=organization_id,
            ).order_by('date_added'))
            # Keep the oldest environment; fold the rest into it.
            to_env = envs[0]
            from_envs = envs[1:]
            try:
                with transaction.atomic():
                    orm.EnvironmentProject.objects.filter(
                        environment__in=from_envs,
                    ).update(environment=to_env)
            except IntegrityError:
                # Bulk repoint collided with an existing row; retry one row
                # at a time and drop the rows that still conflict.
                for ep in orm.EnvironmentProject.objects.filter(environment__in=from_envs):
                    try:
                        with transaction.atomic():
                            ep.update(environment=to_env)
                    except IntegrityError:
                        ep.delete()
            from_env_ids = [e.id for e in from_envs]
            try:
                with transaction.atomic():
                    orm.ReleaseEnvironment.objects.filter(
                        environment_id__in=from_env_ids,
                    ).update(environment_id=to_env.id)
            except IntegrityError:
                # Same bulk-then-row-by-row fallback for release environments.
                for re in orm.ReleaseEnvironment.objects.filter(environment_id__in=from_env_ids):
                    try:
                        with transaction.atomic():
                            re.update(environment_id=to_env.id)
                    except IntegrityError:
                        re.delete()
            orm.Environment.objects.filter(id__in=from_env_ids).delete()
        # Pass 2: collapse duplicate ReleaseEnvironment rows, keeping the
        # earliest-seen row per (release, organization, environment).
        dupe_release_envs = orm.ReleaseEnvironment.objects.values(
            'release_id', 'organization_id', 'environment_id'
        ).annotate(
            recount=models.Count('id')
        ).filter(recount__gt=1)
        for renv in RangeQuerySetWrapperWithProgressBar(dupe_release_envs):
            release_id = renv['release_id']
            organization_id = renv['organization_id']
            environment_id = renv['environment_id']
            renvs = list(orm.ReleaseEnvironment.objects.filter(
                release_id=release_id,
                organization_id=organization_id,
                environment_id=environment_id,
            ).order_by('first_seen'))
            to_renv = renvs[0]
            from_renvs = renvs[1:]
            # Preserve the most recent activity timestamp on the survivor.
            last_seen = max([re.last_seen for re in renvs])
            to_renv.update(last_seen=last_seen)
            orm.ReleaseEnvironment.objects.filter(
                id__in=[re.id for re in from_renvs],
            ).delete()
        # Reopen the transaction South expects to be active.
        db.start_transaction()
def backwards(self, orm):
    """No-op: the forward deduplication destroys rows and cannot be undone."""
    pass
models = {
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.apikey': {
'Meta': {'object_name': 'ApiKey'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.apitoken': {
'Meta': {'object_name': 'ApiToken'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiKey']", 'null': 'True'}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.auditlogentry': {
'Meta': {'object_name': 'AuditLogEntry'},
'actor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_actors'", 'null': 'True', 'to': "orm['sentry.User']"}),
'actor_key': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiKey']", 'null': 'True', 'blank': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'target_user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.authenticator': {
'Meta': {'unique_together': "(('user', 'type'),)", 'object_name': 'Authenticator', 'db_table': "'auth_authenticator'"},
'config': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'last_used_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authidentity': {
'Meta': {'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity'},
'auth_provider': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.AuthProvider']"}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authprovider': {
'Meta': {'object_name': 'AuthProvider'},
'config': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'default_teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'unique': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.broadcast': {
'Meta': {'object_name': 'Broadcast'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_expires': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2017, 3, 2, 0, 0)', 'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'upstream_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
},
'sentry.broadcastseen': {
'Meta': {'unique_together': "(('broadcast', 'user'),)", 'object_name': 'BroadcastSeen'},
'broadcast': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Broadcast']"}),
'date_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.commit': {
'Meta': {'unique_together': "(('repository_id', 'key'),)", 'object_name': 'Commit', 'index_together': "(('repository_id', 'date_added'),)"},
'author': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.CommitAuthor']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'message': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.commitauthor': {
'Meta': {'unique_together': "(('organization_id', 'email'),)", 'object_name': 'CommitAuthor'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.commitfilechange': {
'Meta': {'unique_together': "(('commit', 'filename'),)", 'object_name': 'CommitFileChange'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '1'})
},
'sentry.counter': {
'Meta': {'object_name': 'Counter', 'db_table': "'sentry_projectcounter'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'unique': 'True'}),
'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.dsymbundle': {
'Meta': {'object_name': 'DSymBundle'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.DSymObject']"}),
'sdk': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.DSymSDK']"})
},
'sentry.dsymobject': {
'Meta': {'object_name': 'DSymObject'},
'cpu_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object_path': ('django.db.models.fields.TextField', [], {'db_index': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'db_index': 'True'}),
'vmaddr': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'vmsize': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'})
},
'sentry.dsymsdk': {
'Meta': {'object_name': 'DSymSDK', 'index_together': "[('version_major', 'version_minor', 'version_patchlevel', 'version_build')]"},
'dsym_type': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'sdk_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'version_build': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'version_major': ('django.db.models.fields.IntegerField', [], {}),
'version_minor': ('django.db.models.fields.IntegerField', [], {}),
'version_patchlevel': ('django.db.models.fields.IntegerField', [], {})
},
'sentry.dsymsymbol': {
'Meta': {'unique_together': "[('object', 'address')]", 'object_name': 'DSymSymbol'},
'address': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.DSymObject']"}),
'symbol': ('django.db.models.fields.TextField', [], {})
},
'sentry.environment': {
'Meta': {'unique_together': "(('project_id', 'name'),)", 'object_name': 'Environment'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'through': "orm['sentry.EnvironmentProject']", 'symmetrical': 'False'})
},
'sentry.environmentproject': {
'Meta': {'unique_together': "(('project', 'environment'),)", 'object_name': 'EnvironmentProject'},
'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.event': {
'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group_id', 'datetime'),)"},
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventprocessingissue': {
'Meta': {'unique_together': "(('raw_event', 'processing_issue'),)", 'object_name': 'EventProcessingIssue'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'processing_issue': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProcessingIssue']"}),
'raw_event': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.RawEvent']"})
},
'sentry.eventtag': {
'Meta': {'unique_together': "(('event_id', 'key_id', 'value_id'),)", 'object_name': 'EventTag', 'index_together': "(('project_id', 'key_id', 'value_id'), ('group_id', 'key_id', 'value_id'))"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventuser': {
'Meta': {'unique_together': "(('project', 'ident'), ('project', 'hash'))", 'object_name': 'EventUser', 'index_together': "(('project', 'email'), ('project', 'username'), ('project', 'ip_address'))"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'})
},
'sentry.file': {
'Meta': {'object_name': 'File'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'legacy_blob'", 'null': 'True', 'to': "orm['sentry.FileBlob']"}),
'blobs': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.FileBlob']", 'through': "orm['sentry.FileBlobIndex']", 'symmetrical': 'False'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'headers': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.fileblob': {
'Meta': {'object_name': 'FileBlob'},
'checksum': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'})
},
'sentry.fileblobindex': {
'Meta': {'unique_together': "(('file', 'blob', 'offset'),)", 'object_name': 'FileBlobIndex'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.FileBlob']"}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.globaldsymfile': {
'Meta': {'object_name': 'GlobalDSymFile'},
'cpu_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '36'})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'short_id'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'", 'index_together': "(('project', 'first_release'),)"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'short_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupassignee': {
'Meta': {'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupcommitresolution': {
'Meta': {'unique_together': "(('group_id', 'commit_id'),)", 'object_name': 'GroupCommitResolution'},
'commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.groupemailthread': {
'Meta': {'unique_together': "(('email', 'group'), ('email', 'msgid'))", 'object_name': 'GroupEmailThread'},
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'msgid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Project']"})
},
'sentry.grouphash': {
'Meta': {'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupredirect': {
'Meta': {'object_name': 'GroupRedirect'},
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'previous_group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'unique': 'True'})
},
'sentry.grouprelease': {
'Meta': {'unique_together': "(('group_id', 'release_id', 'environment'),)", 'object_name': 'GroupRelease'},
'environment': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.groupresolution': {
'Meta': {'object_name': 'GroupResolution'},
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouprulestatus': {
'Meta': {'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'rule': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.groupsnooze': {
'Meta': {'object_name': 'GroupSnooze'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'until': ('django.db.models.fields.DateTimeField', [], {})
},
'sentry.groupsubscription': {
'Meta': {'unique_together': "(('group', 'user'),)", 'object_name': 'GroupSubscription'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'subscription_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'subscription_set'", 'to': "orm['sentry.Project']"}),
'reason': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('group', 'key', 'value'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'", 'index_together': "(('project', 'key', 'value', 'last_seen'),)"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'null': 'True', 'to': "orm['sentry.Project']"}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {'object_name': 'Organization'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.organizationaccessrequest': {
'Meta': {'unique_together': "(('team', 'member'),)", 'object_name': 'OrganizationAccessRequest'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'member': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationavatar': {
'Meta': {'object_name': 'OrganizationAvatar'},
'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.Organization']"})
},
'sentry.organizationmember': {
'Meta': {'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Organization']"}),
'role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMemberTeam']", 'blank': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.organizationmemberteam': {
'Meta': {'unique_together': "(('team', 'organizationmember'),)", 'object_name': 'OrganizationMemberTeam', 'db_table': "'sentry_organizationmember_teams'"},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'organizationmember': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationonboardingtask': {
'Meta': {'unique_together': "(('organization', 'task'),)", 'object_name': 'OrganizationOnboardingTask'},
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.organizationoption': {
'Meta': {'unique_together': "(('organization', 'key'),)", 'object_name': 'OrganizationOption', 'db_table': "'sentry_organizationoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.processingissue': {
'Meta': {'unique_together': "(('project', 'checksum', 'type'),)", 'object_name': 'ProcessingIssue'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'), ('organization', 'slug'))", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'first_event': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0', 'null': 'True'}),
'forced_color': ('django.db.models.fields.CharField', [], {'max_length': '6', 'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.projectbookmark': {
'Meta': {'unique_together': "(('project_id', 'user'),)", 'object_name': 'ProjectBookmark'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.projectdsymfile': {
'Meta': {'unique_together': "(('project', 'uuid'),)", 'object_name': 'ProjectDSymFile'},
'cpu_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36'})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.projectplatform': {
'Meta': {'unique_together': "(('project_id', 'platform'),)", 'object_name': 'ProjectPlatform'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.rawevent': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'RawEvent'},
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.release': {
'Meta': {'unique_together': "(('organization', 'version'),)", 'object_name': 'Release'},
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_released': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True', 'blank': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'releases'", 'symmetrical': 'False', 'through': "orm['sentry.ReleaseProject']", 'to': "orm['sentry.Project']"}),
'ref': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.releasecommit': {
'Meta': {'unique_together': "(('release', 'commit'), ('release', 'order'))", 'object_name': 'ReleaseCommit'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.releaseenvironment': {
'Meta': {'unique_together': "(('project_id', 'release_id', 'environment_id'),)", 'object_name': 'ReleaseEnvironment', 'db_table': "'sentry_environmentrelease'"},
'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.releasefile': {
'Meta': {'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile'},
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'name': ('django.db.models.fields.TextField', [], {}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.releaseproject': {
'Meta': {'unique_together': "(('project', 'release'),)", 'object_name': 'ReleaseProject', 'db_table': "'sentry_release_project'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.repository': {
'Meta': {'unique_together': "(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))", 'object_name': 'Repository'},
'config': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
'sentry.reprocessingreport': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'ReprocessingReport'},
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.rule': {
'Meta': {'object_name': 'Rule'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.savedsearch': {
'Meta': {'unique_together': "(('project', 'name'),)", 'object_name': 'SavedSearch'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.savedsearchuserdefault': {
'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'SavedSearchUserDefault', 'db_table': "'sentry_savedsearch_userdefault'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'savedsearch': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.SavedSearch']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_password_expired': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_password_change': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_column': "'first_name'", 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'session_nonce': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'sentry.useravatar': {
'Meta': {'object_name': 'UserAvatar'},
'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.User']"})
},
'sentry.useremail': {
'Meta': {'unique_together': "(('user', 'email'),)", 'object_name': 'UserEmail'},
'date_hash_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'emails'", 'to': "orm['sentry.User']"}),
'validation_hash': ('django.db.models.fields.CharField', [], {'default': "u'xSM70zG7MyRUVIUcaNBY2CyvizXoGfhQ'", 'max_length': '32'})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.userreport': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'UserReport', 'index_together': "(('project', 'event_id'), ('project', 'date_added'))"},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
}
}
complete_apps = ['sentry']
symmetrical = True
| BuildingLink/sentry | src/sentry/south_migrations/0302_merge_environments.py | Python | bsd-3-clause | 70,522 |
# -*- encoding: utf-8 -*-
#ABECEDARIO
# Lowercase Spanish alphabet: "a".."z" with "ñ" inserted right after "n".
# Fix: the original used map(chr, ...), whose result has no .index() under
# Python 3; a list comprehension is behavior-identical on Python 2 as well.
abecedario = [chr(code) for code in range(97, 123)]
# Position immediately after "n", where "ñ" belongs.
index = abecedario.index("n") + 1
abecedario.insert(index, "ñ")
vocales = ["a", "e", "i", "o", "u"]
#DICCIONARIOS
# Word banks (Spanish) used as raw material for sentence building.
# Verbs in infinitive form (one per conjugation class: -ir, -ar, -er).
Acciones = ["medir", "comprar", "querer"]
# Subject pronouns (accent-free, informal spelling).
Personales = ['yo','tu','el','nosotros','ustedes','ellos']
# NOTE(review): despite the name, these are nouns, not pronouns — confirm intent.
Pronombres = ["casa", "apartamento"]
Adjetivos = ["pequeña", "grande"]
# Indefinite articles.
Indeterminados = ["un", "una", "unos", "unas"]
# Measurement units.
Unidades = ["m2", "Kilometros", "km"]
# Conditional / temporal connectors.
Condiciones = ["si", "tal vez", "puede ser", "mientras"]
# Aggregate of all the word banks above.
pronombres = [Personales, Acciones, Pronombres, Adjetivos, Indeterminados, Unidades, Condiciones]
#CONJUGACIONES
# CONJUGATIONS: regular verb endings for -ar / -er / -ir verbs (TAR / TER / TIR),
# one entry per grammatical person: [yo, tú, él, nosotros, vosotros, ellos].
# NOTE(review): TAR/TER/TIR are rebound at every tense below, so after this
# whole section only the LAST assignment of each name is live; the earlier
# rows act purely as documentation. Confirm this is intentional.
# NOTE(review): several rows contain apparent typos ("aí"/"ias" in the
# imperfect indicative, "aban" in the -ar imperfect subjunctive) — data kept
# verbatim; verify against a conjugation reference.
#FORMAS SIMPLES DEL INDICATIVO (simple indicative forms)
#Presente Indicativo #Presente (present)
TAR = ["o","as","a","amos","áis","an"]
TER = ["o","es","e","emos","éis","en"]
TIR = ["o","es","e","imos","ís","en"]
#Futuro de Indicativo #Futuro (future)
TAR = ["é","ás","á","emos","éis","án"]
TER = ["é","ás","ás","emos","éis","án"]
TIR = ["é","ás","á","emos","éis","án"]
#Preterito Perfecto Simple de Indicativo #Pasado (simple past)
TAR = ["é","aste","ó","amos","asteis","aron"]
TER = ["í","iste","ió","imos","isteis","ieron"]
TIR = ["í","iste","ió","imos","isteis","ieron"]
#Preterio Imperfecto de Indicativo #Pasado (imperfect past)
# NOTE(review): only this -ar row uses a distinct name (PIIAR); it is the one
# tense-specific list later referenced by `conjugaciones`.
PIIAR = ["aba","abas","aba","ábamos","abais","aban"]
TER = ["aí","ias","ía","íamos","íais","ían"]
TIR = ["aí","ias","ía","íamos","íais","ían"]
#Condicional Indicativo #Futuro (conditional)
TAR = ["ía", "ías", "ía", "íamos", "íais", "ían"]
TER = ["ía", "ías", "ía", "íamos", "íais", "ían"]
TIR = ["ía", "ías", "ía", "íamos", "íais", "ían"]
#FORMAS SIMPLES DEL SUBJUNTIVO (simple subjunctive forms)
#Presente Subjuntivo #Presente (present)
TAR = ["e","es","e","emos","éis","en"]
TER = ["a","as","a","amos","áis","an"]
TIR = ["a","as","a","amos","áis","an"]
#Pretérito Imperfecto de Subjuntivo #Pasado (imperfect past)
TAR = ["ara","aras","ara","áramos","arais","aban"]
TER = ["iera","ieras","iera","ieramos","ieras","ieran"]
TIR = ["iera","ieras","iera","ieramos","ieras","ieran"]
#Pretérito Imperfecto de Subjuntivo 2 #Pasado (imperfect past, -se variant)
TAR = ["ase","ases","ase","ásemos","aseis","asen"]
TER = ["iese","ieses","iese","ésemos","ieseis","iesen"]
TIR = ["iese","ieses","iese","ésemos","ieseis","iesen"]
#Futuro de Subjuntivo #Futuro (future)
TAR = ["ere", "ares", "are", "áremos", "areis", "aren"]
TER = ["ere", "ares", "are", "áremos", "areis", "aren"]
TIR = ["ere", "ares", "are", "áremos", "areis", "aren"]
#FORMAS COMPUESTAS DEL INDICATIVO (compound indicative forms)
# Compound tenses: an auxiliary ("haber" forms) plus the past participle
# ending ("ado" for -ar verbs, "ido" for -er/-ir verbs).
# NOTE(review): `Auxiliar` is rebound at every tense below, so only the last
# assignment ("Habiendo") is live when `conjugaciones` is built.
#Pretérito Perfecto Compuesto de Indicativo #Pasado (present perfect)
# NOTE(review): 7 entries for 6 grammatical persons — "hay" looks spurious;
# confirm against a conjugation reference.
Auxiliar = ["he", "has", "ha", "hay", "hemos", "habéis", "han"]
TAR = ["ado"]
TER = ["ido"]
TIR = ["ido"]
#Pretérito Pluscuamperfecto de Indicativo #Pasado (pluperfect)
# NOTE(review): "habñiais" looks like a typo for "habíais" — data kept verbatim.
Auxiliar = ["habia", "habias", "habia", "habíamos", "habñiais", "habían"]
TAR = ["ado"]
TER = ["ido"]
TIR = ["ido"]
#Futuro Perfecto de Indicativo #Futuro (future perfect)
Auxiliar = ["habré", "habrás", "habrá", "habremos", "habréis", "habrán"]
TAR = ["ado"]
TER = ["ido"]
TIR = ["ido"]
#Condicional Perfecto de Indicativo #Futuro (conditional perfect)
Auxiliar = ["habría", "habrías", "habría", "habríamos", "habríais", "habrían"]
TAR = ["ado"]
TER = ["ido"]
TIR = ["ido"]
#Preterito Anterior (past anterior)
# NOTE(review): "hubiese" among these looks like a typo for "hubiste" — verify.
Auxiliar = ["hube", "hubiese", "hubo", "hubimos", "hubisteis", "hubieron"]
#FORMAS COMPUESTAS DEL SUBJUNTIVO (compound subjunctive forms)
#Infinitivo Compuesto (compound infinitive)
Auxiliar = "haber"
#Pretérito Pluscuamperfecto de Subjuntivo #Pasado (pluperfect, -se variant)
Auxiliar = ["hubiese", "hubieses", "hubieses", "hubiésemos", "hubieseis", "hubiesen"]
#Pretérito Pluscuamperfecto de Subjuntivo 2 #Pasado (pluperfect, -ra variant)
Auxiliar = ["hubiera", "hubieras", "hubiera", "hubiéramos", "hubierais", "hubieran"]
#Futuro Perfecto #Futuro (future perfect subjunctive)
Auxiliar = ["hubiere", "hubieres", "hubiere", "hubiéremos", "hubiereis", "hubieren"]
#Gerundio Compuesto (compound gerund)
Auxiliar = "Habiendo"
#LIBRERIA (library)
# Because of the rebinding above, at this point the concrete values are:
# Auxiliar == "Habiendo", TAR == ["ado"], TER == ["ido"], TIR == ["ido"],
# and PIIAR is the imperfect-indicative -ar row defined earlier.
conjugaciones = [Auxiliar, PIIAR, TIR, TAR, TER]
#PRONOMBRES PERSONALES (personal pronouns)
# Naming scheme presumably: p(ronombre) p(ersonal) + persona + número,
# e.g. pppps = personal, primera persona, singular — TODO confirm.
pppps = ["yo", "me", "mí", "conmigo"]
pppss = ["tú", "te", "ti", "contigo"]
pppts = ["él", "ella", "lo", "la", "le", "se", "sí", "consigo"]
ppppp = ["nosotros", "nosotras", "nos"]
pppsp = ["vosotros", "vosotras", "os"]
ppptp = ["ellos", "ellas", "los", "las", "les"]
#PRONOMBRES DEMOSTRATIVOS (demonstrative pronouns)
#CERCANIA (near)
pdc = ["este", "esta", "esto", "estos", "estas"]
#DISTANCIA MEDIA (mid distance)
pddm = ["ese", "esa", "eso", "esos", "esas"]
#LEJANIA (far)
pdl = ["aquel", "aquella", "aquello", "aquellos", "aquellas"]
#PRONOMBRES POSESIVOS (possessive pronouns)
#UNICO POSEEDOR (single possessor)
ppup = ["mío", "tuyo", "suyo", "mía", "tuya", "suya"]
#MULTIPLES POSEEDORES (multiple possessors)
ppmp = ["nuestro", "vuestro", "nuestra", "vuestra"]
#PRONOMBRES NUMERALES (numeral pronouns)
#CARDINALES (cardinals)
pnc = ["uno", "una"] # placeholder — originally "all the numbers"
#ORDINALES (ordinals)
pno = ["primero", "segundo", "primera", "segunda"]
#MULTIPLICATIVOS (multiplicatives)
pnm = ["doble", "triple"]
#PARTITIVOS (partitives)
pnp = ["medio"]
#LIBRERIA (library)
# Flat collection of every pronoun group above; iterated by Pronombre().
diccionario = [pppps, pppss, pppts, ppppp, pppsp,
ppptp, pdc, pddm, pdl, ppup, ppmp, pnc, pno,
pnm, pnp]
# Sample conjugated verbs for experimentation.
verbos = ["Cantare", "Cantabamos", "Canta", "Jugaba"]
verbo = verbos[3]  # "Jugaba"
#SINTAXIS
#Consonantes
def Consonantes():
    """Return the letters of the module-level ``abecedario`` that are not
    vowels, preserving alphabet order.

    Fix: replaces the inverted `if in: pass / else: append` loop with a
    single comprehension — same output, idiomatic form.
    """
    return [letra for letra in abecedario if letra not in vocales]
# Smoke-test output (Python 2 print statement).
print Consonantes()
#Fonemas
def Fonemas():
    # Cross every consonant with every vowel to build simple CV syllables.
    consonantes = Consonantes()
    fonemas = []
    for letra in consonantes:
        for vocal in vocales:
            fonemas.append(letra + vocal)
    return fonemas


# Python 2 print statement: show the phoneme list at import time.
print Fonemas()
#Buscar Verbo Conjugado
def Conjugacion(verbo):
    """Look at the final character of *verbo* (work in progress).

    Computes the one-character ending and currently discards it;
    always returns None.
    """
    terminacion = verbo[-1:]
    return None
#Buscar Tipo Pronombre
def Pronombre():
    # Walk every pronoun group in the module-level `diccionario` and print
    # each member (Python 2 print statement).
    # NOTE(review): `index` is assigned but never used.
    for grupo in diccionario:
        index = 0
        for elemento in grupo:
            print elemento
# for Persona in zip(singular, plural):
# print Verbo[0:len(Verbo)-2] + terminacion[]
######## REVISAR
# Grammatical-person lookup tables: pronoun -> person label.
singular = {"yo":"primera",
            "tu":"segunda",
            "el":"tercera"}
plural = {"nosotros":"primera",
          "ustedes":"segunda",
          "ellos":"tercera"}
#LEXEMAS
#MORFEMAS
#TIEMPO
#LUGAR
#SENTIMIENTOS
#SENTIDO
#PREDICADO
#SINTAXIS
#PRAGMASIS
#SEMANTICA
| codeapps/pythonista | Conjugaciones.py | Python | mit | 5,814 |
#! /usr/bin/python
# -*- coding: utf-8 -*-
###############################################################################
#
# Rule Classs
#
###############################################################################
class Rule():
    """Transform a formated rule into stochastic matrix.

    A rule string like "a0+a1 = a1a2" is split on '=' into a left and a
    right side; each side is two 2-character particle tokens, optionally
    separated by an explicit operator character.
    """
    def __init__(self, rule):
        # Strip spaces and split into left/right hand sides around '='.
        l, r = rule.replace(' ','').split('=')
        self.op = []
        # NOTE: init_op appends each side's operator to self.op as a side
        # effect, so self.op[0] is the left operator and self.op[1] the right.
        self.left = self.init_op(l)
        self.right = self.init_op(r)
        # Canonical (normalised) string form of the rule.
        self.rule = "%s%s%s = %s%s%s" % (self.left[0], self.op[0], self.left[1], self.right[0], self.op[1], self.right[1])
    def __repr__(self):
        return self.rule
    def init_op(self, LorR):
        # A 4-char side ("a0a1") has an implicit '.' operator; a 5-char
        # side ("a0+a1") carries an explicit operator at index 2.
        if len(LorR) == 4:
            self.op.append('.')
            return [LorR[0:2], LorR[2:4]]
        elif len(LorR) == 5:
            self.op.append(LorR[2])
            return LorR.split(LorR[2])
        else:
            # Python 2 print statement; caller receives -1 on parse failure.
            print "Error : not valid rule"
            return -1
    def is_state_modified(self, i):
        # i selects the particle (0 or 1); compare its state character
        # (second char of the token) across the two sides of the rule.
        return self.left[i][1] != self.right[i][1]
    def get_final_state(self, i):
        # State character of particle i on the right-hand side.
        return self.right[i][1]
###############################################################################
#
# Rule Generator Classs
#
###############################################################################
class RuleGenerator():
    """ Given a list of regex rules, create a list of rules.

    Wildcard characters '*' and '#' in a rule template are each expanded
    over every particle type in l_type; all '*' positions share one type
    and all '#' positions share another (possibly the same) type.
    """
    def __init__(self, l_regex_rules, l_type):
        self.l_regex_rules = l_regex_rules
        self.l_rules = []
        # Consume the template list front-to-back (destructive: pop(0)).
        while self.l_regex_rules:
            idx_star = find(self.l_regex_rules[0], '*')
            idx_sharp = find(self.l_regex_rules[0], '#')
            if idx_star or idx_sharp:
                # Cartesian product over types for the two wildcard kinds.
                for part_type_star in l_type:
                    for part_type_sharp in l_type:
                        current = self.l_regex_rules[0]
                        for star in idx_star:
                            current = replace_at_idx( current, part_type_star, star )
                        for sharp in idx_sharp:
                            current = replace_at_idx( current, part_type_sharp, sharp )
                        self.l_rules.append(current)
            else:
                # No wildcards: the template is already a concrete rule.
                self.l_rules.append(self.l_regex_rules[0])
            self.l_regex_rules.pop(0)
        # When only one wildcard kind appears, the double loop produces
        # duplicates; dedupe (order is not preserved).
        self.l_rules = list(set(self.l_rules))
        # print self.l_rules
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def find(s, ch):
    """Return the list of indices at which character *ch* occurs in *s*."""
    positions = []
    for index, letter in enumerate(s):
        if letter == ch:
            positions.append(index)
    return positions
def replace_at_idx(s, ch, idx):
    """Return a copy of *s* with the character at position *idx* replaced by *ch*."""
    prefix = s[:idx]
    suffix = s[idx+1:]
    return prefix + ch + suffix
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if __name__ == '__main__':
    # Smoke test: parse a simple rule and print it (Python 2 print statement).
    r = Rule("a0+a1 = a1a2")
    print r
print r.get_final_state(1) | Fritzip/ReactionNetwork | SRC/rule.py | Python | gpl-2.0 | 2,449 |
import recontree as RT, mul_tree as MT, global_vars as globs
def orthLabel(gene_trees, min_maps, min_tree, min_clade):
    """Write orthology calls and labeled gene trees for the best maps.

    gene_trees : dict of gene_num -> (newick string, tree-info dict)
    min_maps   : dict of gene_num -> list of lowest-scoring map results
    min_tree / min_clade : the best MUL-tree and its hybrid clade
    Output is written to globs.orth_file_name and globs.labeled_tree_file.
    NOTE(review): parameter semantics inferred from usage here — confirm
    against the callers.
    """
    orthfile = open(globs.orth_file_name, "w");
    lfile = open(globs.labeled_tree_file, "w");
    orthfile.write(MT.mulPrint(min_tree, min_clade));
    orthfile.write("\n------------------\n");
    for gene_num in gene_trees:
        gt, ginfo = gene_trees[gene_num];
        # Retrieve gene tree info.
        cur_results = min_maps[gene_num];
        # Getting the maps for the current gene tree.
        if len(cur_results) != 1:
            # Multiple maps tied for the lowest score: note it in both files
            # and emit one labeling/orthology block per tied map below.
            orthfile.write(str(gene_num+1) + "\t* " + str(len(cur_results)) + " maps tied for lowest score. Mapping orthologies to each set of maps!\n");
            lfile.write(str(gene_num+1) + "\t* " + str(len(cur_results)) + " maps tied for lowest score. Labeling trees for each set of maps!\n");
        for each_result in cur_results:
            maps, dups = each_result[3], each_result[4];
            spec_genes = {};
            for gene in ginfo:
                if ginfo[gene][2] == 'tip':
                    # Species label is the suffix after the last underscore.
                    spec = gene[gene.rfind("_")+1:];
                    if spec in min_clade:
                        cur_map = maps[gene];
                        # Label tips in the newick string: '+' when mapped to
                        # the starred (second) copy, '^' otherwise.
                        if "*" in cur_map[0]:
                            labeled_gene = gene + "+";
                            gt = gt.replace(gene, labeled_gene);
                        else:
                            labeled_gene = gene + "^";
                            gt = gt.replace(gene, labeled_gene);
                        # Group genes by species for pairwise calls below.
                        if spec not in spec_genes:
                            spec_genes[spec] = [gene];
                        else:
                            spec_genes[spec].append(gene);
            lfile.write(str(gene_num+1) + "\t" + gt + "\n");
            # NOTE(review): `flag` is assigned but never used.
            flag = 0;
            done = [];
            outline = str(gene_num+1) + "\t";
            for spec in spec_genes:
                if len(spec_genes[spec]) == 1:
                    # Only one gene for this species: no pair to classify.
                    outline += spec_genes[spec][0] + "-SINGLE\t";
                else:
                    # Classify every unordered gene pair within the species.
                    for gene1 in spec_genes[spec]:
                        for gene2 in spec_genes[spec]:
                            if gene2 == gene1 or [gene1,gene2] in done or [gene2,gene1] in done:
                                continue;
                            done.append([gene1,gene2]);
                            outline += gene1 + "-" + gene2 + "-";
                            # A duplication at the pair's LCA means paralogs;
                            # otherwise the pair is called homoeologous.
                            cur_lca = RT.LCA([gene1,gene2],ginfo)[0];
                            if dups[cur_lca] == 1:
                                outline += "PARALOG\t";
                            else:
                                outline += "HOMOEOLOG\t";
            # Drop the trailing tab before writing the line.
            orthfile.write(outline[:-1] + "\n");
    lfile.close();
    orthfile.close();
| gwct/grampa | lib/orth_label.py | Python | gpl-3.0 | 2,091 |
# Copyright Bors LTD
# This file is part of django-icons-mimetypes.
#
# django-icons-mimetypes is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Django-icons-mimetypes is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with django-icons-mimetypes. If not, see <http://www.gnu.org/licenses/>.
from django.template import Template, Context, TemplateSyntaxError
from django.test import TestCase
from icons_mimetypes.templatetags import mimetypes
class IconTestCase(TestCase):
    """Tests for the icon() helper in the mimetypes template tag library."""
    def test_icon_mimetype(self):
        # Default icon size is 64x64.
        self.assertEqual(mimetypes.icon("image/jpeg"), "/static/mimetypes/64x64/image-x-generic.png")
    def test_icon_mimetype_size(self):
        self.assertEqual(mimetypes.icon("image/jpeg", size="32x32"), "/static/mimetypes/32x32/image-x-generic.png")
class TagTestCase(TestCase):
    """Tests for the {% icon %} template tag: literal and variable
    arguments, the optional size argument, and the `as varname` form."""
    def render(self, template, **kwargs):
        # Keep the Context on self so tests can inspect variables that the
        # `as varname` form stores instead of rendering.
        self.context = Context(dict_=kwargs)
        return Template("{% load mimetypes %}" + template).render(self.context)
    def test_mimetype(self):
        self.assertEqual(self.render("""{% icon "image/jpeg" %}"""),
                         "/static/mimetypes/64x64/image-x-generic.png")
    def test_mimetype_size(self):
        self.assertEqual(self.render("""{% icon "image/jpeg" "32x32" %}"""),
                         "/static/mimetypes/32x32/image-x-generic.png")
    def test_mimetype_size_var(self):
        self.assertEqual(self.render("""{% icon "image/jpeg" size %}""", size="16x16"),
                         "/static/mimetypes/16x16/image-x-generic.png")
    def test_mimetype_var(self):
        self.assertEqual(self.render("""{% icon mimetype %}""", mimetype="image/jpeg"),
                         "/static/mimetypes/64x64/image-x-generic.png")
    def test_mimetype_var_size(self):
        self.assertEqual(self.render("""{% icon mimetype "16x16" %}""", mimetype="image/jpeg"),
                         "/static/mimetypes/16x16/image-x-generic.png")
    def test_mimetype_var_size_var(self):
        self.assertEqual(self.render("""{% icon mimetype size %}""", mimetype="image/jpeg", size="24x24"),
                         "/static/mimetypes/24x24/image-x-generic.png")
    def test_mimetype_as_varname(self):
        # The `as` form renders nothing and stores the URL in the context.
        self.assertEqual(self.render("""{% icon "application/pdf" as pdf_icon %}"""), "")
        self.assertEqual(self.context['pdf_icon'], "/static/mimetypes/64x64/application-pdf.png")
    def test_mimetype_size_as_varname(self):
        self.assertEqual(self.render("""{% icon "application/pdf" "32x32" as pdf_icon %}"""), "")
        self.assertEqual(self.context['pdf_icon'], "/static/mimetypes/32x32/application-pdf.png")
    def test_mimetype_size_var_as_varname(self):
        self.assertEqual(self.render("""{% icon "application/pdf" size as pdf_icon %}""", size="16x16"), "")
        self.assertEqual(self.context['pdf_icon'], "/static/mimetypes/16x16/application-pdf.png")
    def test_mimetype_var_as_varname(self):
        self.assertEqual(self.render("""{% icon mimetype as pdf_icon %}""", mimetype="application/pdf"), "")
        self.assertEqual(self.context['pdf_icon'], "/static/mimetypes/64x64/application-pdf.png")
    def test_mimetype_var_size_as_varname(self):
        self.assertEqual(self.render("""{% icon mimetype "32x32" as pdf_icon %}""", mimetype="application/pdf"), "")
        self.assertEqual(self.context['pdf_icon'], "/static/mimetypes/32x32/application-pdf.png")
    def test_mimetype_var_size_var_as_varname(self):
        self.assertEqual(self.render("""{% icon mimetype size as pdf_icon %}""", mimetype="application/pdf",
                                     size="16x16"),
                         "")
        self.assertEqual(self.context['pdf_icon'], "/static/mimetypes/16x16/application-pdf.png")
    def test_errors(self):
        # Malformed tag invocations must raise TemplateSyntaxError.
        self.assertRaises(TemplateSyntaxError, self.render, """{% icon %}""")
        self.assertRaises(TemplateSyntaxError, self.render, """{% icon "image/jpeg" as %}""")
        self.assertRaises(TemplateSyntaxError, self.render, """{% icon "image/jpeg" "32x32" as %}""")
| bors-ltd/django-icons-mimetypes | icons_mimetypes_dev/tests/test_templatetags.py | Python | gpl-3.0 | 4,507 |
from __future__ import with_statement
import unittest
from flask import Flask, Response
from flask_principal import Principal, Permission, Denial, RoleNeed, \
PermissionDenied, identity_changed, Identity, identity_loaded
# Permissions shared by all tests below. An empty Permission has no needs,
# so every identity satisfies it.
anon_permission = Permission()
admin_permission = Permission(RoleNeed('admin'))
admin_or_editor = Permission(RoleNeed('admin'), RoleNeed('editor'))
editor_permission = Permission(RoleNeed('editor'))
admin_denied = Denial(RoleNeed('admin'))
def _on_principal_init(sender, identity):
    # identity_loaded handler: grant the admin role to the 'ali' identity.
    if identity.id == 'ali':
        identity.provides.add(RoleNeed('admin'))
class ReraiseException(Exception):
    """Raised inside views to verify that exceptions propagate (are
    re-raised) through permission contexts instead of being swallowed."""
def mkapp(with_factory=False):
    """Build a Flask app wired with Principal and one route per scenario.

    with_factory: when True, use the application-factory pattern
    (Principal() + init_app) instead of passing the app to the constructor.
    """
    app = Flask(__name__)
    app.secret_key = 'notverysecret'
    app.debug = True
    if with_factory:
        p = Principal()
        p.init_app(app)
    else:
        p = Principal(app)
    identity_loaded.connect(_on_principal_init)
    # Permission required via context manager (anonymous identity -> denied).
    @app.route('/')
    def index():
        with admin_permission.require():
            pass
        return Response('hello')
    # Permission required via decorator.
    @app.route('/a')
    @admin_permission.require()
    def a():
        return Response('hello')
    @app.route('/b')
    @anon_permission.require()
    def b():
        return Response('hello')
    # Exceptions raised inside a granted permission context must propagate.
    @app.route('/c')
    def c():
        with anon_permission.require():
            raise ReraiseException
    @app.route('/d')
    @anon_permission.require()
    def d():
        raise ReraiseException
    # Identity switched to admin before the permission check.
    @app.route('/e')
    def e():
        i = mkadmin()
        identity_changed.send(app, identity=i)
        with admin_permission.require():
            return Response('hello')
    @app.route('/f')
    def f():
        i = mkadmin()
        identity_changed.send(app, identity=i)
        with admin_or_editor.require():
            return Response('hello')
    # Stacked decorators: both permissions must hold.
    @app.route('/g')
    @admin_permission.require()
    @editor_permission.require()
    def g():
        return Response('hello')
    @app.route('/h')
    def h():
        i = Identity('james')
        identity_changed.send(app, identity=i)
        with admin_permission.require():
            with editor_permission.require():
                pass
    # require(403) aborts with an HTTP 403 instead of raising PermissionDenied.
    @app.route('/j')
    def j():
        i = Identity('james')
        identity_changed.send(app, identity=i)
        with admin_permission.require(403):
            with editor_permission.require(403):
                pass
    @app.route('/k')
    @admin_permission.require(403)
    def k():
        return Response('hello')
    # Truthiness of a Permission reflects whether the current identity
    # satisfies it.
    @app.route('/l')
    def l():
        s = []
        if not admin_or_editor:
            s.append("not admin")
        i = Identity('ali')
        identity_changed.send(app, identity=i)
        if admin_or_editor:
            s.append("now admin")
        return Response('\n'.join(s))
    # Denial passes for identities WITHOUT the denied role...
    @app.route("/m")
    def m():
        with admin_denied.require():
            pass
        return Response("OK")
    # ...and fails for identities WITH it.
    @app.route("/n")
    def n():
        i = mkadmin()
        identity_changed.send(app, identity=i)
        with admin_denied.require():
            pass
        return Response("OK")
    # Permission.test() checks without acting as a context manager.
    @app.route("/o")
    def o():
        admin_or_editor.test()
        return Response("OK")
    @app.route("/p")
    def p():
        admin_or_editor.test(404)
        return Response("OK")
    return app
def mkadmin():
    """Return an Identity for user 'ali', who is granted the admin role by
    the identity_loaded handler (_on_principal_init)."""
    return Identity('ali')
class PrincipalUnitTests(unittest.TestCase):
    """Unit tests for Permission/Denial set algebra (no Flask app needed)."""
    def test_permission_union(self):
        p1 = Permission(('a', 'b'))
        p2 = Permission(('a', 'c'))
        p3 = p1.union(p2)
        assert p1.issubset(p3)
        assert p2.issubset(p3)
    def test_permission_difference(self):
        p1 = Permission(('a', 'b'), ('a', 'c'))
        p2 = Permission(('a', 'c'), ('d', 'e'))
        p3 = p1.difference(p2)
        assert p3.needs == set([('a', 'b')])
        p4 = p2.difference(p1)
        assert p4.needs == set([('d', 'e')])
    def test_permission_union_denial(self):
        p1 = Permission(('a', 'b'))
        p2 = Denial(('a', 'c'))
        p3 = p1.union(p2)
        assert p1.issubset(p3)
        assert p2.issubset(p3)
    def test_permission_difference_denial(self):
        p1 = Denial(('a', 'b'), ('a', 'c'))
        p2 = Denial(('a', 'c'), ('d', 'e'))
        p3 = p1.difference(p2)
        assert p3.excludes == set([('a', 'b')])
        p4 = p2.difference(p1)
        assert p4.excludes == set([('d', 'e')])
    def test_reverse_permission(self):
        # reverse() turns needs into excludes.
        p = Permission(('a', 'b'))
        d = p.reverse()
        assert ('a', 'b') in d.excludes
    def test_permission_and(self):
        # Pins the `&` operator to union semantics.
        p1 = Permission(RoleNeed('boss'))
        p2 = Permission(RoleNeed('lackey'))
        p3 = p1 & p2
        p4 = p1.union(p2)
        assert p3.needs == p4.needs
    def test_permission_or(self):
        # Pins the `|` operator to difference semantics.
        p1 = Permission(RoleNeed('boss'), RoleNeed('lackey'))
        p2 = Permission(RoleNeed('lackey'), RoleNeed('underling'))
        p3 = p1 | p2
        p4 = p1.difference(p2)
        assert p3.needs == p4.needs
    def test_contains(self):
        # `in` mirrors issubset for permissions.
        p1 = Permission(RoleNeed('boss'), RoleNeed('lackey'))
        p2 = Permission(RoleNeed('lackey'))
        assert p2.issubset(p1)
        assert p2 in p1
class PrincipalApplicationTests(unittest.TestCase):
    """Integration tests that exercise permissions through the routes
    registered in mkapp()."""
    def setUp(self):
        self.client = mkapp().test_client()
    def test_deny_with(self):
        self.assertRaises(PermissionDenied, self.client.open, '/')
    def test_deny_view(self):
        self.assertRaises(PermissionDenied, self.client.open, '/a')
    def test_allow_view(self):
        assert self.client.open('/b').data == b'hello'
    def test_reraise(self):
        self.assertRaises(ReraiseException, self.client.open, '/c')
    def test_error_view(self):
        self.assertRaises(ReraiseException, self.client.open, '/d')
    def test_identity_changed(self):
        assert self.client.open('/e').data == b'hello'
    def test_identity_load(self):
        assert self.client.open('/e').data == b'hello'
        assert self.client.open('/a').data == b'hello'
    def test_or_permissions(self):
        assert self.client.open('/e').data == b'hello'
        assert self.client.open('/f').data == b'hello'
    def test_and_permissions_view_denied(self):
        self.assertRaises(PermissionDenied, self.client.open, '/g')
    def test_and_permissions_view(self):
        # NOTE(review): identical to test_and_permissions_view_denied;
        # probably intended to hit a different route (e.g. '/h') — confirm.
        self.assertRaises(PermissionDenied, self.client.open, '/g')
    def test_and_permissions_view_with_http_exc(self):
        response = self.client.open("/j")
        assert response.status_code == 403
    def test_and_permissions_view_with_http_exc_decorated(self):
        response = self.client.open("/k")
        assert response.status_code == 403
    def test_and_permissions_view_with_custom_errhandler(self):
        # A registered 403 handler receives the denied permission as the
        # error description and can turn it into a normal response.
        app = mkapp()
        @app.errorhandler(403)
        def handle_permission_denied(error):
            assert error.description == admin_permission
            return Response("OK")
        self.client = app.test_client()
        response = self.client.open("/k")
        assert response.status_code == 200
    def test_permission_bool(self):
        response = self.client.open('/l')
        assert response.status_code == 200
        assert b'not admin' in response.data
        assert b'now admin' in response.data
    def test_denied_passes(self):
        response = self.client.open("/m")
        assert response.status_code == 200
    def test_denied_fails(self):
        self.assertRaises(PermissionDenied, self.client.open, '/n')
    def test_permission_test(self):
        self.assertRaises(PermissionDenied, self.client.open, '/o')
    def test_permission_test_with_http_exc(self):
        response = self.client.open("/p")
        assert response.status_code == 404
class FactoryMethodPrincipalApplicationTests(PrincipalApplicationTests):
    """Re-run all application tests with Principal configured through the
    application-factory pattern (Principal() + init_app)."""
    def setUp(self):
        self.client = mkapp(with_factory=True).test_client()
| MichiganLabs/flask-principal | test_principal.py | Python | mit | 7,923 |
# Copyright (c) 2012-2022, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
def validate_homedirectory_type(homedirectory_type):
    """
    Validate HomeDirectoryType for User
    Property: User.HomeDirectoryType

    Returns the value unchanged when valid; raises ValueError otherwise.
    """
    valid_types = ("LOGICAL", "PATH")
    if homedirectory_type in valid_types:
        return homedirectory_type
    raise ValueError(
        "User HomeDirectoryType must be one of: %s" % ", ".join(valid_types)
    )
| cloudtools/troposphere | troposphere/validators/transfer.py | Python | bsd-2-clause | 563 |
"""
Encrypts secrets using kms key wrapping.
Usage:
kms_wrap
[--region=<region_name>]
[--profile=<profile name>]
--key <key_alias_or_id>
Options:
-r --region=<region_name> AWS Region Name
-p --profile=<profile_name> the name of the profile to use to connect to aws
-k --key=<key_alias_or_id> the alias or id of the kms key to use
"""
from argparse import ArgumentParser
import os
import sys
from .kms_wrapped import encrypt
def get_parser():
    """Build the command-line parser for kms_wrap.

    Accepts -r/--region, -p/--profile (both optional, default None) and
    -k/--key (the KMS key alias or id).
    """
    parser = ArgumentParser()
    # (short, long, dest, metavar, help, explicit None default)
    option_specs = (
        ('-r', '--region', 'region', 'region_name',
         'aws region name, e.g., us-east-2', True),
        ('-p', '--profile', 'profile', 'profile_name',
         'the name of the profile to use when connecting to aws', True),
        ('-k', '--key', 'key', '<id or alias>',
         'the name of the key to use for encryption', False),
    )
    for short_flag, long_flag, dest, metavar, help_text, has_default in option_specs:
        kwargs = {'dest': dest, 'metavar': metavar, 'help': help_text}
        if has_default:
            kwargs['default'] = None
        parser.add_argument(short_flag, long_flag, **kwargs)
    return parser
def main():
    """Read one line of plaintext from stdin and print its KMS-wrapped
    ciphertext using the key/profile/region given on the command line."""
    parser = get_parser()
    args = parser.parse_args()
    # do not print prompt if input is being piped
    if sys.stdin.isatty():
        print('Enter plaintext: ', end='', file=sys.stderr)
        sys.stderr.flush()
    # Reopen stdin unbuffered in binary mode so exactly one raw line is read.
    stdin = os.fdopen(sys.stdin.fileno(), 'rb', 0)
    plain_text = stdin.readline()
    plain_text = plain_text.decode('utf-8').rstrip()
    value = encrypt(
        plain_text,
        alias=args.key,
        profile=args.profile,
        region=args.region)
    print(f'{value}')


if __name__ == "__main__":
    main()
| 2ps/djenga | djenga/encryption/kms_wrap.py | Python | bsd-3-clause | 1,631 |
from Queue import Queue, Empty
import threading
import traceback
import inspect
import os, stat
def isiter(obj):
    '''
    test whether an object conforms to the iterator protocol
    '''
    # An iterator returns itself from __iter__() and exposes a next()
    # method (Python 2 protocol); anything missing either -> False.
    try:
        return bool(obj.__iter__() == obj and obj.next)
    except AttributeError:
        return False
class stack_size:
    '''
    context manager that temporarily changes the stack size used for NEW
    threads, restoring the previous size on exit
    '''
    def __init__(self, stack_size):
        self.stack_size = stack_size
        # Remember the size in effect before the context is entered.
        self.old_size = threading.stack_size()
    def __enter__(self):
        # BUG FIX: the original had __enter__ and __exit__ swapped — it
        # restored the old size on entry and applied the new size on exit.
        threading.stack_size(self.stack_size)
    def __exit__(self, type, value, traceback):
        # Restore whatever size was active before the context.
        threading.stack_size(self.old_size)
class Lock:
    '''
    minimal context-manager wrapper around threading.Lock
    '''
    def __init__(self):
        self.lock = threading.Lock()
    def __enter__(self):
        # Block until the underlying lock is acquired.
        self.lock.acquire()
    def __exit__(self, exc_type, exc_value, tb):
        self.lock.release()
class FilenameLogger:
    '''
    logger that appends messages to a file identified by name, reopening
    the file if it disappears or is replaced (e.g. by logrotate)
    '''
    def __init__(self, filename):
        self.filename = filename
        self.filepath = os.path.join(os.getcwd(), filename)
        self.fileobj = open(self.filepath, 'a')
    def __call__(self, msg):
        # someone renamed our file! maybe logrotate'd. open a new one.
        # (The inode comparison catches replacement even when the path
        # still exists.)
        if not os.path.exists(self.filepath) or os.stat(self.filepath)[stat.ST_INO] != os.fstat(self.fileobj.fileno())[stat.ST_INO]:
            self.fileobj = open(self.filepath, 'a')
        self.fileobj.write(msg+'\n')
class FileObjLogger:
    '''
    logger that writes messages to an open file-like object; accepts either
    an already-open fileobj or a filename to open in append mode. With
    neither, calls are silently dropped.
    '''
    def __init__(self, fileobj=None, filename=None):
        # BUG FIX: the original expression parsed as
        # ``(fileobj or open(filename, 'a')) if filename else None``, which
        # discarded an explicitly passed fileobj whenever filename was None
        # (exactly how ThreadSafeLogger constructs this class).
        self.fileobj = fileobj or (open(filename, 'a') if filename else None)
    def __call__(self, msg):
        # Skip writing when there is no target or it has been closed.
        if self.fileobj and not self.fileobj.closed:
            self.fileobj.write(msg+'\n')
class ThreadSafeLogger:
    '''
    Init with a list of loggers to call when this class is called()
    "None" logs to stdout, string obj logs to filename, fileobj logs to write(), callable(obj) acts as a callback.
    formatter=callback allows you to set a callback which can rewrite the message passed to log
    separator=str allows you to set the separator between args passed
    '''
    def __init__(self, *args, **kwargs):
        self.formatter = None
        self.separator = ', '
        if 'formatter' in kwargs and callable(kwargs['formatter']):
            self.formatter = kwargs['formatter']
        if 'separator' in kwargs:
            self.separator = kwargs['separator']
        # Classify each positional arg into a concrete logger callable.
        loggers = set()
        for logger in args:
            if hasattr(logger, 'write') and callable(logger.write):
                loggers.add(FileObjLogger(fileobj=logger))
            elif isinstance(logger, basestring):
                loggers.add(FilenameLogger(logger))
            elif callable(logger):
                loggers.add(logger)
            else:
                loggers.add(self.echo)
        # Fall back to stdout when no loggers were given.
        self.loggers = loggers or (self.echo,)
        self.lock = Lock()
    def __call__(self, *args):
        # Serialize all logging through a single lock for thread safety.
        with self.lock:
            self.callback(*args)
    def callback(self, *args):
        # Join args into one message, apply the optional formatter, fan out.
        msg = self.separator.join(str(arg) for arg in args)
        if self.formatter:
            msg = self.formatter(msg)
        for logger in self.loggers:
            logger(msg)
    def echo(self, msg):
        # Default sink: Python 2 print statement to stdout.
        print msg
class ThreadPool:
    '''
    decorator-style thread pool: wrap a function with the pool instance and
    calls to it are queued for execution on daemon worker threads. Return
    values (including values yielded by generators/iterators) can be logged
    and/or collected on a bounded results queue.
    '''
    def __init__(self, max_threads, log_returns=False, catch_returns=False, logger=None, stack_size=0, return_queue=1000):
        self.lock = threading.Lock()
        self.max = max_threads
        # Default logger is a no-op.
        self.logger = logger or (lambda *x: None)
        # NOTE(review): stack_size is stored but never applied to workers.
        self.stack_size = stack_size
        self.log_returns = log_returns
        self.catch_returns = catch_returns
        self.call_queue = Queue()
        self.returns = Queue(return_queue)
        self.spawn_workers()
    def __call__(self, func):
        # Used as a decorator: calling the wrapped function enqueues the
        # call instead of running it; it returns None immediately.
        def wrapper(*args, **kwargs):
            self.call_queue.put((func, args, kwargs))
        return wrapper
    def spawn_workers(self):
        # Daemon threads so the pool never blocks interpreter shutdown.
        for i in xrange(self.max):
            thread = threading.Thread(target=self.worker, args=(self.call_queue,))
            thread.daemon = True
            thread.start()
    def worker(self, call):
        # Worker loop: pull (func, args, kwargs), run it, collect results.
        while True:
            func, args, kwargs = call.get()
            try:
                result = func(*args, **kwargs)
                if self.catch_returns or self.log_returns:
                    # Generators/iterators are drained so each yielded
                    # value is treated as a separate result.
                    if inspect.isgenerator(result) or isiter(result):
                        for x in result:
                            self.returned(x)
                    else:
                        self.returned(result)
            except:
                # Log the traceback; the worker itself must never die.
                self.logger(traceback.format_exc())
            finally:
                call.task_done()
    def returned(self, result):
        if self.log_returns:
            self.logger(result)
        if self.catch_returns:
            self.returns.put(result)
    def pop(self):
        '''
        pop a result from the queue, blocks if we have none
        '''
        # Returns None when catch_returns is disabled.
        if self.catch_returns:
            result = self.returns.get()
            self.returns.task_done()
            return result
    def iter(self):
        '''
        act as a generator, returning results as they happen
        this method assumes you've already queued all of your calls
        '''
        if not self.catch_returns:
            raise Exception
        # Poll with a short timeout so we notice when all tasks complete.
        while self.call_queue.unfinished_tasks > 0:
            try:
                yield self.returns.get(timeout=0.1)
            except Empty:
                pass
        for value in self.finish():
            yield value
    def flush(self):
        '''
        clear and return the function returns queue
        '''
        if self.catch_returns:
            results = tuple(self.returns.queue)
            self.returns = Queue()
            return results
        return ()
    def finish(self):
        '''
        wait for queue to finish, then return flush()
        '''
        self.call_queue.join()
        return self.flush()
if __name__ == '__main__':
    # Demo: run a generator-returning task on 100 workers and collect the
    # yielded values (Python 2: print statements, xrange).
    log = ThreadSafeLogger()
    # don't use catch_returns unless you're going to use them - otherwise they will never clear and just leak memory
    pool = ThreadPool(100, logger=log, log_returns=True, catch_returns=True)
    import time
    @pool
    def test(i):
        log('from thread', i)
        yield 'yield %i' % i
        # sleep to show how awesome it is that we allow generators
        time.sleep(0.5)
        yield 'yield %i' % (i*10)
    for i in xrange(1,6):
        test(i)
    # because these pops will all happen before the sleep is finished, we'll get numbers < 100
    log('first pop')
    for i in xrange(5):
        log(pool.pop())
    # just to make sure it works
    pool.flush()
    # because these pops will happen after the sleep, we'll get numbers over 100
    results = pool.finish()
    print results
| lunixbochs/meta | snippets/python/threadpool.py | Python | mit | 6,725 |
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
'''
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
from bokeh.util.api import public, internal ; public, internal
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
from contextlib import contextmanager
# External imports
# Bokeh imports
from ..core.templates import DOC_JS
from ..core.json_encoder import serialize_json
from ..settings import settings
from ..util.string import encode_utf8
from .util import FromCurdoc
from .util import check_one_model_or_doc, div_for_render_item, find_existing_docs, standalone_docs_json_and_render_items
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Public API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Internal API
#-----------------------------------------------------------------------------
@internal((1,0,0))
def notebook_content(model, notebook_comms_target=None, theme=FromCurdoc):
''' Return script and div that will display a Bokeh plot in a Jupyter
Notebook.
The data for the plot is stored directly in the returned HTML.
Args:
model (Model) : Bokeh object to render
notebook_comms_target (str, optional) :
A target name for a Jupyter Comms object that can update
the document that is rendered to this notebook div
theme (Theme, optional) :
Defaults to the ``Theme`` instance in the current document.
Setting this to ``None`` uses the default theme or the theme
already specified in the document. Any other value must be an
instance of the ``Theme`` class.
Returns:
script, div, Document
.. note::
Assumes :func:`~bokeh.io.notebook.load_notebook` or the equivalent
has already been executed.
'''
model = check_one_model_or_doc(model)
# Comms handling relies on the fact that the new_doc returned here
# has models with the same IDs as they were started with
with _ModelInEmptyDocument(model, apply_theme=theme) as new_doc:
(docs_json, render_items) = standalone_docs_json_and_render_items([model])
item = render_items[0]
if notebook_comms_target:
item['notebook_comms_target'] = notebook_comms_target
else:
notebook_comms_target = ''
script = DOC_JS.render(
docs_json=serialize_json(docs_json),
render_items=serialize_json(render_items)
)
div = div_for_render_item(item)
return encode_utf8(script), encode_utf8(div), new_doc
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
@contextmanager
def _ModelInEmptyDocument(model, apply_theme=None):
# Note: Comms handling relies on the fact that the new_doc returned
# has models with the same IDs as they were started with
from ..document import Document
doc = find_existing_docs([model])
if apply_theme is FromCurdoc:
from ..io import curdoc; curdoc
doc.theme = curdoc().theme
elif apply_theme is not None:
doc.theme = apply_theme
model._document = None
for ref in model.references():
ref._document = None
new_doc = Document()
new_doc.add_root(model)
if settings.perform_document_validation():
new_doc.validate()
yield new_doc
model._document = doc
for ref in model.references():
ref._document = doc
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| rs2/bokeh | bokeh/embed/notebook.py | Python | bsd-3-clause | 4,638 |
# Copyright 2010 Pablo Arrighi, Alex Concha, Miguel Lezama for version 1.
# Copyright 2013 Pablo Arrighi, Miguel Lezama, Kevin Mazet for version 2.
#
# This file is part of GOOL.
#
# GOOL is free software: you can redistribute it and/or modify it under the terms of the GNU
# General Public License as published by the Free Software Foundation, version 3.
#
# GOOL is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License version 3 for more details.
#
# You should have received a copy of the GNU General Public License along with GOOL,
# in the file COPYING.txt. If not, see <http://www.gnu.org/licenses/>.
import os
class File(object):
def __init__(self, *args):
if len(args) == 1 and isinstance(args[0], str):
self.inializeFromStr(args[0])
def inializeFromStr(self, pathname):
self.pathname = pathname
def getAbsolutePath(self):
return os.path.abspath(self.pathname)
def getName(self):
return self.pathname
def mkdir(self):
if not os.path.exists(self.pathname):
os.makedirs(self.pathname)
return True
return False
class FileReader(object):
def __init__(self, *args):
if len(args) == 1:
if isinstance(args[0], str):
self.inializeFromStr(args[0])
elif isinstance(args[0], File):
self.inializeFromFile(args[0])
def inializeFromStr(self, pathname):
self.file = open(pathname, 'r')
def inializeFromFile(self, filee):
self.file = open(filee.getAbsolutePath(), 'r')
def read(self, *args):
return self.file.read(1)
def getFile(self):
return self.file
def close(self):
return self.file.close()
class FileWriter(object):
def __init__(self, *args):
if len(args) == 1:
if isinstance(args[0], str):
self.inializeFromStr(args[0])
elif isinstance(args[0], File):
self.inializeFromFile(args[0])
def inializeFromStr(self, pathname):
self.file = open(pathname, 'a')
def inializeFromFile(self, filee):
self.file = open(filee.getAbsolutePath(), 'a')
def write(self, *args):
nbArgs = len(args)
if nbArgs == 1:
self.writeStr(args[0])
elif nbArgs==3 and isinstance(args[0], str) and isinstance(args[1], int) and isinstance(args[2], int):
self.writeStr(args[0][args[1]:args[2]])
def writeStr(self, string):
self.file.write(string)
def getFile(self):
return self.file
def close(self):
return self.file.close()
class BufferedReader(object):
def __init__(self, *args):
if len(args) == 1:
if isinstance(args[0], FileReader):
self.inializeFromStr(args[0])
def inializeFromStr(self, reader):
self.reader = reader
def read(self):
return self.reader.read()
def readLine(self):
return self.reader.getFile().readline()
def close(self):
return self.reader.close()
| darrivau/GOOL | src/main/resources/gool/generator/python/goolHelper/IO.py | Python | gpl-3.0 | 2,848 |
#
# Copyright (c) 2014 Nick Douma < n.douma [at] nekoconeko . nl >
#
# This file is part of imagecat.
#
# imagecat is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# imagecat is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with imagecat. If not, see <http://www.gnu.org/licenses/>.
#
class SettingsWrapper(object):
    """ Base class for manipulating GConfig / GSettings. """

    def _init_settings(self, profile=None):
        """ Calls to this functions should cause the internal settings to load or refresh. """
        raise NotImplementedError("_init_settings is not implemented.")

    def _check_list_type(self, lst, tp):
        """
        Performs a simple validation of the given list against the given type.

        `tp` may be a Python type (every element must be an instance of it),
        or the string "hex", in which case every element must parse as a
        hexadecimal string (a leading "#" is ignored).

        Returns True only if every element validates.
        """
        if tp == "hex":
            try:
                for item in lst:
                    int(item.replace("#", ""), 16)
            except (ValueError, TypeError, AttributeError):
                # Non-string elements or non-hex digits both fail validation;
                # the previous bare `except:` also swallowed unrelated errors.
                return False
            return True
        # all() replaces the Python-2-only `len(lst) == len(filter(...))`
        # comparison, which raises TypeError on Python 3 (filter is lazy).
        return all(isinstance(item, tp) for item in lst)

    def _get_string_array(self, key):
        """Retrieve a list of string from a key."""
        raise NotImplementedError("_get_string_array is not implemented.")

    def _get_int_array(self, key):
        """Retrieve a list of integers from a key."""
        raise NotImplementedError("_get_int_array is not implemented.")

    def _get_int(self, key):
        """Retrieve a single integer from a key."""
        raise NotImplementedError("_get_int is not implemented.")

    def _set_hexstring_array(self, key, lst):
        """ Set a list of hexadecimal strings to a key. The input must be validated. """
        raise NotImplementedError("_set_hexstring_array not implemented.")

    def _set_string_array(self, key, lst):
        """ Set a list of strings to a keys. The input must be validated. """
        raise NotImplementedError("_set_string_array not implemented.")

    def _set_int_array(self, key, lst):
        """ Set a list of integers to a keys. The input must be validated. """
        raise NotImplementedError("_set_int_array not implemented.")

    def delay(self):
        """ Enable delay mode. Revert active changes with revert(). Apply them with apply(). """
        raise NotImplementedError("delay not implemented.")

    def revert(self):
        """ Revert active changes. Only valid after delay(). """
        raise NotImplementedError("revert not implemented.")

    def apply(self):
        """ Apply active changes. Only valid after delay(). """
        raise NotImplementedError("apply not implemented.")
| LordGaav/imagecat | imagecat/settingswrapper.py | Python | gpl-3.0 | 2,896 |
import unittest
from conans.test.tools import TestClient
from conans.test.utils.cpp_test_files import cpp_hello_conan_files
from nose.plugins.attrib import attr
@attr("slow")
class ConanTestTest(unittest.TestCase):
    """Integration test for `conan test`: exports a generated "Hello"
    recipe, builds a test_package consumer against it, and checks the
    output of the resulting binary."""

    def _create(self, client, number, version, deps=None, export=True):
        # Generate a "Hello" recipe into the client workspace and
        # optionally export it to the local cache.
        files = cpp_hello_conan_files(number, version, deps)
        client.save(files)
        if export:
            client.run("export lasote/stable")

    def conan_test_test(self):
        client = TestClient()
        files = cpp_hello_conan_files("Hello0", "0.1")
        # Consumer recipe placed in the test_package folder; it builds and
        # runs the `greet` executable against the exported Hello0 package.
        # NOTE(review): the space in "Hello0/0.1@ lasote/stable" looks
        # accidental -- confirm the reference parser tolerates it.
        test_conanfile = '''
from conans import ConanFile, CMake
import os

class HelloReuseConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
    requires = "Hello0/0.1@ lasote/stable"
    generators = "cmake"

    def build(self):
        cmake = CMake(self.settings)
        self.run('cmake "%s" %s' % (self.conanfile_directory, cmake.command_line))
        self.run("cmake --build . %s" % cmake.build_config)

    def test(self):
        # equal to ./bin/greet, but portable win: .\bin\greet
        self.run(os.sep.join([".","bin", "greet"]))
'''
        cmakelist = """PROJECT(MyHello)
cmake_minimum_required(VERSION 2.8)
include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake)
conan_basic_setup()
ADD_EXECUTABLE(greet main.cpp)
TARGET_LINK_LIBRARIES(greet ${CONAN_LIBS})
"""
        files["test_package/CMakeLists.txt"] = cmakelist
        files["test_package/conanfile.py"] = test_conanfile
        files["test_package/main.cpp"] = files["main.cpp"]
        client.save(files)
        client.run("export lasote/stable")
        # `client.run` returns a truthy error flag on failure.
        error = client.run("test -s build_type=Release")
        self.assertFalse(error)
        self.assertIn('Hello Hello0', client.user_io.out)
        # Rebuild with the package option that switches the greeting language.
        error = client.run("test -s build_type=Release -o Hello0:language=1")
        self.assertFalse(error)
        self.assertIn('Hola Hello0', client.user_io.out)
| dragly/conan | conans/test/integration/conan_test_test.py | Python | mit | 1,929 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import json
from config import Config
class DozensApi(object):
    """
    Thin client for the Dozens DNS API
    (https://sites.google.com/a/dozens.jp/docs/)

    Only the handful of endpoints needed to maintain A records are
    wrapped; this is deliberately not a complete binding.
    """

    def __init__(self):
        # Exchange the configured credentials for a request token once,
        # then reuse it on every subsequent call.
        # NOTE: the leading space in the content-type value is preserved
        # from the original; servers strip optional whitespace around
        # header values.
        self.common_headers = {
            "X-Auth-Token": self._get_request_token(),
            "Content-Type": " application/json"
        }

    def _get_request_token(self):
        """Authenticate and return a short-lived API token."""
        response = requests.get(
            'http://dozens.jp/api/authorize.json',
            headers={
                "X-Auth-User": Config.DOZENS_ID,
                "X-Auth-Key": Config.DOZENS_APIKEY
            })
        response.raise_for_status()
        return response.json()["auth_token"]

    def get_records(self, zone_name):
        """Return all records of the given zone as parsed JSON."""
        return self._get("http://dozens.jp/api/record/%s.json" % zone_name).json()

    def post_update_record(self, record_id, prio, content, ttl):
        """Update an existing record and return the API response."""
        body = {"prio": prio, "content": content, "ttl": ttl}
        return self._post(
            "http://dozens.jp/api/record/update/%s.json" % record_id, body).json()

    def post_create_record(self, domain, name,
                           record_type, prio, content, ttl):
        """Create a new record and return the API response."""
        body = {
            "domain": domain,
            "name": name,
            "type": record_type,
            "prio": prio,
            "content": content,
            "ttl": ttl
        }
        return self._post("http://dozens.jp/api/record/create.json", body).json()

    def _get(self, url):
        """GET with the shared auth headers; raises on HTTP errors."""
        response = requests.get(url, headers=self.common_headers)
        response.raise_for_status()
        return response

    def _post(self, url, payload):
        """POST a JSON-encoded payload; raises on HTTP errors."""
        response = requests.post(url, data=json.dumps(payload),
                                 headers=self.common_headers)
        response.raise_for_status()
        return response
| LS1qJ/IP-Updater-For-Heroku | dozens_ip_updater/dozens_api.py | Python | mit | 2,062 |
from prudentia.domain import Box
from prudentia.simple import SimpleProvider, SimpleCli
from prudentia.utils import io
class SshCli(SimpleCli):
    """Interactive shell bound to the plain-SSH box provider."""

    def __init__(self):
        SimpleCli.__init__(self)
        self.provider = SshProvider()
        self.prompt = '(Prudentia > Ssh) '
class SshProvider(SimpleProvider):
    """Provider that manages boxes reachable over plain SSH."""

    NAME = 'ssh'

    def __init__(self):
        super(SshProvider, self).__init__(self.NAME)

    def define_box(self):
        """Interactively collect the attributes of a brand-new box."""
        playbook = io.input_path('playbook path')
        hostname = self.fetch_box_hosts(playbook)
        # Prompts run left-to-right in the same order as the original:
        # name, address, user, password.
        return Box(
            io.input_value('box name', self.suggest_name(hostname)),
            playbook,
            hostname,
            io.input_value('instance address or inventory'),
            io.input_value('remote user', self.active_user),
            io.input_value(
                'password for the remote user',
                default_description='ssh key',
                mandatory=False,
                hidden=True
            )
        )

    def redefine_box(self, previous_box):
        """Interactively re-collect a box's attributes, offering the old values as defaults."""
        playbook = io.input_path('playbook path', previous_box.playbook)
        hostname = self.fetch_box_hosts(playbook)
        ip = io.input_value('instance address or inventory', previous_box.ip)
        user = io.input_value('remote user', previous_box.remote_user)
        # Both password prompts share these options; only the defaults differ
        # depending on whether a password was previously stored.
        pwd_options = {'mandatory': False, 'hidden': True}
        if previous_box.remote_pwd:
            pwd_options['default_value'] = previous_box.remote_pwd
            pwd_options['default_description'] = '*****'
        else:
            pwd_options['default_description'] = 'ssh key'
        pwd = io.input_value('password for the remote user', **pwd_options)
        return Box(previous_box.name, playbook, hostname, ip, user, pwd)
| StarterSquad/prudentia | prudentia/ssh.py | Python | mit | 1,877 |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example adds an ad extension override to a given campaign. To get
campaigns, run get_campaigns.py.
Tags: GeoLocationService.get, AdExtensionOverrideService.mutate
Api: AdWordsOnly
"""
__author__ = 'api.kwinter@gmail.com (Kevin Winter)'
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import AdWordsClient
# Placeholder IDs -- replace with real values before running this example.
ad_id = 'INSERT_AD_GROUP_AD_ID_HERE'
ad_extension_id = 'INSERT_AD_EXTENSION_ID_HERE'
def main(client, ad_id, ad_extension_id):
  """Looks up a geo location and attaches it to an ad as a location override.

  Args:
    client: an initialized AdWordsClient.
    ad_id: id of the ad group ad to override.
    ad_extension_id: id of the location ad extension to override.
  """
  # Initialize appropriate service.
  geo_location_service = client.GetGeoLocationService(
      'https://adwords-sandbox.google.com', 'v201109_1')
  ad_extension_override_service = client.GetAdExtensionOverrideService(
      'https://adwords-sandbox.google.com', 'v201109_1')

  # Construct selector and get geo location info for a given address.
  selector = {
      'addresses': [
          {
              'streetAddress': '1600 Amphitheatre Parkway',
              'cityName': 'Mountain View',
              'provinceCode': 'US-CA',
              'provinceName': 'California',
              'postalCode': '94043',
              'countryCode': 'US'
          }
      ]
  }
  # Get() returns one GeoLocation per address in the selector; we sent one.
  geo_location = geo_location_service.Get(selector)[0]

  # Construct operations and add ad extension override.
  operations = [
      {
          'operator': 'ADD',
          'operand': {
              'adId': ad_id,
              'adExtension': {
                  'xsi_type': 'LocationExtension',
                  'id': ad_extension_id,
                  'address': geo_location['address'],
                  'geoPoint': geo_location['geoPoint'],
                  'encodedLocation': geo_location['encodedLocation'],
                  'source': 'ADWORDS_FRONTEND',
                  # Optional fields.
                  'companyName': 'ACME Inc.',
                  'phoneNumber': '(650) 253-0000'
                  # 'iconMediaId': '...',
                  # 'imageMediaId': '...'
              },
              # Optional fields.
              'overrideInfo': {
                  'LocationOverrideInfo': {
                      'radius': '5',
                      'radiusUnits': 'MILES'
                  }
              }
          }
      }
  ]
  ad_extensions = ad_extension_override_service.Mutate(operations)[0]

  # Display results.
  for ad_extension in ad_extensions['value']:
    print ('Ad extension override with id \'%s\' for ad with id \'%s\' was '
           'added.' % (ad_extension['adExtension']['id'], ad_extension['adId']))
  print
  print ('Usage: %s units, %s operations' % (client.GetUnits(),
                                             client.GetOperations()))
if __name__ == '__main__':
  # Initialize client object.
  client = AdWordsClient(path=os.path.join('..', '..', '..', '..', '..'))
  # ad_id / ad_extension_id are the module-level placeholders defined above.
  main(client, ad_id, ad_extension_id)
| nearlyfreeapps/python-googleadwords | examples/adspygoogle/adwords/v201109_1/campaign_management/add_location_extension_override.py | Python | apache-2.0 | 3,540 |
#proxy12.py
#Proxy object for all modules that have a mib12 executive
import proxy
from pymomo.utilities.typedargs.annotate import returns, param, annotated
from collections import namedtuple
from pymomo.commander.exceptions import *
#Printer functions for displaying return values.
def print_status(status):
	"""
	Break out executive status bits and print them.

	`status` must expose serial, hwtype, approw, status and trapped
	attributes (see MIB12ProxyObject.status).
	"""

	# Parenthesized single-argument print is identical on Python 2
	# (print statement) and valid on Python 3 (print function); the
	# original bare print statements were Python-2-only.
	print("Executive Status Register")
	print("Serial Number: %d" % status.serial)
	print("HW Type: %d" % status.hwtype)
	print("First Application Row: %d" % status.approw)
	print("Runtime Status: 0x%X" % status.status)

	if status.trapped:
		print("\n***Module has crashed and is waiting for debugging; trap bit is set.***\n")
	else:
		print("\nModule is running normally.\n")
class MIB12ProxyObject (proxy.MIBProxyObject):
	"""
	Proxy object for all 8-bit PIC modules that run the pic12_executive.
	Executive functionality is implemented here.
	"""

	@returns(desc='application firmware checksum', data=True)
	def checksum(self):
		"""
		Get the 8-bit application checksum.
		"""
		return self.rpc(1, 2, result_type=(1, False))['ints'][0]

	@returns(desc='module status register', data=True, printer=print_status)
	def status(self):
		"""
		Get the module status register.

		Returns executive version, runtime parameters, hw type, executive size
		and whether the module has crashed.
		"""
		res = self.rpc(1, 4, result_type=(2, False))

		# Build an actual namedtuple *instance*. The previous implementation
		# assigned attributes onto the namedtuple class itself and returned
		# the class, which also shared state between calls.
		ExecutiveStatus = namedtuple(
			"ExecutiveStatus", ['serial', 'hwtype', 'approw', 'status', 'trapped'])
		status_bits = res['ints'][1] >> 8
		return ExecutiveStatus(
			serial=res['ints'][0] & 0xFF,   # low byte of first word
			hwtype=res['ints'][0] >> 8,     # high byte of first word
			approw=res['ints'][1] & 0xFF,   # low byte of second word
			status=status_bits,
			trapped=bool(status_bits & (1 << 7)))  # bit 7 = trap flag

	@param('location', 'integer', 'positive', desc='RAM address to read')
	@param('type', 'string', ('list', ['uint8']), desc='Type of variable to read (supports: uint8)')
	@returns(desc='variable contents', data=True)
	def readram(self, location, type='uint8'):
		"""
		Read a variable from module RAM.

		Only 'uint8' is supported (enforced by the @param annotation), so a
		single byte is returned.
		"""
		res = self.rpc(1, 3, location, result_type=(0, True))
		return ord(res['buffer'][0])

	@annotated
	def reset(self):
		"""
		Reset the application module.
		"""
		try:
			self.rpc(1, 1)
		except RPCException as e:
			# NOTE(review): RPC errors of type 7 are deliberately ignored --
			# presumably the expected result of resetting the module; confirm.
			if e.type != 7:
				raise  # bare raise preserves the original traceback
"""
It turns out that 12 cm is the smallest length of wire that can be bent to form an integer sided right angle triangle in exactly one way, but there are many more examples.
12 cm: (3,4,5)
24 cm: (6,8,10)
30 cm: (5,12,13)
36 cm: (9,12,15)
40 cm: (8,15,17)
48 cm: (12,16,20)
In contrast, some lengths of wire, like 20 cm, cannot be bent to form an integer sided right angle triangle, and other lengths allow more than one solution to be found; for example, using 120 cm it is possible to form exactly three different integer sided right angle triangles.
120 cm: (30,40,50), (20,48,52), (24,45,51)
Given that L is the length of the wire, for how many values of L <= 1,500,000 can exactly one integer sided right angle triangle be formed?
"""
from math import sqrt
from fractions import gcd
def calcPerimOfAllPPT(maxP):
    """Return the perimeters of all primitive Pythagorean triples (PPTs)
    whose perimeters are less than or equal to maxP.

    Uses Euclid's formula with coprime u > v >= 1 of opposite parity:
        x = u*u - v*v
        y = 2*u*v
        z = u*u + v*v
        p = x + y + z = 2*u*(u + v)

    Bound derivation (p = perimeter):
        p >= 2*u*u + 2*u*v  =>  p/2 >= u*u + u*v
        fixing u = 1:  p/2 >= 1 + v,  so v < p/2
        given v, solving u*u + u*v - p/2 <= 0:
            u <= (-v + sqrt(v*v + 2*p)) / 2

    There are no duplicate *triples*, but distinct triples may share a
    perimeter, so the returned list can contain duplicate values.
    """
    # fractions.gcd was removed in Python 3.9; math.gcd is the supported
    # replacement and behaves identically for positive integers.
    from math import gcd

    perimeters = []
    for v in range(1, maxP // 2):
        # Largest admissible u for this v (see bound derivation above).
        uBound = int((-v + sqrt(v * v + 2 * maxP)) / 2) + 1
        # Step 2 keeps u and v of opposite parity (u + v odd), as Euclid's
        # formula requires for primitive triples.
        for u in range(v + 1, uBound, 2):
            if gcd(u, v) == 1:
                perimeters.append(2 * u * (u + v))
    return perimeters
def main():
    """Count wire lengths L <= 1,500,000 that form exactly one integer-sided
    right-angle triangle (Project Euler problem 75) and print the count."""
    N = 1500000
    counts = {}
    # Generate perimeters from primitive triples.
    perims = calcPerimOfAllPPT(N)
    # Add in non-primitive triples: every multiple k*p <= N of a primitive
    # perimeter p corresponds to a scaled copy of that triple.
    for perim in perims:
        for k in range(1, N // perim + 1):
            counts[k * perim] = counts.get(k * perim, 0) + 1
    # Count the perimeters that occurred exactly once.
    c = sum(1 for perim in counts if counts[perim] == 1)
    # Parenthesized print works on both Python 2 and 3 (the original bare
    # `print c` was Python-2-only).
    print(c)


if __name__ == '__main__':
    # Guarding the call keeps the module importable without side effects;
    # running it as a script behaves exactly as before.
    main()
| peterstace/project-euler | OLD_PY_CODE/project_euler_old_old/75/75.py | Python | unlicense | 2,033 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Defines `AdditiveExternalRegretOptimizer`.
This optimizer minimizes a `ConstrainedMinimizationProblem` by introducing
Lagrange multipliers, and using `tf.train.Optimizer`s to jointly optimize over
the model parameters and Lagrange multipliers.
For the purposes of constrained optimization, at least in theory,
external-regret minimization suffices if the `ConstrainedMinimizationProblem`
we're optimizing doesn't have any `proxy_constraints`, while swap-regret
minimization should be used if `proxy_constraints` are present.
For more specifics, please refer to:
> Cotter, Jiang and Sridharan. "Two-Player Games for Efficient Non-Convex
> Constrained Optimization".
> [https://arxiv.org/abs/1804.06500](https://arxiv.org/abs/1804.06500)
The formulation used by the AdditiveExternalRegretOptimizer--which is simply the
usual Lagrangian formulation--can be found in Definition 1, and is discussed in
Section 3. This optimizer is most similar to Algorithm 3 in Appendix C.3, with
the two differences being that it uses proxy constraints (if they're provided)
in the update of the model parameters, and uses `tf.train.Optimizer`s, instead
of SGD, for the "inner" updates.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
from tensorflow.contrib.constrained_optimization.python import constrained_optimizer
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import standard_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.training import optimizer as train_optimizer
def _project_multipliers_wrt_euclidean_norm(multipliers, radius):
  """Projects its argument onto the feasible region.

  The feasible region is the set of all vectors with nonnegative elements that
  sum to at most `radius`.

  The projection is computed iteratively: each pass shifts the coordinates
  that remained positive after the previous pass by a common (nonpositive)
  amount so their sum moves toward the `radius` budget, then zeroes out any
  coordinates that became nonpositive. The set of positive coordinates can
  only shrink, so at most `dimension` passes are needed.

  Args:
    multipliers: 1d tensor, the Lagrange multipliers to project.
    radius: float, the radius of the feasible region.

  Returns:
    The 1d tensor that results from projecting `multipliers` onto the feasible
    region w.r.t. the Euclidean norm.

  Raises:
    ValueError: if the `multipliers` tensor is not floating-point, does not have
      a fully-known shape, or is not one-dimensional.
  """
  if not multipliers.dtype.is_floating:
    raise ValueError("multipliers must have a floating-point dtype")
  multipliers_shape = multipliers.get_shape()
  if multipliers_shape.ndims is None:
    raise ValueError("multipliers must have known shape")
  if multipliers_shape.ndims != 1:
    raise ValueError(
        "multipliers must be one dimensional (instead is %d-dimensional)" %
        multipliers_shape.ndims)
  dimension = multipliers_shape.dims[0].value
  if dimension is None:
    raise ValueError("multipliers must have fully-known shape")

  def while_loop_condition(iteration, multipliers, inactive, old_inactive):
    """Returns false if the while loop should terminate."""
    del multipliers  # Needed by the body, but not the condition.
    not_done = (iteration < dimension)
    # Stop early once the set of positive coordinates stops changing
    # between consecutive passes.
    not_converged = standard_ops.reduce_any(
        standard_ops.not_equal(inactive, old_inactive))
    return standard_ops.logical_and(not_done, not_converged)

  def while_loop_body(iteration, multipliers, inactive, old_inactive):
    """Performs one iteration of the projection."""
    del old_inactive  # Needed by the condition, but not the body.
    iteration += 1
    # Spread the remaining budget (radius - sum) evenly over the coordinates
    # marked 1 in `inactive`; clamping `scale` at zero means a pass can only
    # ever shrink the multipliers, never grow them.
    scale = standard_ops.minimum(
        0.0,
        (radius - standard_ops.reduce_sum(multipliers)) / standard_ops.maximum(
            1.0, standard_ops.reduce_sum(inactive)))
    multipliers = multipliers + (scale * inactive)
    # `inactive` holds 1.0 exactly for the coordinates that are still
    # positive after this pass; the rest are clipped to zero.
    new_inactive = standard_ops.cast(multipliers > 0, multipliers.dtype)
    multipliers = multipliers * new_inactive
    return (iteration, multipliers, new_inactive, inactive)

  iteration = standard_ops.constant(0)
  inactive = standard_ops.ones_like(multipliers, dtype=multipliers.dtype)

  # We actually want a do-while loop, so we explicitly call while_loop_body()
  # once before tf.while_loop().
  iteration, multipliers, inactive, old_inactive = while_loop_body(
      iteration, multipliers, inactive, inactive)
  iteration, multipliers, inactive, old_inactive = control_flow_ops.while_loop(
      while_loop_condition,
      while_loop_body,
      loop_vars=(iteration, multipliers, inactive, old_inactive),
      name="euclidean_projection")

  return multipliers
@six.add_metaclass(abc.ABCMeta)
class _ExternalRegretOptimizer(constrained_optimizer.ConstrainedOptimizer):
  """Base class representing an `_ExternalRegretOptimizer`.

  This class contains most of the logic for performing constrained
  optimization, minimizing external regret for the constraints player. What it
  *doesn't* do is keep track of the internal state (the Lagrange multipliers).
  Instead, the state is accessed via the _initial_state(),
  _lagrange_multipliers(), _constraint_grad_and_var() and _projection_op()
  methods.

  The reason for this is that we want to make it easy to implement different
  representations of the internal state.

  For more specifics, please refer to:

  > Cotter, Jiang and Sridharan. "Two-Player Games for Efficient Non-Convex
  > Constrained Optimization".
  > [https://arxiv.org/abs/1804.06500](https://arxiv.org/abs/1804.06500)

  The formulation used by `_ExternalRegretOptimizer`s--which is simply the usual
  Lagrangian formulation--can be found in Definition 1, and is discussed in
  Section 3. Such optimizers are most similar to Algorithm 3 in Appendix C.3.
  """

  def __init__(self, optimizer, constraint_optimizer=None):
    """Constructs a new `_ExternalRegretOptimizer`.

    The difference between `optimizer` and `constraint_optimizer` (if the latter
    is provided) is that the former is used for learning the model parameters,
    while the latter us used for the Lagrange multipliers. If no
    `constraint_optimizer` is provided, then `optimizer` is used for both.

    Args:
      optimizer: tf.train.Optimizer, used to optimize the objective and
        proxy_constraints portion of the ConstrainedMinimizationProblem. If
        constraint_optimizer is not provided, this will also be used to optimize
        the Lagrange multipliers.
      constraint_optimizer: optional tf.train.Optimizer, used to optimize the
        Lagrange multipliers.

    Returns:
      A new `_ExternalRegretOptimizer`.
    """
    super(_ExternalRegretOptimizer, self).__init__(optimizer=optimizer)
    self._constraint_optimizer = constraint_optimizer

  @property
  def constraint_optimizer(self):
    """Returns the `tf.train.Optimizer` used for the Lagrange multipliers."""
    return self._constraint_optimizer

  @abc.abstractmethod
  def _initial_state(self, num_constraints):
    """Returns the initial internal state for `num_constraints` constraints."""
    pass

  @abc.abstractmethod
  def _lagrange_multipliers(self, state):
    """Extracts the Lagrange multipliers from the internal state."""
    pass

  @abc.abstractmethod
  def _constraint_grad_and_var(self, state, gradient):
    """Returns the (gradient, variable) pair used to update the state."""
    pass

  @abc.abstractmethod
  def _projection_op(self, state, name=None):
    """Returns an op projecting the state back onto its feasible region."""
    pass

  def _minimize_constrained(self,
                            minimization_problem,
                            global_step=None,
                            var_list=None,
                            gate_gradients=train_optimizer.Optimizer.GATE_OP,
                            aggregation_method=None,
                            colocate_gradients_with_ops=False,
                            name=None,
                            grad_loss=None):
    """Returns an `Operation` for minimizing the constrained problem.

    The `optimizer` constructor parameter will be used to update the model
    parameters, while the Lagrange multipliers will be updated using
    `constrained_optimizer` (if provided) or `optimizer` (if not).

    Args:
      minimization_problem: ConstrainedMinimizationProblem, the problem to
        optimize.
      global_step: as in `tf.train.Optimizer`'s `minimize` method.
      var_list: as in `tf.train.Optimizer`'s `minimize` method.
      gate_gradients: as in `tf.train.Optimizer`'s `minimize` method.
      aggregation_method: as in `tf.train.Optimizer`'s `minimize` method.
      colocate_gradients_with_ops: as in `tf.train.Optimizer`'s `minimize`
        method.
      name: as in `tf.train.Optimizer`'s `minimize` method.
      grad_loss: as in `tf.train.Optimizer`'s `minimize` method.

    Raises:
      ValueError: If the minimization_problem tensors have different dtypes.

    Returns:
      `Operation`, the train_op.
    """
    objective = minimization_problem.objective

    constraints = minimization_problem.constraints
    proxy_constraints = minimization_problem.proxy_constraints
    if proxy_constraints is None:
      proxy_constraints = constraints
    # Make sure that the objective, constraints and proxy constraints all have
    # the same dtype.
    if (objective.dtype.base_dtype != constraints.dtype.base_dtype or
        objective.dtype.base_dtype != proxy_constraints.dtype.base_dtype):
      raise ValueError("objective, constraints and proxy_constraints must "
                       "have the same dtype")

    # Flatten both constraints tensors to 1d.
    num_constraints = minimization_problem.num_constraints
    constraints = standard_ops.reshape(constraints, shape=(num_constraints,))
    proxy_constraints = standard_ops.reshape(
        proxy_constraints, shape=(num_constraints,))

    # We use a lambda to initialize the state so that, if this function call is
    # inside the scope of a tf.control_dependencies() block, the dependencies
    # will not be applied to the initializer.
    state = standard_ops.Variable(
        lambda: self._initial_state(num_constraints),
        trainable=False,
        name="external_regret_optimizer_state")

    multipliers = self._lagrange_multipliers(state)
    # Lagrangian used for the model-parameter update: the *proxy* constraints
    # are the ones weighted by the current multipliers.
    loss = (
        objective + standard_ops.tensordot(
            standard_ops.cast(multipliers, proxy_constraints.dtype),
            proxy_constraints, 1))
    # The multipliers themselves are updated using the original (non-proxy)
    # constraint violations.
    multipliers_gradient = standard_ops.cast(constraints, multipliers.dtype)

    update_ops = []
    if self.constraint_optimizer is None:
      # If we don't have a separate constraint_optimizer, then we use
      # self._optimizer for both the update of the model parameters, and that of
      # the internal state.
      grads_and_vars = self.optimizer.compute_gradients(
          loss,
          var_list=var_list,
          gate_gradients=gate_gradients,
          aggregation_method=aggregation_method,
          colocate_gradients_with_ops=colocate_gradients_with_ops,
          grad_loss=grad_loss)
      grads_and_vars.append(
          self._constraint_grad_and_var(state, multipliers_gradient))
      update_ops.append(
          self.optimizer.apply_gradients(grads_and_vars, name="update"))
    else:
      # If we have a separate constraint_optimizer, then we use self._optimizer
      # for the update of the model parameters, and self._constraint_optimizer
      # for that of the internal state.
      grads_and_vars = self.optimizer.compute_gradients(
          loss,
          var_list=var_list,
          gate_gradients=gate_gradients,
          aggregation_method=aggregation_method,
          colocate_gradients_with_ops=colocate_gradients_with_ops,
          grad_loss=grad_loss)
      multiplier_grads_and_vars = [
          self._constraint_grad_and_var(state, multipliers_gradient)
      ]

      gradients = [
          gradient for gradient, _ in grads_and_vars + multiplier_grads_and_vars
          if gradient is not None
      ]
      # Force every gradient to be computed before either apply_gradients
      # runs, so neither update observes the other's writes.
      with ops.control_dependencies(gradients):
        update_ops.append(
            self.optimizer.apply_gradients(grads_and_vars, name="update"))
        update_ops.append(
            self.constraint_optimizer.apply_gradients(
                multiplier_grads_and_vars, name="optimizer_state_update"))

    with ops.control_dependencies(update_ops):
      if global_step is None:
        # If we don't have a global step, just project, and we're done.
        return self._projection_op(state, name=name)
      else:
        # If we have a global step, then we need to increment it in addition to
        # projecting.
        projection_op = self._projection_op(state, name="project")
        with ops.colocate_with(global_step):
          global_step_op = state_ops.assign_add(
              global_step, 1, name="global_step_increment")
        return control_flow_ops.group(projection_op, global_step_op, name=name)
class AdditiveExternalRegretOptimizer(_ExternalRegretOptimizer):
  """A `ConstrainedOptimizer` based on external-regret minimization.

  This `ConstrainedOptimizer` uses the given `tf.train.Optimizer`s to jointly
  minimize over the model parameters, and maximize over Lagrange multipliers,
  with the latter maximization using additive updates and an algorithm that
  minimizes external regret.

  For more specifics, please refer to:

  > Cotter, Jiang and Sridharan. "Two-Player Games for Efficient Non-Convex
  > Constrained Optimization".
  > [https://arxiv.org/abs/1804.06500](https://arxiv.org/abs/1804.06500)

  The formulation used by this optimizer--which is simply the usual Lagrangian
  formulation--can be found in Definition 1, and is discussed in Section 3. It
  is most similar to Algorithm 3 in Appendix C.3, with the two differences being
  that it uses proxy constraints (if they're provided) in the update of the
  model parameters, and uses `tf.train.Optimizer`s, instead of SGD, for the
  "inner" updates.
  """

  def __init__(self,
               optimizer,
               constraint_optimizer=None,
               maximum_multiplier_radius=None):
    """Constructs a new `AdditiveExternalRegretOptimizer`.

    Args:
      optimizer: tf.train.Optimizer, used to optimize the objective and
        proxy_constraints portion of ConstrainedMinimizationProblem. If
        constraint_optimizer is not provided, this will also be used to optimize
        the Lagrange multipliers.
      constraint_optimizer: optional tf.train.Optimizer, used to optimize the
        Lagrange multipliers.
      maximum_multiplier_radius: float, an optional upper bound to impose on the
        sum of the Lagrange multipliers.

    Returns:
      A new `AdditiveExternalRegretOptimizer`.

    Raises:
      ValueError: If the maximum_multiplier_radius parameter is nonpositive.
    """
    super(AdditiveExternalRegretOptimizer, self).__init__(
        optimizer=optimizer, constraint_optimizer=constraint_optimizer)

    # A falsy radius (None or 0.0) disables the sum bound entirely;
    # _projection_op then only clips the multipliers at zero.
    if maximum_multiplier_radius and (maximum_multiplier_radius <= 0.0):
      raise ValueError("maximum_multiplier_radius must be strictly positive")

    self._maximum_multiplier_radius = maximum_multiplier_radius

  def _initial_state(self, num_constraints):
    # For an AdditiveExternalRegretOptimizer, the internal state is simply a
    # tensor of Lagrange multipliers with shape (m,), where m is the number of
    # constraints.
    #
    # FUTURE WORK: make the dtype a parameter.
    return standard_ops.zeros((num_constraints,), dtype=dtypes.float32)

  def _lagrange_multipliers(self, state):
    # The state *is* the multiplier vector for this representation.
    return state

  def _constraint_grad_and_var(self, state, gradient):
    # TODO(acotter): tf.colocate_with(), if colocate_gradients_with_ops is True?
    # The multipliers are *maximized*, so the gradient is negated before
    # being handed to a minimizing optimizer.
    return (-gradient, state)

  def _projection_op(self, state, name=None):
    with ops.colocate_with(state):
      if self._maximum_multiplier_radius:
        # Project onto {v : v >= 0, sum(v) <= radius}.
        projected_multipliers = _project_multipliers_wrt_euclidean_norm(
            state, self._maximum_multiplier_radius)
      else:
        # No radius bound: just clip the multipliers at zero.
        projected_multipliers = standard_ops.maximum(state, 0.0)
      return state_ops.assign(state, projected_multipliers, name=name)
| dongjoon-hyun/tensorflow | tensorflow/contrib/constrained_optimization/python/external_regret_optimizer.py | Python | apache-2.0 | 16,425 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.