repo_name
stringlengths 5
92
| path
stringlengths 4
221
| copies
stringclasses 19
values | size
stringlengths 4
6
| content
stringlengths 766
896k
| license
stringclasses 15
values | hash
int64 -9,223,277,421,539,062,000
9,223,102,107B
| line_mean
float64 6.51
99.9
| line_max
int64 32
997
| alpha_frac
float64 0.25
0.96
| autogenerated
bool 1
class | ratio
float64 1.5
13.6
| config_test
bool 2
classes | has_no_keywords
bool 2
classes | few_assignments
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
endlessm/chromium-browser
|
third_party/depot_tools/fetch_configs/config_util.py
|
2
|
1638
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This module holds utilities which make writing configs easier."""
from __future__ import print_function
import json
class Config(object):
  """Base class for all configs.

  Provides methods that are expected to be overridden by child classes. Also
  provides an command-line parsing method that converts the unified command-line
  interface used in depot_tools to the unified python interface defined here."""

  @staticmethod
  def fetch_spec(_props):
    """Returns instructions to check out the project, conditioned on |props|."""
    raise NotImplementedError

  @staticmethod
  def expected_root(_props):
    """Returns the directory into which the checkout will be performed."""
    raise NotImplementedError

  def handle_args(self, argv):
    """Passes the command-line arguments through to the appropriate method.

    Returns 1 (an error exit code) on bad usage; otherwise prints the JSON
    result of the selected action to stdout.
    """
    methods = {'fetch': self.fetch_spec,
               'root': self.expected_root}
    if len(argv) <= 1 or argv[1] not in methods:
      # Fix: original message read "Must specify a a fetch/root action".
      print('Must specify a fetch/root action')
      return 1

    def looks_like_arg(arg):
      # Accepted form is exactly "--key=value".
      return arg.startswith('--') and arg.count('=') == 1

    # Renamed from the misspelled "bad_parms".
    bad_params = [x for x in argv[2:] if not looks_like_arg(x)]
    if bad_params:
      print('Got bad arguments %s' % bad_params)
      return 1

    method = methods[argv[1]]
    # Strip leading dashes, then split each "key=value" on the first '='.
    props = dict(x.split('=', 1) for x in (y.lstrip('-') for y in argv[2:]))
    self.output(method(props))

  @staticmethod
  def output(data):
    """Serializes |data| as JSON on stdout for the depot_tools caller."""
    print(json.dumps(data))
|
bsd-3-clause
| -3,427,243,475,538,356,700
| 30.5
| 80
| 0.681929
| false
| 4.115578
| false
| false
| false
|
TacticalGoat/reddit
|
DelayBotT/delaybotT.py
|
1
|
4260
|
#/u/GoldenSights
import praw
import time
import datetime
import sqlite3
'''USER CONFIGURATION'''
# Reddit OAuth2 application credentials; left blank here and expected to be
# filled in (or overridden via the optional ``bot`` module below).
APP_ID = ""
APP_SECRET = ""
APP_URI = ""
APP_REFRESH = ""
# https://www.reddit.com/comments/3cm1p8/how_to_make_your_bot_use_oauth2/
USERAGENT = ""
#This is a short description of what the bot does. For example "/u/GoldenSights' Newsletter Bot"
SUBREDDIT = "GoldTesting"
#This is the sub or list of subs to scan for new posts. For a single sub, use "sub1". For multiple subreddits, use "sub1+sub2+sub3+..."
MAXPOSTS = 30
#This is how many posts you want to retrieve all at once. PRAW can download 100 at a time.
WAIT = 20
#This is how many seconds you will wait between cycles. The bot is completely inactive during this time.
TSTRING = "[request]"
#This is the part of the title that you want to look for
DELAY = 172800
#This is the time limit between a user's posts, IN SECONDS. 1h = 3600 || 12h = 43200 || 24h = 86400 || 144h = 518400
'''All done!'''

# String form of WAIT, used only for log messages in the main loop.
WAITS = str(WAIT)
# Optionally pull the useragent from a local, uncommitted ``bot`` module so
# credentials stay out of version control; silently fall back otherwise.
try:
    import bot
    USERAGENT = bot.aG
except ImportError:
    pass

# Local SQLite state: one table tracking each known user's last request post,
# one table of post ids that have already been processed.
sql = sqlite3.connect('sql.db')
print('Loaded SQL Database')
cur = sql.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS users(name TEXT, lastpost TEXT)')
print('Loaded Users')
cur.execute('CREATE TABLE IF NOT EXISTS oldposts(id TEXT)')
print('Loaded Oldposts')
sql.commit()

# Authenticate against reddit via OAuth2 using the credentials above.
r = praw.Reddit(USERAGENT)
r.set_oauth_app_info(APP_ID, APP_SECRET, APP_URI)
r.refresh_access_information(APP_REFRESH)
def getTime(as_unix):
    """Return the current UTC time.

    :param as_unix: if truthy, return a Unix timestamp (float); otherwise
        return a timezone-aware ``datetime``. Callers in this script pass
        True/False positionally.
    """
    # Parameter renamed from ``bool``, which shadowed the builtin; the
    # ``== False`` comparison is replaced with plain truthiness.
    now = datetime.datetime.now(datetime.timezone.utc)
    if as_unix:
        return now.timestamp()
    return now
def scan():
    """Scan SUBREDDIT's newest posts for '[request]' titles and enforce the
    per-user posting delay, removing posts made too soon after the author's
    previous tracked post.

    NOTE(review): the dump this was recovered from lost indentation; the
    nesting below is reconstructed and should be confirmed against the
    original repository.
    """
    print('Scanning ' + SUBREDDIT)
    subreddit = r.get_subreddit(SUBREDDIT)
    posts = subreddit.get_new(limit=MAXPOSTS)
    for post in posts:
        # Author can be None for deleted accounts; PRAW raises on access.
        try:
            pauthor = post.author.name
        except Exception:
            pauthor = '[deleted]'
        pid = post.id
        plink = post.short_link
        ptime = post.created_utc
        ptitle = post.title.lower()
        if TSTRING.lower() in ptitle:
            # Skip posts we have already processed in a previous cycle.
            cur.execute('SELECT * FROM oldposts WHERE id=?', [pid])
            if not cur.fetchone():
                cur.execute('SELECT * FROM users WHERE name=?', [pauthor])
                if not cur.fetchone():
                    # First request from this user: record it and send a
                    # welcome PM; no delay is enforced yet.
                    print('Found new user: ' + pauthor)
                    cur.execute('INSERT INTO users VALUES(?, ?)', (pauthor, pid))
                    r.send_message(pauthor, 'Welcome to /r/pkmntcgtrades!','Dear ' + pauthor + ',\n\n Our bot has determined that this is your first time posting in /r/pkmntcgtrades. Please take the time to read [the guidelines](http://www.reddit.com/r/pkmntcgtrades/wiki/guidelines) to understand how the subreddit works.\n\nIf you have any questions, feel free to [message the moderators.](http://www.reddit.com/message/compose?to=%2Fr%2Fpkmntcgtrades) Thanks, and happy trading!', captcha=None)
                    sql.commit()
                    print('\t' + pauthor + ' has been added to the database.')
                    time.sleep(5)
                else:
                    # Known user: compare this post's timestamp against their
                    # last recorded request post.
                    cur.execute('SELECT * FROM users WHERE name=?', [pauthor])
                    fetch = cur.fetchone()
                    print('Found post by known user: ' + pauthor)
                    previousid = fetch[1]
                    previous = r.get_info(thing_id='t3_'+previousid)
                    previoustime = previous.created_utc
                    if ptime > previoustime:
                        curtime = getTime(True)
                        difference = curtime - previoustime
                        if difference >= DELAY:
                            # Enough time has passed; this post becomes the
                            # user's new reference post.
                            print('\tPost complies with timelimit guidelines. Permitting')
                            cur.execute('DELETE FROM users WHERE name=?', [pauthor])
                            cur.execute('INSERT INTO users VALUES(?, ?)', (pauthor, pid))
                            sql.commit()
                            print('\t' + pauthor + "'s database info has been reset.")
                        else:
                            # Too soon: leave a distinguished comment and
                            # remove (not spam-flag) the post.
                            differences = '%.0f' % (DELAY - difference)
                            print('\tPost does not comply with timelimit guidelines. Author must wait ' + differences)
                            print('\t' + pauthor + "'s database info remains unchanged")
                            response = post.add_comment('You are posting here too frequently, so your post has been deleted. You may post again in ' + str(datetime.timedelta(seconds=float(differences))))
                            response.distinguish()
                            post.remove(spam=False)
                            time.sleep(5)
                # Mark the post as seen so it is never reprocessed.
                cur.execute('INSERT INTO oldposts VALUES(?)', [pid])
    sql.commit()
# Main loop: scan forever, swallowing (but reporting) any errors so a single
# network/API failure does not kill the bot, then sleep WAIT seconds.
while True:
    try:
        scan()
    except Exception as e:
        print('An error has occured:', e)
    print('Running again in ' + WAITS + ' seconds.\n')
    time.sleep(WAIT)
|
mit
| -3,111,328,070,677,242,400
| 35.101695
| 482
| 0.68662
| false
| 3.174367
| false
| false
| false
|
recursecenter/RSVPBot
|
strings.py
|
1
|
2577
|
import config
ANNOUNCE_MESSAGE = """
**[{title}]({url})**
{timestamp}
Created by {created_by}
To start an RSVPBot thread for this event:
```{key_word} init {url}```
""".strip()
MSG_CREATE_EVENT_ON_RC_CALENDAR = """
RSVPBot events are saved on the RC calendar. To create an event that will be tracked in this thread, go here: %s/calendar/new?{}
""".strip() % config.rc_root
MSG_INIT_SUCCESSFUL = 'This thread is now an RSVPBot event for **[{}]({})**! Type `rsvp help` for more options.'
MSG_EVENT_MOVED = "This event has been moved to **[%s](%s)**!"
ERROR_INVALID_COMMAND = "`%s` is not a valid RSVPBot command! Type `rsvp help` for the correct syntax."
ERROR_NOT_AN_EVENT = "This thread is not an RSVPBot event! Type `rsvp init event-url` to make it into an event."
ERROR_ALREADY_AN_EVENT = "Oops! That thread is already an RSVPBot event!"
ERROR_MISSING_MOVE_DESTINATION = "`rsvp move` requires a Zulip stream URL destination (e.g. 'https://recurse.zulipchat.com/#narrow/stream/announce/topic/All.20Hands.20Meeting')"
ERROR_BAD_MOVE_DESTINATION = "%s is not a valid move destination URL! `rsvp move` requires a Zulip stream URL destination (e.g. 'https://recurse.zulipchat.com/#narrow/stream/announce/topic/All.20Hands.20Meeting') Type `rsvp help` for the correct syntax."
ERROR_MOVE_ALREADY_AN_EVENT = "Oops! %s is already an RSVPBot event!"
ERROR_EVENT_NOT_FOUND = "Oops! I couldn't find this event: {}"
ERROR_EVENT_ALREADY_INITIALIZED = "Oops! This event was already initialized here: {}"
ERROR_GOOGLE_CALENDAR_NO_LONGER_USED = "Oops! RSVPBot no longer uses Google Calendar, but it uses the [RC Calendar](%s/calendar) instead. This event can be found [here]({})." % config.rc_root
ERROR_FUNCTIONALITY_MOVED = "Oops! RSVPBot doesn't support `rsvp {}` directly anymore. You can now do this [on the RC calendar]({})!"
ERROR_RSVP_MAYBE_NOT_SUPPORTED = "Oops! `rsvp maybe` is no longer supported."
ERROR_CANNOT_INIT_IN_ANNOUNCE_THREAD = "Oops! You cannot `rsvp init` in the announce thread."
ERROR_SERVER_EXCEPTION = ":scream: Something went terribly wrong inside RSVPBot. If this keeps happening, please ping `@Faculty`!"
ERROR_NO_EVENT_ID = """
`rsvp init` must be passed an RC Calendar event ID or URL. For example:
```
rsvp init %s/calendar/123-my-event
```
""".strip() % config.rc_root
ERROR_THREAD_FROM_RC_ALREADY_AN_EVENT = """
Oops! Someone tried to create an event on the RC calendar using this thread, but it's already tracking an event.
Here's the event: **[{title}]({url})**
To start another RSVPBot thread for this event:
```rsvp init {url}```
""".strip()
|
mit
| 5,160,972,723,061,756,000
| 52.6875
| 254
| 0.719829
| false
| 3.078853
| false
| false
| false
|
saymedia/flask-compress
|
flask_compress.py
|
1
|
3222
|
import gzip
try:
from io import BytesIO as IO
except:
import StringIO as IO
from flask import request
class Compress(object):
    """
    The Compress object allows your application to use Flask-Compress.

    When initialising a Compress object you may optionally provide your
    :class:`flask.Flask` application object if it is ready. Otherwise,
    you may provide it later by using the :meth:`init_app` method.

    :param app: optional :class:`flask.Flask` application object
    :type app: :class:`flask.Flask` or None
    """

    def __init__(self, app=None):
        """
        An alternative way to pass your :class:`flask.Flask` application
        object to Flask-Compress. :meth:`init_app` also takes care of some
        default `settings`_.

        :param app: the :class:`flask.Flask` application object.
        """
        self.app = app
        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        """Configure compression defaults and hook into *app*'s response cycle."""
        defaults = [
            ('COMPRESS_MIMETYPES', ['text/html', 'text/css', 'text/xml',
                                    'application/json',
                                    'application/javascript']),
            ('COMPRESS_DEBUG', False),
            ('COMPRESS_LEVEL', 6),
            ('COMPRESS_MIN_SIZE', 500)
        ]
        for k, v in defaults:
            app.config.setdefault(k, v)
        # Bug fix: the original registered the hook via ``self.app``, which is
        # None in the documented two-step usage (``c = Compress()`` then
        # ``c.init_app(app)``). Keep a reference for after_request() and
        # register on the app actually being initialised.
        self.app = app
        if app.config['COMPRESS_MIMETYPES']:
            app.after_request(self.after_request)

    def after_request(self, response):
        """Gzip-compress *response* in place when the client and config allow it."""
        # return the response untouched for responses that will never be
        # gzipped, in any contexts.
        if response.mimetype not in self.app.config['COMPRESS_MIMETYPES']:
            return response

        # At this point, always put the Vary header, even if the content
        # is not gzipped in this particular context.
        # Also, apparently, werkzeug has no documented method to "add", not
        # "set", a header. So we rely on comma separated values.
        if 'Vary' in response.headers and response.headers['Vary'] is not None and response.headers['Vary'] != "":
            response.headers['Vary'] += ', Accept-Encoding'
        else:
            response.headers['Vary'] = 'Accept-Encoding'

        # In debug mode, only compress when explicitly enabled.
        if self.app.debug and not self.app.config['COMPRESS_DEBUG']:
            return response

        accept_encoding = request.headers.get('Accept-Encoding', '')
        if 'gzip' not in accept_encoding.lower():
            return response

        response.direct_passthrough = False

        # Skip non-2xx responses, tiny bodies, and already-encoded content.
        if (response.status_code < 200 or
                response.status_code >= 300 or
                len(response.data) < self.app.config['COMPRESS_MIN_SIZE'] or
                'Content-Encoding' in response.headers):
            return response

        level = self.app.config['COMPRESS_LEVEL']
        gzip_buffer = IO()
        gzip_file = gzip.GzipFile(mode='wb', compresslevel=level,
                                  fileobj=gzip_buffer)
        gzip_file.write(response.data)
        gzip_file.close()

        response.data = gzip_buffer.getvalue()
        response.headers['Content-Encoding'] = 'gzip'
        response.headers['Content-Length'] = len(response.data)
        return response
|
mit
| -5,863,244,114,132,079,000
| 33.645161
| 114
| 0.597455
| false
| 4.261905
| true
| false
| false
|
eco32i/ggplot
|
ggplot/geoms/geom_abline.py
|
1
|
1260
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import matplotlib.pyplot as plt
from matplotlib.dates import drange, date2num
from pandas.lib import Timestamp
from datetime import timedelta
import numpy as np
from .geom import geom
import pandas as pd
class geom_abline(geom):
    """Draw a line with a given slope and intercept across the plot.

    Aesthetics: 'x' (data range used to span the line), 'slope' (default 1.0),
    'intercept' (default 0.0), plus passthrough styling keys.
    """
    VALID_AES = ['x', 'slope', 'intercept', 'color', 'linestyle', 'alpha', 'label']

    def plot_layer(self, layer):
        # Keep only recognised aesthetics, then let manual overrides win.
        layer = dict((k, v) for k, v in layer.items() if k in self.VALID_AES)
        layer.update(self.manual_aes)
        # NOTE(review): if 'x' is absent from the layer, the isinstance()
        # check below raises NameError — looks like 'x' is assumed to always
        # be mapped; confirm against callers.
        if 'x' in layer:
            x = layer.pop('x')
        if 'slope' in layer:
            slope = layer.pop('slope')
        else:
            slope = 1.0
        if 'intercept' in layer:
            intercept = layer.pop('intercept')
        else:
            intercept = 0.0
        if isinstance(x[0], Timestamp):
            # Date axis: plot corner-to-corner of the current view with
            # autoscaling off rather than computing a numeric range.
            gca = plt.gca()
            gca.set_autoscale_on(False)
            gca.plot(gca.get_xlim(),gca.get_ylim())
        else:
            # Sample 100 points across the x range and draw y = slope*x + b.
            # NOTE(review): start is max(x) and stop is min(x), giving a
            # negative step — arange still produces a (descending) range.
            start, stop = np.max(x), np.min(x)
            step = ((stop-start)) / 100.0
            x_rng = np.arange(start, stop, step)
            y_rng = x_rng * slope + intercept
            plt.plot(x_rng, y_rng, **layer)
|
bsd-2-clause
| -2,361,244,803,072,503,300
| 34
| 83
| 0.55873
| false
| 3.6
| false
| false
| false
|
qvazzler/Flexget
|
flexget/plugins/urlrewrite/isohunt.py
|
1
|
3608
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # pylint: disable=unused-import, redefined-builtin
from future.moves.urllib.parse import quote
import logging
import re
import feedparser
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.utils.search import torrent_availability, normalize_unicode
log = logging.getLogger('isohunt')
class UrlRewriteIsoHunt(object):
    """IsoHunt urlrewriter and search plugin.

    should accept:
    isohunt: <category>

    categories:
      empty or -1: All
      0 : Misc.
      1 : Video/Movies
      2 : Audio
      3 : TV
      4 : Games
      5 : Apps
      6 : Pics
      7 : Anime
      8 : Comics
      9 : Books
      10: Music Video
      11: Unclassified
      12: ALL
    """
    # Config schema: a single category name; its position in the search()
    # optionlist maps to isohunt's numeric category id.
    schema = {
        'type': 'string',
        'enum': ['misc', 'movies', 'audio', 'tv', 'games', 'apps', 'pics', 'anime', 'comics', 'books', 'music video',
                 'unclassified', 'all']
    }

    def url_rewritable(self, task, entry):
        """Return True for isohunt torrent-details URLs that can be rewritten."""
        url = entry['url']
        # search is not supported
        if url.startswith('http://isohunt.com/torrents/?ihq='):
            return False
        # not replaceable
        if 'torrent_details' not in url:
            return False
        return url.startswith('http://isohunt.com') and url.find('download') == -1

    def url_rewrite(self, task, entry):
        """Rewrite a details-page URL into the direct download URL."""
        entry['url'] = entry['url'].replace('torrent_details', 'download')

    def search(self, task, entry, config):
        """Query isohunt's RSS search feed and yield result entries."""
        # urllib.quote will crash if the unicode string has non ascii characters,
        # so encode in utf-8 beforehand
        optionlist = ['misc', 'movies', 'audio', 'tv', 'games', 'apps', 'pics', 'anime', 'comics', 'books',
                      'music video', 'unclassified', 'all']
        entries = set()
        search_strings = [normalize_unicode(s) for s in entry.get('search_strings', [entry['title']])]
        for search_string in search_strings:
            # iht is the numeric category: the index of config in optionlist.
            url = 'http://isohunt.com/js/rss/%s?iht=%s&noSL' % (
                quote(search_string.encode('utf-8')), optionlist.index(config))
            log.debug('requesting: %s' % url)
            rss = feedparser.parse(url)
            status = rss.get('status', False)
            if status != 200:
                raise plugin.PluginWarning('Search result not 200 (OK), received %s' % status)
            ex = rss.get('bozo_exception', False)
            if ex:
                raise plugin.PluginWarning('Got bozo_exception (bad feed)')
            for item in rss.entries:
                # NOTE: rebinding of the ``entry`` parameter is intentional
                # here; each feed item becomes a new result Entry.
                entry = Entry()
                entry['title'] = item.title
                entry['url'] = item.link
                # Size/seed/leech stats are embedded in the item description.
                m = re.search(r'Size: ([\d]+).*Seeds: (\d+).*Leechers: (\d+)', item.description, re.IGNORECASE)
                if not m:
                    log.debug('regexp did not find seeds / peer data')
                    continue
                else:
                    log.debug('regexp found size(%s), Seeds(%s) and Leeches(%s)' % (m.group(1), m.group(2), m.group(3)))
                    entry['content_size'] = int(m.group(1))
                    entry['torrent_seeds'] = int(m.group(2))
                    entry['torrent_leeches'] = int(m.group(3))
                    entry['search_sort'] = torrent_availability(entry['torrent_seeds'], entry['torrent_leeches'])
                entries.add(entry)
        return entries
@event('plugin.register')
def register_plugin():
    # Register under both the urlrewriter and search plugin groups.
    plugin.register(UrlRewriteIsoHunt, 'isohunt', groups=['urlrewriter', 'search'], api_ver=2)
|
mit
| -2,577,430,881,180,603,000
| 33.361905
| 120
| 0.56541
| false
| 3.762252
| false
| false
| false
|
standage/sequniq
|
sequniq/parse.py
|
1
|
3833
|
# -----------------------------------------------------------------------------
# Copyright (C) Daniel Standage, 2015. It is licensed under the ISC license,
# see LICENSE.txt. Contact: daniel.standage@gmail.com
# -----------------------------------------------------------------------------
"""
Generators for parsing sequence data records in Fasta and Fastq.
"""
import sys
def get_parser(fastq=True, paired=True):
    """
    Return a pointer to the correct parsing function based on the specified
    format of the data.
    """
    # Bug fix: the ``fastq`` parameter shadows the module-level ``fastq``
    # generator, so the original ``return fastq`` handed back the boolean
    # True instead of the parser function. Look the shadowed generator up in
    # the module namespace instead; the parameter names are kept for
    # backward compatibility with keyword callers.
    if fastq:
        if paired:
            return fastq_paired
        return globals()['fastq']
    if paired:
        return fasta_paired
    return fasta
def check_record(record, fastq=True, paired=True):
    """
    Assert that *record* has the field count implied by its format: Fastq
    records carry a quality field per read, and paired records carry two
    reads' worth of fields.
    """
    expected_len = {
        (True, True): 6,   # paired Fastq
        (True, False): 3,  # unpaired Fastq
        (False, True): 4,  # paired Fasta
        (False, False): 2, # unpaired Fasta
    }[(bool(fastq), bool(paired))]
    assert len(record) == expected_len
def fasta(fp):
    """
    Generator yielding (defline, sequence) tuples from a Fasta stream.
    Adapted from http://stackoverflow.com/a/7655072/459780.
    """
    defline = None
    seq_chunks = []
    for raw_line in fp:
        stripped = raw_line.rstrip()
        if stripped.startswith('>'):
            # New record: flush the one accumulated so far (if any).
            if defline:
                yield defline, ''.join(seq_chunks)
            defline, seq_chunks = stripped, []
        else:
            seq_chunks.append(stripped)
    # Flush the final record.
    if defline:
        yield defline, ''.join(seq_chunks)
def fasta_paired(fp):
    """
    Generator yielding interleaved read pairs from a Fasta stream as
    4-tuples: (defline1, seq1, defline2, seq2).
    """
    pending = None
    for defline, seq in fasta(fp):
        if pending is None:
            # First read of a pair: hold it until its mate arrives.
            pending = (defline, seq)
        else:
            yield pending[0], pending[1], defline, seq
            pending = None
    assert pending is None, 'paired Fasta data contains odd number of sequences'
def fastq(fp):
    """
    Generator yielding unpaired (name, sequence, quality) records from a
    Fastq stream. Only supports 4-line Fastq format.
    """
    fields = []
    for lineno, line in enumerate(fp, 1):
        position = lineno % 4
        if position in (1, 2):
            # Defline and sequence lines.
            fields.append(line.rstrip())
        elif position == 0:
            # Quality line completes the record; the '+' separator (line 3
            # of each record) is skipped entirely.
            fields.append(line.rstrip())
            yield tuple(fields)
            fields = []
def fastq_paired(fp):
    """
    Generator yielding paired records from an interleaved Fastq stream as
    6-tuples: (name1, seq1, qual1, name2, seq2, qual2). Only supports
    4-line Fastq format with interleaved pairs.
    """
    record = []
    for lineno, line in enumerate(fp, 1):
        position = lineno % 8
        # Within each 8-line window, lines 3 and 7 are the '+' separators;
        # every other line carries data in yield order.
        if position in (1, 2, 4, 5, 6, 0):
            record.append(line.rstrip())
        if position == 0:
            yield tuple(record)
            record = []
def write(record, outstream=sys.stdout):
    """
    Write a Fasta/Fastq record to *outstream*.

    Records are tuples:
      - 2 elements = unpaired Fasta
      - 3 elements = unpaired Fastq
      - 4 elements = paired Fasta
      - 6 elements = paired Fastq

    Raises Exception for any other tuple length.
    """
    formats = {
        2: '%s\n%s',
        4: '%s\n%s\n%s\n%s',
        3: '%s\n%s\n+\n%s',
        6: '%s\n%s\n+\n%s\n%s\n%s\n+\n%s',
    }
    if len(record) not in formats:
        # Fix: the original message used '% elements' (a bare '%' with no
        # integer conversion), which mis-rendered the count; '%d' was intended.
        raise Exception('record has %d elements' % len(record))
    # Fix: replaced the Python-2-only ``print >> outstream`` statement with a
    # portable write; the trailing newline matches what print appended.
    outstream.write(formats[len(record)] % record + '\n')
|
isc
| 2,973,343,182,879,858,700
| 26.378571
| 79
| 0.516045
| false
| 3.71775
| false
| false
| false
|
alienlike/courier
|
courier/models/account_link.py
|
1
|
1782
|
from datetime import datetime
from sqlalchemy import Column, Integer, ForeignKey, Boolean, DateTime
from sqlalchemy.orm import relationship, backref
from .base import DeclarativeBase
class AccountLink(DeclarativeBase):
    """Directed link between two accounts, backed by a shared Link row.

    NOTE(review): each logical connection appears to be stored as two
    AccountLink rows pointing at each other via ``peer_id`` — confirm against
    callers before relying on that.
    """
    # table
    __tablename__ = 'account_link'

    # columns
    id = Column(Integer, primary_key=True, nullable=False)
    # Self-referential FK to the mirror-image row for the opposite direction.
    peer_id = Column(Integer, ForeignKey('account_link.id'))
    link_id = Column(Integer, ForeignKey('link.id', ondelete='CASCADE'), nullable=False)
    from_account_id = Column(Integer, ForeignKey('account.id', ondelete='CASCADE'), nullable=False)
    to_account_id = Column(Integer, ForeignKey('account.id', ondelete='CASCADE'), nullable=False)
    created_date = Column(DateTime, nullable=False, default=datetime.now)
    modified_date = Column(DateTime, nullable=False, default=datetime.now, onupdate=datetime.now)
    # Soft-hide flag; rows are never deleted through this flag.
    hidden = Column(Boolean, nullable=False, default=False)

    # relationships
    # post_update avoids a circular-dependency error when two rows reference
    # each other via peer_id.
    peer = relationship('AccountLink', remote_side=[id], post_update=True)
    link = relationship('Link',
                        backref=backref('account_links', lazy=True),
                        primaryjoin='Link.id==AccountLink.link_id')
    from_account = relationship('Account',
                                backref=backref('account_links', lazy=True),
                                primaryjoin='Account.id==AccountLink.from_account_id')
    to_account = relationship('Account',
                              primaryjoin='Account.id==AccountLink.to_account_id') # no backref

    # constructor
    def __init__(self, link, from_account, to_account):
        self.link = link
        self.from_account = from_account
        self.to_account = to_account
        self.hidden = False
|
gpl-3.0
| 4,681,192,711,699,085,000
| 45.921053
| 109
| 0.643659
| false
| 4.242857
| false
| false
| false
|
svm-zhang/poolseq_tk
|
sz_acount.py
|
1
|
3378
|
'''
python poolseq_tk.py count
Description: Count alleles at each SNP give the pileups
Author: Simo V. Zhang
Input: pileup file with reads bases converted to corresponding alleles
Output: pielup file with allele counts
(1) chr
(2) pos
(3) ref base
(4) alt base
(5) allele counts in the order of ref and alt, separated by colon
'''
import collections
import sys
import os
import sz_utils
from colortext import ColorText
def run_count(args):
    ''' Counting alleles at each SNP in the given pileup files.

    Reads an optional whitelist of SNP positions (args.pos), tallies
    ref/alt allele counts per (chr, pos) across every pileup in
    args.pileups, and writes a tab-separated table to args.out.

    NOTE(review): indentation was reconstructed from a whitespace-stripped
    dump; nesting of the output loop should be confirmed upstream.
    '''
    dPos = {}
    if args.pos:
        ColorText().info("[poolseq_tk] reading SNPs positions:", "stderr")
        with open(args.pos, 'r') as fPOS:
            for line in fPOS:
                tmp_line = line.strip().split("\t")
                chr = tmp_line[0]
                pos = int(tmp_line[1])
                if (chr, pos) not in dPos:
                    dPos[chr, pos] = 1
        ColorText().info(" %d\n" %(len(dPos)), "stderr")
    else:
        ColorText().info("[poolseq_tk] no SNP positions provided ... [skipped]\n", "stderr")

    # (chr, pos) -> [ref, alt, nRef1, nAlt1, nRef2, nAlt2, ...] with one
    # count pair appended per pileup file.
    ac = collections.defaultdict(tuple)
    for pileup in args.pileups:
        sz_utils.check_if_files_exist(pileup)
        nsnps = 0
        ColorText().info("[poolseq_tk] counting alleles in %s:" %(os.path.basename(pileup)), "stderr")
        with open(pileup, 'r') as fMPILEUP:
            for line in fMPILEUP:
                nsnps += 1
                tmp_line = line.strip().split("\t")
                chr = tmp_line[0]
                pos = int(tmp_line[1])
                # Keep the site if it is whitelisted, or if no whitelist given.
                if (((chr, pos) in dPos and args.pos) or
                        (len(dPos) == 0 and not args.pos)):
                    ref_base = tmp_line[2]
                    alt_base = tmp_line[3]
                    nRefAlleles, nAltAlleles = 0, 0
                    # Column 5 (if present) holds the converted read bases;
                    # count both cases of each allele.
                    if len(tmp_line) == 5:
                        nRefAlleles = tmp_line[-1].count(ref_base) + \
                                      tmp_line[-1].count(ref_base.lower())
                        nAltAlleles = tmp_line[-1].count(alt_base) + \
                                      tmp_line[-1].count(alt_base.lower())
                    if (chr, pos) not in ac:
                        ac[chr, pos] = [ref_base, alt_base, str(nRefAlleles), str(nAltAlleles)]
                    else:
                        ac[chr, pos] += [str(nRefAlleles), str(nAltAlleles)]
        ColorText().info(" %d SNPs parsed\n" %(nsnps), "stderr")

    fOUT = None
    if args.out == sys.stdout:
        fOUT = sys.stdout
    else:
        sz_utils.make_dirs_if_necessary(args.out)
        fOUT = open(args.out, 'w')
    ColorText().info("[poolseq_tk] outputting allele counts to table ...", "stderr")
    # Fix: ``ac.iterkeys()`` is Python-2-only; sorting the dict directly is
    # equivalent and works on both Python 2 and 3.
    for k in sorted(ac):
        chr = k[0]
        pos = k[1]
        i = 2
        # Only output sites observed in every pileup file.
        if len(ac[k][i:]) == 2*len(args.pileups):
            fOUT.write("%s\t%d\t%s" %(chr, pos, "\t".join(ac[k][0:2])))
            while i <= len(ac[k])-4:
                fOUT.write("\t%s" %(":".join(ac[k][i:i+4])))
                i += 4
            fOUT.write("\n")
    ColorText().info(" [done]\n", "stderr")
    # Fix: the original closed fOUT unconditionally, which closed the
    # process-wide sys.stdout when no output file was given.
    if fOUT is not sys.stdout:
        fOUT.close()
def parseReadsBases(reads_bases, refBase, altBase):
    '''Tally alleles in a samtools mpileup read-bases string.

    Returns (cov, nRefAlleles, nAltAlleles, nOtherAlleles). '.'/',' count as
    the reference allele; altBase (either case) counts as the alternate;
    indel runs ('+'/'-'), deleted bases ('*'), read-start markers ('^' plus
    its quality char), 'N'/'n' and read-end markers ('$') are skipped without
    contributing an allele. ``refBase`` is accepted for interface symmetry
    but is not consulted (ref alleles appear as '.'/',' in pileup).
    '''
    import re  # fix: ``re`` is used below but was never imported in this module
    i = 0
    nRefAlleles, nAltAlleles = 0, 0
    nOtherAlleles = 0
    cov = 0
    while i < len(reads_bases):
        ch = reads_bases[i]
        if ch == '.' or ch == ',':
            nRefAlleles += 1
            i += 1
        elif ch == altBase or ch == altBase.lower():
            nAltAlleles += 1
            i += 1
        elif ch in ['+', '-', '*']:
            if ch == '*':
                i += 1
            else:
                # Indel of the form '+2AC' / '-3TTT': read the (1-2 digit)
                # length and skip the sign, the digits and the indel bases.
                len_indel = int(re.search(r'\d+', reads_bases[i+1:i+3]).group())
                i += len_indel + len(str(len_indel)) + 1
        elif ch == '^':
            # Read-start marker is followed by a mapping-quality character.
            i += 2
        elif ch in ['N', 'n', '$']:
            i += 1
        else:
            nOtherAlleles += 1
            i += 1
        # NOTE: cov is incremented for every token consumed, including
        # markers; this mirrors the original behaviour.
        cov += 1
    return cov, nRefAlleles, nAltAlleles, nOtherAlleles
|
gpl-2.0
| -4,970,120,119,147,440,000
| 26.917355
| 96
| 0.598283
| false
| 2.511524
| false
| false
| false
|
mbauskar/erpnext
|
erpnext/patches/v8_7/make_subscription_from_recurring_data.py
|
1
|
1634
|
# Copyright (c) 2017, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import today
def execute():
    """Patch: create Subscription documents for legacy recurring records.

    Reloads the affected doctypes, then converts every still-active
    recurring document into a Subscription.
    """
    frappe.reload_doc('subscription', 'doctype', 'subscription')
    frappe.reload_doc('selling', 'doctype', 'sales_order')
    frappe.reload_doc('buying', 'doctype', 'purchase_order')
    frappe.reload_doc('accounts', 'doctype', 'sales_invoice')
    frappe.reload_doc('accounts', 'doctype', 'purchase_invoice')

    # Fix: the original list contained 'Purchase Invoice' twice and never
    # migrated Purchase Orders, even though purchase_order is reloaded above.
    for doctype in ['Sales Order', 'Sales Invoice',
            'Purchase Order', 'Purchase Invoice']:
        for data in get_data(doctype):
            make_subscription(doctype, data)
def get_data(doctype):
    """Return legacy recurring documents of *doctype* whose next scheduled
    date is today or later, as dicts of the fields Subscription needs."""
    # Fix: a comma was missing after ``recurring_id``, which made SQL treat
    # ``next_date`` as an alias for recurring_id instead of selecting both.
    return frappe.db.sql(""" select name, from_date, end_date, recurring_type, recurring_id,
        next_date, notify_by_email, notification_email_address, recurring_print_format,
        repeat_on_day_of_month, submit_on_creation
        from `tab{0}` where is_recurring = 1 and next_date >= %s
    """.format(doctype), today(), as_dict=1)
def make_subscription(doctype, data):
    """Create and submit a Subscription mirroring one legacy recurring
    document, then back-link it from the source document.

    :param doctype: source doctype name (e.g. 'Sales Order').
    :param data: dict row from get_data() with the legacy recurring fields.
    """
    doc = frappe.get_doc({
        'doctype': 'Subscription',
        'reference_doctype': doctype,
        'reference_document': data.name,
        'start_date': data.from_date,
        'end_date': data.end_date,
        'frequency': data.recurring_type,
        'repeat_on_day': data.repeat_on_day_of_month,
        'notify_by_email': data.notify_by_email,
        'recipients': data.notification_email_address,
        'next_schedule_date': data.next_date,
        'submit_on_creation': data.submit_on_creation
    }).insert(ignore_permissions=True)
    doc.submit()

    # NOTE(review): this reads ``subscription`` off the freshly created
    # Subscription doc itself; presumably it is only set by automation, so
    # the source document gets back-linked when absent — confirm intent.
    if not doc.subscription:
        frappe.db.set_value(doctype, data.name, "subscription", doc.name)
|
gpl-3.0
| -903,746,289,309,461,900
| 35.333333
| 87
| 0.72093
| false
| 3.124283
| false
| false
| false
|
ScienceWorldCA/domelights
|
backend/domeplayer/scripts/chaser-blue-fade.py
|
1
|
1152
|
import socket
import sys
import time
import random
import base64
""" Fading white chaser pattern """
HOST, PORT = "localhost", 9999
FIXTURES = 260
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((HOST, PORT))
canvas = []
for i in range(0,FIXTURES*3):
canvas.append(0)
fp = open( 'running', 'w' )
fp.write( '' )
fp.close()
running = True
while running:
for fixture in range( 0, FIXTURES ):
for i in range( 0, FIXTURES*3 ):
if i % 3 != 2:
canvas[i] = 0
if canvas[i] > 0:
canvas[i] = canvas[i] - 1
r_pixel = (fixture*3)
g_pixel = r_pixel + 1
b_pixel = g_pixel + 1
canvas[r_pixel] = 255 ### random.randint(0,255)
canvas[g_pixel] = 255 ### random.randint(0,255)
canvas[b_pixel] = 255 ### random.randint(0,255)
data = ''
for j in range(0,len(canvas)):
data = data + chr(canvas[j]) ## Blue
try:
sock.send(data)
except socket.error as msg:
print msg
break
time.sleep(0.0225)
## Check if we're still running
fp = open( 'running', 'r' )
inp = fp.read().strip()
if inp == "STOP":
running = False
fp.close()
sock.close()
|
apache-2.0
| 6,400,051,492,977,337,000
| 18.525424
| 56
| 0.594618
| false
| 2.716981
| false
| false
| false
|
PaddlePaddle/models
|
PaddleCV/image_classification/fast_imagenet/reader.py
|
1
|
6775
|
#copyright (c) 2019 PaddlePaddle Authors. All Rights Reserve.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
import math
import random
import pickle
from tqdm import tqdm
import time
import multiprocessing
import transforms
import datasets
FINISH_EVENT = "FINISH_EVENT"
class PaddleDataLoader(object):
    """Multi-process data loader: worker processes read samples from
    ``dataset`` into per-worker queues, and reader() round-robins over the
    queues to yield samples.

    NOTE(review): uses ``xrange``, so this code targets Python 2.
    """

    def __init__(self,
                 dataset,
                 indices=None,
                 concurrent=24,
                 queue_size=3072,
                 shuffle=True,
                 shuffle_seed=0):
        # dataset: indexable, returning (img, label) pairs.
        # indices: explicit sample order; generated (and shuffled) in
        #          reader() when shuffle is True.
        self.dataset = dataset
        self.indices = indices
        self.concurrent = concurrent
        self.shuffle = shuffle
        self.shuffle_seed = shuffle_seed
        # Total queue capacity is split evenly across workers.
        self.queue_size = queue_size // self.concurrent

    def _worker_loop(self, queue, worker_indices, worker_id):
        """Run in a child process: push each assigned sample, then a
        FINISH_EVENT sentinel."""
        cnt = 0
        for idx in worker_indices:
            cnt += 1
            img, label = self.dataset[idx]
            # HWC uint8 -> CHW, the layout expected downstream.
            img = np.array(img).astype('uint8').transpose((2, 0, 1))
            queue.put((img, label))
        print("worker: [%d] read [%d] samples. " % (worker_id, cnt))
        queue.put(FINISH_EVENT)

    def reader(self):
        """Return a generator factory that spawns the workers and yields
        (img, label) samples until every worker has finished."""
        def _reader_creator():
            worker_processes = []
            index_queues = []
            total_img = len(self.dataset)
            print("total image: ", total_img)
            if self.shuffle:
                self.indices = [i for i in xrange(total_img)]
                random.seed(self.shuffle_seed)
                random.shuffle(self.indices)
                print("shuffle indices: %s ..." % self.indices[:10])

            # Partition indices contiguously across workers; the last worker
            # takes any remainder.
            imgs_per_worker = int(math.ceil(total_img / self.concurrent))
            for i in xrange(self.concurrent):
                start = i * imgs_per_worker
                end = (i + 1
                       ) * imgs_per_worker if i != self.concurrent - 1 else None
                sliced_indices = self.indices[start:end]

                index_queue = multiprocessing.Queue(self.queue_size)
                w = multiprocessing.Process(
                    target=self._worker_loop,
                    args=(index_queue, sliced_indices, i))
                w.daemon = True
                w.start()
                worker_processes.append(w)
                index_queues.append(index_queue)

            # Drain queues round-robin until all workers have sent their
            # FINISH_EVENT. NOTE(review): the inner while is a busy-wait
            # when all queues are momentarily empty.
            finish_workers = 0
            worker_cnt = len(worker_processes)
            recv_index = 0
            while finish_workers < worker_cnt:
                while (index_queues[recv_index].empty()):
                    recv_index = (recv_index + 1) % self.concurrent
                sample = index_queues[recv_index].get()
                recv_index = (recv_index + 1) % self.concurrent
                if sample == FINISH_EVENT:
                    finish_workers += 1
                else:
                    yield sample

        return _reader_creator
def train(traindir, sz, min_scale=0.08, shuffle_seed=0):
    """Build a shuffled training reader over ImageFolder data at *traindir*,
    with random-resized-crop (to *sz*) and horizontal-flip augmentation."""
    train_tfms = [
        transforms.RandomResizedCrop(
            sz, scale=(min_scale, 1.0)), transforms.RandomHorizontalFlip()
    ]
    train_dataset = datasets.ImageFolder(traindir,
                                         transforms.Compose(train_tfms))
    return PaddleDataLoader(train_dataset, shuffle_seed=shuffle_seed).reader()
def test(valdir, bs, sz, rect_val=False):
    """Build a validation reader over *valdir*.

    With rect_val=True, images are served sorted by aspect ratio and cropped
    per-batch to a matching rectangle (single worker, no shuffle); otherwise
    a standard resize + center-crop to *sz* is used.
    """
    if rect_val:
        idx_ar_sorted = sort_ar(valdir)
        idx_sorted, _ = zip(*idx_ar_sorted)
        idx2ar = map_idx2ar(idx_ar_sorted, bs)

        ar_tfms = [transforms.Resize(int(sz * 1.14)), CropArTfm(idx2ar, sz)]
        val_dataset = ValDataset(valdir, transform=ar_tfms)
        # concurrent=1 and shuffle=False preserve the aspect-sorted order.
        return PaddleDataLoader(
            val_dataset, concurrent=1, indices=idx_sorted,
            shuffle=False).reader()

    val_tfms = [transforms.Resize(int(sz * 1.14)), transforms.CenterCrop(sz)]
    val_dataset = datasets.ImageFolder(valdir, transforms.Compose(val_tfms))
    return PaddleDataLoader(val_dataset).reader()
class ValDataset(datasets.ImageFolder):
    """ImageFolder variant whose transform is a *list*; index-aware
    CropArTfm transforms receive the sample index as well."""

    def __init__(self, root, transform=None, target_transform=None):
        super(ValDataset, self).__init__(root, transform, target_transform)

    def __getitem__(self, index):
        path, target = self.imgs[index]
        sample = self.loader(path)
        if self.transform is not None:
            # Apply transforms in order; CropArTfm needs the index to look
            # up its per-batch aspect ratio.
            for tfm in self.transform:
                if isinstance(tfm, CropArTfm):
                    sample = tfm(sample, index)
                else:
                    sample = tfm(sample)
        if self.target_transform is not None:
            target = self.target_transform(target)
        return sample, target
class CropArTfm(object):
    """Center-crop transform whose crop rectangle depends on the sample's
    batch aspect ratio (looked up by index in ``idx2ar``); the varying side
    is rounded down to a multiple of 8."""

    def __init__(self, idx2ar, target_size):
        self.idx2ar = idx2ar
        self.target_size = target_size

    def __call__(self, img, idx):
        aspect = self.idx2ar[idx]
        base = self.target_size
        if aspect < 1:
            # Wider than tall: stretch the width, keep height at base.
            width = int(base / aspect)
            crop_size = (width // 8 * 8, base)
        else:
            # Taller than wide: stretch the height, keep width at base.
            height = int(base * aspect)
            crop_size = (base, height // 8 * 8)
        return transforms.center_crop(img, crop_size)
def sort_ar(valdir):
    """Return [(index, aspect_ratio), ...] for the validation set, sorted by
    aspect ratio; cached as a pickle next to *valdir* after the first run."""
    idx2ar_file = valdir + '/../sorted_idxar.p'
    if os.path.isfile(idx2ar_file):
        return pickle.load(open(idx2ar_file, 'rb'))
    print(
        'Creating AR indexes. Please be patient this may take a couple minutes...'
    )
    val_dataset = datasets.ImageFolder(
        valdir)  # AS: TODO: use Image.open instead of looping through dataset
    # Aspect ratio = width / height, rounded to 5 decimal places.
    sizes = [img[0].size for img in tqdm(val_dataset, total=len(val_dataset))]
    idx_ar = [(i, round(s[0] * 1.0 / s[1], 5)) for i, s in enumerate(sizes)]
    sorted_idxar = sorted(idx_ar, key=lambda x: x[1])
    pickle.dump(sorted_idxar, open(idx2ar_file, 'wb'))
    print('Done')
    return sorted_idxar
def chunks(l, n):
    """Yield successive slices of l with length n (n is clamped to >= 1)."""
    size = max(1, n)
    return (l[start:start + size] for start in range(0, len(l), size))
def map_idx2ar(idx_ar_sorted, batch_size):
    """Map every dataset index to the mean aspect ratio of its batch.

    The input (assumed already sorted by ratio, per the name) is split into
    consecutive batches of batch_size; each index in a batch is assigned
    the batch's mean ratio rounded to 5 decimals.
    """
    idx2ar = {}
    for batch in chunks(idx_ar_sorted, batch_size):
        indices, ratios = zip(*batch)
        batch_mean = round(np.mean(ratios), 5)
        for index in indices:
            idx2ar[index] = batch_mean
    return idx2ar
|
apache-2.0
| 5,752,484,946,407,509,000
| 34.103627
| 82
| 0.585387
| false
| 3.724574
| false
| false
| false
|
pculture/unisubs
|
apps/teams/signals.py
|
1
|
7525
|
# Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import logging
from django import dispatch
# Module-level logger used by the notification helpers below.
logger = logging.getLogger(__name__)
# Team-membership signals; payload arguments are not declared here.
member_leave = dispatch.Signal()
member_remove = dispatch.Signal()
# Video / project / settings lifecycle signals with declared payloads.
video_removed_from_team = dispatch.Signal(providing_args=["team", "user"])
video_moved_from_team_to_team = dispatch.Signal(
    providing_args=["destination_team", "old_team", "video"])
video_moved_from_project_to_project = dispatch.Signal(
    providing_args=["old_project", "new_project", "video"])
team_settings_changed = dispatch.Signal(
    providing_args=["user", "changed_settings", "old_settings"])
# Called when we're creating forms for the team manage videos page. The
# sender will be the team. Append new forms to the form_list parameter
build_video_management_forms = dispatch.Signal(providing_args=['form_list'])
# Notification-related signals
# There is quite a bit of indirection here, but the goal is to make
# dispatching these events as simple as possible, since it might occur
# in multiple places.
#
# 1) Client codes dispatches a signal listed in this module:
# ex: signals.api_on_subtitles_edited.send(subtitle_version)
# 2) The signal calls that handler, which chooses the right event name
# for the signal and calls the matching sub method (for videos, languages, etc)
# 3) The submethod finds all teams that should be notified (since a video)
# can belong to more than on team). For each team:
# 3a) Puts the right task on queue, if the teams has a TeamNotificationsSettings
# 3b) The taks querys the TeamNotificationSettings models to fire notifications
# 3c) The TNS checks if there is available data (e.g. which url to post to)
# 3d) Instantiates the right notification class (since specific partners must
# have their notification data massaged to their needs - e.g. changing the video
# ids to their own, or the api links to their own endpoints)
# 3e) The notification class fires the notification
def _teams_to_notify(video):
    """Return the teams containing this video that have notification
    settings configured, either directly or through their partner."""
    from teams.models import Team
    from django.db.models import Q
    has_settings = (Q(notification_settings__isnull=False) |
                    Q(partner__notification_settings__isnull=False))
    return list(Team.objects.filter(has_settings, teamvideo__video=video))
def _execute_video_task(video, event_name):
    """Queue a video-activity notification for each TeamVideo of `video`
    whose team (or the team's partner) has notification settings."""
    from teams import tasks as team_tasks
    from teams.models import TeamVideo
    from django.db.models import Q
    logger.info("notification: %s (video: %s)", event_name, video)
    has_settings = (Q(team__notification_settings__isnull=False) |
                    Q(team__partner__notification_settings__isnull=False))
    team_videos = list(TeamVideo.objects.filter(has_settings, video=video))
    for team_video in team_videos:
        team_tasks.api_notify_on_video_activity.delay(
            team_video.team.pk,
            event_name,
            team_video.video.video_id)
def _execute_language_task(language, event_name):
    """Queue a language-activity notification for every interested team."""
    from teams import tasks as team_tasks
    logger.info("notification: %s (language: %s)", event_name, language)
    for team in _teams_to_notify(language.video):
        team_tasks.api_notify_on_language_activity.delay(
            team.pk,
            event_name,
            language.pk)
def _execute_version_task(version, event_name):
    """Queue a subtitle-activity notification for every interested team."""
    from teams import tasks as team_tasks
    logger.info("notification: %s (version: %s)", event_name, version)
    for team in _teams_to_notify(version.video):
        team_tasks.api_notify_on_subtitles_activity.delay(
            team.pk,
            event_name,
            version.pk)
def _execute_application_task(application, event_name):
    """Queue an application-activity notification for the application's team."""
    from teams.tasks import api_notify_on_application_activity
    api_notify_on_application_activity.delay(
        application.team.pk, event_name, application.pk)
def api_on_subtitles_edited(sender, **kwargs):
    # sender is the edited subtitle version
    from teams.models import TeamNotificationSetting as TNS
    _execute_version_task(sender, TNS.EVENT_SUBTITLE_NEW)
def api_on_subtitles_approved(sender, **kwargs):
    # sender is the approved subtitle version
    from teams.models import TeamNotificationSetting as TNS
    _execute_version_task(sender, TNS.EVENT_SUBTITLE_APPROVED)
def api_on_subtitles_rejected(sender, **kwargs):
    # sender is the rejected subtitle version
    from teams.models import TeamNotificationSetting as TNS
    _execute_version_task(sender, TNS.EVENT_SUBTITLE_REJECTED)
def api_on_language_edited(sender, **kwargs):
    # sender is the edited subtitle language
    from teams.models import TeamNotificationSetting as TNS
    _execute_language_task(sender, TNS.EVENT_LANGUAGE_EDITED)
def api_on_language_new(sender, **kwargs):
    # sender is the newly created subtitle language
    from teams.models import TeamNotificationSetting as TNS
    _execute_language_task(sender, TNS.EVENT_LANGUAGE_NEW)
def api_on_video_edited(sender, **kwargs):
    # sender is the edited video
    from teams.models import TeamNotificationSetting as TNS
    _execute_video_task(sender, TNS.EVENT_VIDEO_EDITED)
def api_on_teamvideo_new(sender, **kwargs):
    # sender is the new TeamVideo; it belongs to exactly one team, so the
    # task is queued directly instead of fanning out via _teams_to_notify.
    from teams import tasks as team_tasks
    from teams.models import TeamNotificationSetting as TNS
    return team_tasks.api_notify_on_video_activity.delay(
        sender.team.pk, TNS.EVENT_VIDEO_NEW, sender.video.video_id)
def api_on_application_new(sender, **kwargs):
    # sender is the new membership application
    from teams.models import TeamNotificationSetting as TNS
    return _execute_application_task(sender, TNS.EVENT_APPLICATION_NEW)
def api_on_language_deleted(sender, **kwargs):
    # sender is the deleted subtitle language
    from teams.models import TeamNotificationSetting as TNS
    return _execute_language_task(sender, TNS.EVENT_LANGUAGE_DELETED)
#: Actual available signals
# Public signals client code should send; each is wired below to the
# api_on_* handler that fans the event out to interested teams.
api_subtitles_edited = dispatch.Signal(providing_args=["version"])
api_subtitles_approved = dispatch.Signal(providing_args=["version"])
api_subtitles_rejected = dispatch.Signal(providing_args=["version"])
api_language_edited = dispatch.Signal(providing_args=["language"])
api_language_deleted = dispatch.Signal()
api_video_edited = dispatch.Signal(providing_args=["video"])
api_language_new = dispatch.Signal(providing_args=["language"])
api_teamvideo_new = dispatch.Signal(providing_args=["video"])
api_application_new = dispatch.Signal(providing_args=["application"])
# connect handlers
# NOTE: these connections are made at import time of this module.
api_subtitles_edited.connect(api_on_subtitles_edited)
api_subtitles_approved.connect(api_on_subtitles_approved)
api_subtitles_rejected.connect(api_on_subtitles_rejected)
api_language_edited.connect(api_on_language_edited)
api_language_new.connect(api_on_language_new)
api_language_deleted.connect(api_on_language_deleted)
api_video_edited.connect(api_on_video_edited)
api_teamvideo_new.connect(api_on_teamvideo_new)
api_application_new.connect(api_on_application_new)
|
agpl-3.0
| -2,416,420,001,652,783,600
| 40.346154
| 91
| 0.732359
| false
| 3.783308
| false
| false
| false
|
robwarm/gpaw-symm
|
gpaw/test/cmrtest/cmr_test4.py
|
1
|
1281
|
# This test makes sure that the i/o interfaces work with CMR.
# CMR itself does not have to be installed for this test.
#
# The reason why CMR cannot use direct writes to DB/GPAW files is that
# GPAW cannot always write a GPAW without performing a new calculation e.g.
# GPAW(filename).write(...)
# fails in some rare cases.
import os
from ase import Atom, Atoms
from ase.calculators.emt import EMT
import warnings
# cmr calls all available methods in ase.atoms detected by the module inspect.
# Therefore also deprecated methods are called - and we choose to silence those warnings.
warnings.filterwarnings('ignore', 'ase.atoms.*deprecated',)
import cmr
# from cmr.tools.log import Log
# cmr.logger.set_message_selection(Log.MSG_TYPE_ALL)
# Build a 2-atom Al cell and relax nothing: just compute an EMT energy.
a = 4.05  # Al lattice constant (Angstrom)
d = a / 2 ** 0.5
bulk = Atoms([Atom('Al', (0, 0, 0)),
              Atom('Al', (0.5, 0.5, 0.5))],
             pbc=True)
bulk.set_cell((d, d, a), scale_atoms=True)
h = 0.3  # NOTE(review): unused here; presumably a leftover GPAW grid spacing — confirm
bulk.set_calculator(EMT())
e0 = bulk.get_potential_energy()
# Write the structure both as an ASE trajectory and directly as CMR,
# then convert the trajectory to CMR and read it back.
bulk.write("cmr_test4.traj")
bulk.write("cmr_test4a.cmr")
cmr.convert({"input":"cmr_test4.traj", "output":"cmr_test4.cmr"})
data = cmr.read("cmr_test4.cmr")
data.dump()
# Exercise the group API: wrap the record in a group, round-trip it.
group = cmr.create_group()
group.add(data)
group.write("cmr_group4.cmr")
g = cmr.read("cmr_group4.cmr")
g.dump_all()
|
gpl-3.0
| -6,507,146,153,682,806,000
| 27.466667
| 89
| 0.69477
| false
| 2.809211
| true
| false
| false
|
specify/specify7
|
specifyweb/workbench/upload/tests/test_bugs.py
|
1
|
8187
|
import io
import json
import csv
from pprint import pprint
from unittest import skip
from datetime import datetime
from decimal import Decimal
from ..uploadable import Exclude
from ..upload_result import Uploaded, UploadResult, Matched, FailedBusinessRule, ReportInfo, TreeInfo
from ..upload_table import UploadTable, ScopedUploadTable, _to_many_filters_and_excludes, BoundUploadTable
from ..treerecord import TreeRecord, TreeDefItemWithParseResults
from ..upload import do_upload_csv
from ..upload_plan_schema import parse_plan
from .base import UploadTestsBase, get_table
class BugTests(UploadTestsBase):
    """Regression tests for specific reported upload bugs."""

    def test_duplicate_refworks(self) -> None:
        """ Andy found that duplicate reference works were being created from data similar to the following. """
        # 20 CSV rows but only 5 distinct reference works: the same
        # title/volume/pages/DOI/author data repeats under different
        # catalog numbers.
        reader = csv.DictReader(io.StringIO(
'''Catalog number,Type,Title,Volume,Pages,Date,DOI,URL,Author last name 1,Author first name 1,Author MI 1,Author last name 2,Author first name 2,Author MI 2,Author last name 3,Author first name 3,Author MI 3
10026,1,catfish,282,315,1969,10.5479/si.03629236.282.1,https://doi.org/10.5479/si.03629236.282.1,Taylor,William,R,,,,,,
10168,1,catfish,282,315,1969,10.5479/si.03629236.282.1,https://doi.org/10.5479/si.03629236.282.1,Taylor,William,R,,,,,,
10194,1,catfish,282,315,1969,10.5479/si.03629236.282.1,https://doi.org/10.5479/si.03629236.282.1,Taylor,William,R,,,,,,
10199,1,catfish,282,315,1969,10.5479/si.03629236.282.1,https://doi.org/10.5479/si.03629236.282.1,Taylor,William,R,,,,,,
10206,1,catfish,282,315,1969,10.5479/si.03629236.282.1,https://doi.org/10.5479/si.03629236.282.1,Taylor,William,R,,,,,,
1861,1,pearl,1686,1-28,2008,10.11646/zootaxa.1686.1.1,https://doi.org/10.11646/zootaxa.1686.1.1,Conway,Kevin,W,Chen,,Wei-Jen,Mayden,Richard,L
5311,1,pearl,1686,1-28,2008,10.11646/zootaxa.1686.1.1,https://doi.org/10.11646/zootaxa.1686.1.1,Conway,Kevin,W,Chen,,Wei-Jen,Mayden,Richard,L
5325,1,pearl,1686,1-28,2008,10.11646/zootaxa.1686.1.1,https://doi.org/10.11646/zootaxa.1686.1.1,Conway,Kevin,W,Chen,,Wei-Jen,Mayden,Richard,L
5340,1,nepal,1047,1-19,2005,10.11646/zootaxa.1047.1.1,https://doi.org/10.11646/zootaxa.1047.1.1,Ng,Heok,H,Edds,David,R,,,
5362,1,nepal,1047,1-19,2005,10.11646/zootaxa.1047.1.1,https://doi.org/10.11646/zootaxa.1047.1.1,Ng,Heok,H,Edds,David,R,,,
5282,1,nepal,1047,1-19,2005,10.11646/zootaxa.1047.1.1,https://doi.org/10.11646/zootaxa.1047.1.1,Ng,Heok,H,Edds,David,R,,,
5900,1,nepal,1047,1-19,2005,10.11646/zootaxa.1047.1.1,https://doi.org/10.11646/zootaxa.1047.1.1,Ng,Heok,H,Edds,David,R,,,
6527,1,Centrum,44,721-732,2007,10.1139/e06-137,https://doi.org/10.1139/e06-137,Newbrey,Michael,G,Wilson,Mark,VH,Ashworth,Allan,C
7350,1,Centrum,44,721-732,2007,10.1139/e06-137,https://doi.org/10.1139/e06-137,Newbrey,Michael,G,Wilson,Mark,VH,Ashworth,Allan,C
7357,1,Centrum,44,721-732,2007,10.1139/e06-137,https://doi.org/10.1139/e06-137,Newbrey,Michael,G,Wilson,Mark,VH,Ashworth,Allan,C
7442,1,The Clupeocephala,45,635-657,2010,10.4067/S0718-19572010000400009,https://doi.org/10.4067/S0718-19572010000400009,Arratia,Gloria,,,,,,,
7486,1,The Clupeocephala,45,635-657,2010,10.4067/S0718-19572010000400009,https://doi.org/10.4067/S0718-19572010000400009,Arratia,Gloria,,,,,,,
7542,1,The Clupeocephala,45,635-657,2010,10.4067/S0718-19572010000400009,https://doi.org/10.4067/S0718-19572010000400009,Arratia,Gloria,,,,,,,
7588,1,The Clupeocephala,45,635-657,2010,10.4067/S0718-19572010000400009,https://doi.org/10.4067/S0718-19572010000400009,Arratia,Gloria,,,,,,,
7602,1,The Clupeocephala,45,635-657,2010,10.4067/S0718-19572010000400009,https://doi.org/10.4067/S0718-19572010000400009,Arratia,Gloria,,,,,,,
'''))
        # One result per CSV row: the first occurrence of each reference
        # work must be Uploaded, every repeat must be Matched (not a new
        # duplicate record).
        expected = [
            Uploaded,  # 10026,1,catfish,282,315,1969,10.5479/si.03629236.282.1,https://doi.org/10.5479/si.03629236.282.1,Taylor,William,R,,,,,,
            Matched,  # 10168,1,catfish,282,315,1969,10.5479/si.03629236.282.1,https://doi.org/10.5479/si.03629236.282.1,Taylor,William,R,,,,,,
            Matched,  # 10194,1,catfish,282,315,1969,10.5479/si.03629236.282.1,https://doi.org/10.5479/si.03629236.282.1,Taylor,William,R,,,,,,
            Matched,  # 10199,1,catfish,282,315,1969,10.5479/si.03629236.282.1,https://doi.org/10.5479/si.03629236.282.1,Taylor,William,R,,,,,,
            Matched,  # 10206,1,catfish,282,315,1969,10.5479/si.03629236.282.1,https://doi.org/10.5479/si.03629236.282.1,Taylor,William,R,,,,,,
            Uploaded,  # 1861,1,pearl,1686,1-28,2008,10.11646/zootaxa.1686.1.1,https://doi.org/10.11646/zootaxa.1686.1.1,Conway,Kevin,W,Chen,,Wei-Jen,Mayden,Richard,L
            Matched,  # 5311,1,pearl,1686,1-28,2008,10.11646/zootaxa.1686.1.1,https://doi.org/10.11646/zootaxa.1686.1.1,Conway,Kevin,W,Chen,,Wei-Jen,Mayden,Richard,L
            Matched,  # 5325,1,pearl,1686,1-28,2008,10.11646/zootaxa.1686.1.1,https://doi.org/10.11646/zootaxa.1686.1.1,Conway,Kevin,W,Chen,,Wei-Jen,Mayden,Richard,L
            Uploaded,  # 5340,1,nepal,1047,1-19,2005,10.11646/zootaxa.1047.1.1,https://doi.org/10.11646/zootaxa.1047.1.1,Ng,Heok,H,Edds,David,R,,,
            Matched,  # 5362,1,nepal,1047,1-19,2005,10.11646/zootaxa.1047.1.1,https://doi.org/10.11646/zootaxa.1047.1.1,Ng,Heok,H,Edds,David,R,,,
            Matched,  # 5282,1,nepal,1047,1-19,2005,10.11646/zootaxa.1047.1.1,https://doi.org/10.11646/zootaxa.1047.1.1,Ng,Heok,H,Edds,David,R,,,
            Matched,  # 5900,1,nepal,1047,1-19,2005,10.11646/zootaxa.1047.1.1,https://doi.org/10.11646/zootaxa.1047.1.1,Ng,Heok,H,Edds,David,R,,,
            Uploaded,  # 6527,1,Centrum,44,721-732,2007,10.1139/e06-137,https://doi.org/10.1139/e06-137,Newbrey,Michael,G,Wilson,Mark,VH,Ashworth,Allan,C
            Matched,  # 7350,1,Centrum,44,721-732,2007,10.1139/e06-137,https://doi.org/10.1139/e06-137,Newbrey,Michael,G,Wilson,Mark,VH,Ashworth,Allan,C
            Matched,  # 7357,1,Centrum,44,721-732,2007,10.1139/e06-137,https://doi.org/10.1139/e06-137,Newbrey,Michael,G,Wilson,Mark,VH,Ashworth,Allan,C
            Uploaded,  # 7442,1,The Clupeocephala,45,635-657,2010,10.4067/S0718-19572010000400009,https://doi.org/10.4067/S0718-19572010000400009,Arratia,Gloria,,,,,,,
            Matched,  # 7486,1,The Clupeocephala,45,635-657,2010,10.4067/S0718-19572010000400009,https://doi.org/10.4067/S0718-19572010000400009,Arratia,Gloria,,,,,,,
            Matched,  # 7542,1,The Clupeocephala,45,635-657,2010,10.4067/S0718-19572010000400009,https://doi.org/10.4067/S0718-19572010000400009,Arratia,Gloria,,,,,,,
            Matched,  # 7588,1,The Clupeocephala,45,635-657,2010,10.4067/S0718-19572010000400009,https://doi.org/10.4067/S0718-19572010000400009,Arratia,Gloria,,,,,,,
            Matched,  # 7602,1,The Clupeocephala,45,635-657,2010,10.4067/S0718-19572010000400009,https://doi.org/10.4067/S0718-19572010000400009,Arratia,Gloria,,,,,,,
        ]
        # Upload plan: referencework base table with a to-many of three
        # author agents (only last names are mapped).
        plan = parse_plan(self.collection, json.loads('''
        {
        "baseTableName": "referencework",
        "uploadable": {
            "uploadTable": {
                "wbcols": {
                    "referenceworktype": "Type",
                    "title": "Title"
                },
                "static": {},
                "toOne": {},
                "toMany": {
                    "authors": [
                        {
                            "wbcols": {},
                            "static": {},
                            "toOne": {
                                "agent": {
                                    "uploadTable": {
                                        "wbcols": {
                                            "lastname": "Author last name 1"
                                        },
                                        "static": {"agenttype": 1},
                                        "toOne": {},
                                        "toMany": {}
                                    }
                                }
                            }
                        },
                        {
                            "wbcols": {},
                            "static": {},
                            "toOne": {
                                "agent": {
                                    "uploadTable": {
                                        "wbcols": {
                                            "lastname": "Author last name 2"
                                        },
                                        "static": {"agenttype": 1},
                                        "toOne": {},
                                        "toMany": {}
                                    }
                                }
                            }
                        },
                        {
                            "wbcols": {},
                            "static": {},
                            "toOne": {
                                "agent": {
                                    "uploadTable": {
                                        "wbcols": {
                                            "lastname": "Author last name 3"
                                        },
                                        "static": {"agenttype": 1},
                                        "toOne": {},
                                        "toMany": {}
                                    }
                                }
                            }
                        }
                    ]
                }
            }
        }
        }
        '''))
        # Run the upload and compare only the result classes, row by row.
        upload_results = do_upload_csv(self.collection, reader, plan.apply_scoping(self.collection), self.agent.id)
        rr = [r.record_result.__class__ for r in upload_results]
        self.assertEqual(expected, rr)
|
gpl-2.0
| 6,606,023,491,129,068,000
| 55.854167
| 207
| 0.671064
| false
| 2.213301
| false
| false
| false
|
perplexes/couchapp
|
python/couchapp/utils/__init__.py
|
1
|
2821
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009 Benoit Chesneau <benoitc@e-engura.org>
#
# This software is licensed as described in the file LICENSE, which
# you should have received as part of this distribution.
#
import codecs
import os
import sys
import urlparse
import urllib
# compatibility with python 2.4
try:
from hashlib import md5 as _md5
except ImportError:
import md5
_md5 = md5.new
try:
import json
except ImportError:
import simplejson as json
def in_couchapp():
    """Walk upward from the current directory looking for '.couchapprc'.

    Returns the path of the first directory that contains it, or False
    when the walk stops making progress (the parent listing equals the
    previous one, which happens at the filesystem root).
    """
    current_path = os.getcwd()
    previous_listing = []
    while True:
        listing = os.listdir(current_path)
        if listing == previous_listing:
            return False
        if '.couchapprc' in listing:
            return current_path
        current_path = os.path.normpath(os.path.join(current_path, '../'))
        previous_listing = listing
def parse_uri(string):
    """Split a database URI into (server_uri, dbname, docid).

    dbname is the path up to (but excluding) the first '_design' segment;
    docid is the '_design/...' tail when at least one segment follows
    '_design', otherwise ''.  Raises ValueError for non-http(s) schemes.
    """
    parts = urlparse.urlsplit(urllib.unquote(string))
    if parts[0] not in ('http', 'https'):
        raise ValueError('Invalid dbstring')
    segments = parts[2].strip('/').split('/')
    try:
        design = segments.index('_design')
    except ValueError:
        design = len(segments)
    dbname = '/'.join(segments[:design])
    if design < len(segments) - 1:
        docid = '/'.join(segments[design:])
    else:
        docid = ''
    server_uri = '%s://%s' % (parts[0], parts[1])
    return server_uri, dbname, docid
def parse_auth(string):
    """Extract credentials from a URI; returns (username, password, server_uri).

    The password is '' when the userinfo part has no ':' separator.
    """
    parts = urlparse.urlsplit(urllib.unquote(string))
    netloc_parts = parts[1].split('@')
    credentials = netloc_parts[0]
    if ":" in credentials:
        username, password = credentials.split(":")
    else:
        username = credentials
        password = ''
    server_uri = "%s://%s" % (parts[0], netloc_parts[1])
    return username, password, server_uri
def get_appname(docid):
    """Return the app name: the text between the first and second
    '_design/' markers (or to the end when there is only one)."""
    pieces = docid.split('_design/')
    return pieces[1]
def read_file(fname):
    """Read fname and return its contents decoded as UTF-8."""
    # context manager guarantees the handle is closed even if read() raises
    with codecs.open(fname, 'rb', "utf-8") as f:
        return f.read()
def sign_file(file_path):
    """Return the MD5 hex digest of file_path's contents.

    Returns '' when file_path is not a regular file.
    """
    if os.path.isfile(file_path):
        # context manager replaces the manual open/read/close sequence
        with open(file_path, 'rb') as f:
            return _md5(f.read()).hexdigest()
    return ''
def write_content(filename, content):
    """Write raw content to filename in binary mode."""
    # The original ended with "f.close" (no parentheses), so the file was
    # never explicitly closed; a context manager closes it deterministically.
    with open(filename, 'wb') as f:
        f.write(content)
def write_json(filename, content):
    """Serialize content as JSON and write it to filename."""
    serialized = json.dumps(content)
    write_content(filename, serialized)
def read_json(filename):
    """Load JSON from filename.

    Returns {} when the file does not exist or contains invalid JSON
    (the invalid-JSON case also logs a message to stderr); any other
    IOError propagates.
    """
    try:
        data = read_file(filename)
    # "except E as e" is valid on Python 2.6+ and 3.x, unlike the
    # original "except IOError, e" which is Python-2-only syntax.
    except IOError as e:
        # errno 2 == ENOENT: a missing file just means "no data yet"
        if e.args[0] == 2:
            return {}
        raise
    try:
        return json.loads(data)
    except ValueError:
        sys.stderr.write("Json is invalid, can't load %s\n" % filename)
        return {}
|
apache-2.0
| 5,177,487,938,545,326,000
| 21.934959
| 74
| 0.56682
| false
| 3.517456
| false
| false
| false
|
tiredpixel/pikka-bird-collector-py
|
pikka_bird_collector/collectors/postgresql.py
|
1
|
4994
|
from pikka_bird_collector.parsers.table import Table as Parser
from .base_port_command import BasePortCommand, Base
class Postgresql(BasePortCommand):
    """
        Collector for PostgreSQL (http://www.postgresql.org/).
        The collector is enabled whenever non-empty settings are passed.
        Multiple instances running on the same box are supported; just specify
        each port within settings.
        By default, core status and replication status are gathered. Optionally,
        settings can be gathered.
        For consistency, `username` is called `user`.
        DEPENDENCIES:
            psql
                Available in PATH.
        SETTINGS:
            minimal:
                {
                    5432: None}
            supported:
                {
                    5432: {
                        'user': "USER",
                        'collect': {
                            'stat_replication': False,
                            'settings': True}}}
    """

    # per-section defaults used when a port's settings omit 'collect'
    COLLECT_SETTING_DEFAULTS = {
        'stat_replication': True,
        'settings': False}

    # Core status query; collapsed to one line because it is passed to
    # psql via --command.  The CASE guards avoid calling xlog-location
    # functions while a backup / recovery is in progress.
    CMD_STATUS = """
        SELECT
            inet_client_addr(),
            inet_client_port(),
            inet_server_addr(),
            inet_server_port(),
            pg_backend_pid(),
            pg_backup_start_time(),
            pg_conf_load_time(),
            (CASE pg_is_in_backup()
                WHEN 'f' THEN pg_current_xlog_insert_location()
                END) AS pg_current_xlog_insert_location,
            (CASE pg_is_in_backup()
                WHEN 'f' THEN pg_current_xlog_location()
                END) AS pg_current_xlog_location,
            (CASE pg_is_in_backup()
                WHEN 't' THEN 'on'
                WHEN 'f' THEN 'off'
                END) AS pg_is_in_backup,
            (CASE pg_is_in_recovery()
                WHEN 't' THEN 'on'
                WHEN 'f' THEN 'off'
                END) AS pg_is_in_recovery,
            (CASE pg_is_in_recovery()
                WHEN 't' THEN (CASE pg_is_xlog_replay_paused()
                    WHEN 't' THEN 'on'
                    WHEN 'f' THEN 'off'
                    END)
                END) AS pg_is_xlog_replay_paused,
            pg_last_xact_replay_timestamp(),
            pg_last_xlog_receive_location(),
            pg_last_xlog_replay_location(),
            pg_postmaster_start_time(),
            extract(epoch from (now() - pg_postmaster_start_time())) AS uptime_s,
            version()
        """.replace('\n', ' ')

    CMD_SETTINGS = 'SELECT name, setting FROM pg_settings'

    CMD_STAT_REPLICATION = 'SELECT * FROM pg_stat_replication'

    # textual boolean values emitted by PostgreSQL, mapped to real bools
    PARSE_BOOLS = {
        'on': True,
        'off': False}

    @staticmethod
    def command_tool(port, settings, command):
        """Build the psql argv for running `command` against `port`.

        --no-password means psql never prompts; authentication must work
        non-interactively (e.g. trust/ident or .pgpass).
        """
        settings = settings or {}
        c = []
        c.extend(['psql',
            '--host', '127.0.0.1', # socket not (yet) supported
            '--port', port,
            '--dbname', 'template1',
            '--command', command,
            '--no-password',
            '--quiet',
            '--no-align',
            '--pset=footer=off'])
        if settings.get('user'):
            c.append('--username=%s' % settings['user'])
        return c

    def collect_port(self, port, settings):
        """Collect metrics for one PostgreSQL instance.

        Returns a dict with 'status' always (when the service is up), and
        'stat_replication' / 'settings' sections when enabled via settings.
        An empty dict means the service was unreachable.
        """
        metrics = {}
        o = self.command_output(port, settings, self.CMD_STATUS)
        # transpose=True: the status query yields one wide row, parsed
        # into key/value pairs
        parser = Parser(
            delim_col='|',
            converter_key=Base.parse_str_setting_key,
            converter_value=Postgresql.__parse_str_setting_value,
            transpose=True)
        ms = parser.parse(o)
        if len(ms):
            metrics['status'] = ms
        else:
            return metrics # service down; give up
        if self.collect_setting('stat_replication', settings):
            o = self.command_output(port, settings, self.CMD_STAT_REPLICATION)
            # one row per replication client, keyed by backend pid
            parser = Parser(
                delim_col='|',
                converter_key=Base.parse_str_setting_key,
                converter_value=Postgresql.__parse_str_setting_value,
                tag_header_col='pid')
            ms = parser.parse(o)
            if len(ms):
                metrics['stat_replication'] = ms
        if self.collect_setting('settings', settings):
            o = self.command_output(port, settings, self.CMD_SETTINGS)
            parser = Parser(
                delim_col='|',
                converter_key=Base.parse_str_setting_key,
                converter_value=Postgresql.__parse_str_setting_value)
            ms = parser.parse(o)
            if len(ms):
                metrics['settings'] = ms
        return metrics

    @staticmethod
    def __parse_str_setting_value(value):
        """Parse a value like Base does, then map 'on'/'off' to bools."""
        v = Base.parse_str_setting_value(value)
        if v in Postgresql.PARSE_BOOLS:
            v = Postgresql.PARSE_BOOLS[v]
        return v
|
mit
| -1,248,612,989,321,853,400
| 32.072848
| 80
| 0.489187
| false
| 4.305172
| false
| false
| false
|
elhuhdron/emdrp
|
emdrp/emdrp/dpCubeIter.py
|
1
|
19641
|
#!/usr/bin/env python
# The MIT License (MIT)
#
# Copyright (c) 2016 Paul Watkins, National Institutes of Health / NINDS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Generator class for creating chunk/size/offset/name information for hdf5 files
# containing blocks of supervoxels that overlap at the edges between blocks.
# This is the basis for "stitching" together blocks using an overlap method.
import argparse
import os
import numpy as np
class dpCubeIter(object):
    """Iterator producing chunk/size/offset/name information for blocks of
    supervoxels that overlap at block edges (see module header comment)."""
    # argparse arguments whose values must remain lists even when length 1
    LIST_ARGS = ['fileflags', 'filepaths', 'fileprefixes', 'filepostfixes', 'filemodulators',
                 'filepaths_affixes', 'filenames_suffixes', 'filemodulators_overlap']
    # strings accepted as boolean true for string-typed flag arguments
    TRUE_STRS = ['true', '1', 't', 'y', 'yes', 'yeah', 'yup', 'certainly', 'uh-huh']
#def __init__(self, inprefix, volume_range_beg, volume_range_end, overlap,
# cube_size=[1,1,1], left_remainder_size=[0,0,0], right_remainder_size=[0,0,0],
# chunksize=[128,128,128], leave_edge=False):
# # str - prefix for the name of the file
# self.inprefix = inprefix
# # (3,) int - beginning and end of ranges in chunks specified python-style
# self.volume_range_beg = np.array(volume_range_beg, dtype=np.int64)
# self.volume_range_end = np.array(volume_range_end, dtype=np.int64)
# # (3,) int - how much overlap in each direction in voxels
# self.overlap = np.array(overlap, dtype=np.int64)
# # (3,) int - size of each cube being stitched in chunks
# self.cube_size = np.array(cube_size, dtype=np.int64)
# # (3,) int - size of remainder edges on "left" and "right" sides for unaligned stitching in voxels
# self.left_remainder_size = np.array(left_remainder_size, dtype=np.int64)
# self.right_remainder_size = np.array(right_remainder_size, dtype=np.int64)
# # (3,) int - chunksize in voxels
# self.chunksize = np.array(chunksize, dtype=np.int64)
# # bool - whether to leave the overlap on the right edges
# self.leave_edge = bool(leave_edge)
    def __init__(self, args):
        """Initialize from an argparse namespace (see main for argument
        definitions); derives volume stepping, modulators and remainders."""
        # save command line arguments from argparse, see definitions in main or run with --help
        for k, v in vars(args).items():
            # do not override any values that are already set as a method of allowing inherited classes to specify
            if hasattr(self,k): continue
            if type(v) is list and k not in self.LIST_ARGS:
                if len(v)==1:
                    setattr(self,k,v[0]) # save single element lists as first element
                elif type(v[0]) is int: # convert the sizes and offsets to numpy arrays
                    setattr(self,k,np.array(v,dtype=np.int32))
                else:
                    setattr(self,k,v) # store other list types as usual (floats)
            else:
                setattr(self,k,v)
        # other inits
        self.chunksize = self.use_chunksize
        # cube size in voxels = cube size in chunks * chunk size
        self.cube_size_voxels = self.cube_size * self.chunksize
        self.left_remainder = self.left_remainder_size > 0; self.right_remainder = self.right_remainder_size > 0
        # total range must be an integral number of cubes
        self.volume_range = self.volume_range_end - self.volume_range_beg
        assert( (self.volume_range % self.cube_size == 0).all() )
        self.volume_step = self.volume_range // self.cube_size
        # remainder edges add one extra step on each affected side
        self.volume_step += self.left_remainder; self.volume_step += self.right_remainder
        self.volume_size = np.prod(self.volume_step)
        # modulators default to all ones
        self.nflags = len(self.fileflags)
        # this is for the python interface mode (does not use the argument flag / file name creation stuff)
        if self.nflags == 0: self.nflags = 1
        if len(self.filemodulators) == 0:
            self.filemodulators = np.ones((self.nflags,3),dtype=np.uint32)
        else:
            # one (x,y,z) modulator triple per file flag
            self.filemodulators = np.array(self.filemodulators,dtype=np.uint32).reshape((-1,3))
            assert(self.filemodulators.shape[0] == self.nflags)
        if len(self.filemodulators_overlap) == 0:
            self.filemodulators_overlap = np.zeros((3,),dtype=np.uint32)
        else:
            self.filemodulators_overlap = np.array(self.filemodulators_overlap,dtype=np.uint32)
            assert(self.filemodulators_overlap.size == 3)
        # this is something of a hack to allow for creating hdf5s with overlaps from knossos-style cubes.
        # xxx - probably not a good way to make this a lot cleaner without completely reimplementing emdrp
        #   data objects as knossos-style with compression and embedded overlap, make data more easily distributable
        self.filemodulators_overlap_on = np.any(self.filemodulators_overlap > 0)
        # did not see the point of omitting an overlap in just one dimensions (unclear use case)
        assert( not self.filemodulators_overlap_on or np.all(self.filemodulators_overlap > 0) )
        if self.filemodulators_overlap_on:
            # remainders and modulator overlaps are not designed to work together and also use case?
            assert( not self.left_remainder.any() and not self.right_remainder.any() )
            # inner range excludes one cube of border on each side
            self.filemodulators_overlap_volume_range = self.volume_range - 2
            assert( (self.filemodulators_overlap_volume_range % self.filemodulators[-1,:] == 0).all() )
            self.filemodulators_overlap_volume_step_inner = \
                self.filemodulators_overlap_volume_range // self.filemodulators[-1,:]
            # each modulator cube is padded by one cube on each side
            self.filemodulators_overlap_cube_size = self.filemodulators[-1,:] + 2
            self.filemodulators_overlap_volume_step = self.filemodulators_overlap_volume_step_inner * \
                self.filemodulators_overlap_cube_size
            self.filemodulators_overlap_volume_size = np.prod(self.filemodulators_overlap_volume_step)
        # per-flag booleans: whether paths get an affix / names get a suffix
        if len(self.filepaths_affixes) == 0:
            self.filepaths_affixes = [False for x in range(self.nflags)]
        else:
            assert( len(self.filepaths_affixes) == self.nflags )
            self.filepaths_affixes = [s.lower() in self.TRUE_STRS for s in self.filepaths_affixes]
        if len(self.filenames_suffixes) == 0:
            self.filenames_suffixes = [True for x in range(self.nflags)]
        else:
            assert( len(self.filenames_suffixes) == self.nflags )
            self.filenames_suffixes = [s.lower() in self.TRUE_STRS for s in self.filenames_suffixes]
def __iter__(self):
if self.filemodulators_overlap_on:
# this is something of a hack to allow for creating hdf5s with overlaps from knossos-style cubes.
use_volume_size = self.filemodulators_overlap_volume_size
use_volume_step = self.filemodulators_overlap_volume_step
fm_cube_size = self.filemodulators_overlap_cube_size
else:
use_volume_size = self.volume_size
use_volume_step = self.volume_step
cur_ovlp = np.zeros((3,),dtype=np.int32)
for cur_index in range(use_volume_size):
# the current volume indices, including the right and left remainders
cur_volume = np.array(np.unravel_index(cur_index, use_volume_step), dtype=np.int64)
if self.filemodulators_overlap_on:
# this is basically a completely seperate mode, consider as another script?
left_offset, is_left_border, is_right_border = [np.zeros((3,),dtype=np.int32) for i in range(3)]
is_left_remainder, is_right_remainder = [np.zeros((3,),dtype=np.bool) for i in range(2)]
cur_fm_volume = cur_volume // fm_cube_size
cur_chunk = (cur_volume * self.cube_size) - 2*cur_fm_volume + self.volume_range_beg
cur_ovlp = np.zeros((3,),dtype=np.int32)
sel = (cur_volume % fm_cube_size == 0)
cur_ovlp[sel] = -self.filemodulators_overlap[sel] # "top" cube overlap
sel = (cur_volume % fm_cube_size == fm_cube_size-1)
cur_ovlp[sel] = self.filemodulators_overlap[sel] # "bottom" cube overlap
size = self.cube_size_voxels
else:
# need special cases to handle the remainders
is_left_border = cur_volume == 0; is_right_border = cur_volume == (self.volume_step-1)
is_left_remainder = np.logical_and(is_left_border,self.left_remainder)
is_right_remainder = np.logical_and(is_right_border,self.right_remainder)
is_not_left_remainder = np.logical_not(is_left_remainder)
#is_not_right_remainder = np.logical_not(is_right_remainder)
assert( not (np.logical_and(is_left_remainder, is_right_remainder)).any() ) # bad use case
# left and right remainders are offset from the start of the previous and last chunks respectfully
cur_volume[is_not_left_remainder] -= self.left_remainder[is_not_left_remainder]
cur_chunk = cur_volume * self.cube_size + self.volume_range_beg
cur_chunk[is_left_remainder] -= self.cube_size[is_left_remainder]
left_offset = self.overlap.copy(); right_offset = self.overlap.copy();
if not self.leave_edge:
right_offset[is_right_border] = 0; left_offset[is_left_border] = 0
# default size is adding left and right offsets
size = self.cube_size_voxels + left_offset + right_offset
# special cases for remainder blocks
size[is_left_remainder] = self.left_remainder_size[is_left_remainder] + right_offset[is_left_remainder]
size[is_right_remainder] = self.right_remainder_size[is_right_remainder] + \
left_offset[is_right_remainder]
left_offset = -left_offset # default left offset is set negative as returned offset
# left offset for left remainder block is from the left side of previous cube
left_offset[is_left_remainder] = \
self.cube_size_voxels[is_left_remainder] - self.left_remainder_size[is_left_remainder]
# modified to allow for "modulators" which allows for chunk descriptors that only change at multiples of
# cube_size. allows for cubeiter to create command lines containing arguments with different cube_sizes
suffixes = [None] * self.nflags; affixes = [None] * self.nflags
for j in range(self.nflags):
fm = self.filemodulators[j,:]
if (fm==1).all():
mcur_chunk = cur_chunk
else:
if self.filemodulators_overlap_on:
mcur_chunk = cur_fm_volume*self.filemodulators[-1,:]*self.cube_size + self.volume_range_beg + 1
else:
mcur_chunk = (cur_volume // fm)*fm * self.cube_size + self.volume_range_beg
# create the name suffixes, path affixes
suffixes[j] = ''; affixes[j] = ''
for s,i in zip(['x','y','z'], range(3)):
r = 'l' if is_left_remainder[i] else ('r' if is_right_remainder[i] else '')
suffixes[j] += ('_%s%04d' % (s + r, mcur_chunk[i]))
affixes[j] = os.path.join(affixes[j], ('%s%04d' % (s, mcur_chunk[i])))
affixes[j] += os.path.sep
yield cur_volume, size, cur_chunk, left_offset, suffixes, affixes, is_left_border, is_right_border, cur_ovlp
def flagsToString(self, flags, paths, prefixes, postfixes, suffixes, affixes):
argstr = ' '
for flag, path, prefix, postfix, suffix, affix in zip(flags, paths, prefixes, postfixes, suffixes, affixes):
if flag != '0':
argstr += '--' + flag + ' '
# xxx - better names?
# affix is the optional knossos-style path (i.e., x0001/y0002/z0005)
# prefix is the specified file name without an extension or path
# suffix is the optional knossos-style addition to the filename (i.e., _x0001_y0002_z0005)
# postfix is the file extension
name = affix + prefix + suffix + postfix
if path != '0':
name = os.path.join(path,name)
argstr += name + ' '
return argstr
    def printCmds(self):
        """Print one generated command line per iterated cube to stdout.

        The base command comes either from --cmdfile (one command per line,
        cycled by cube index) or from the single --cmd string. Each printed
        line is: [pre_cmd;] cmd [--size/--chunk/--offset[/--overlap]] file-args [;post_cmd].
        """
        if self.cmdfile:
            # one command per non-empty line of the file
            with open(self.cmdfile, 'r') as myfile:
                cmd = myfile.read().split('\n'); cmd = [x for x in cmd if x]
        else:
            cmd = [self.cmd]
        ncmd = len(cmd)
        cnt = 0
        for volume_info in self:
            _, size, cur_chunk, left_offset, suffixes, affixes, is_left_border, is_right_border, cur_ovlp = volume_info
            # a single command is reused for every cube; multiple commands are indexed per cube
            ccmd = cmd[0] if ncmd == 1 else cmd[cnt]
            str_volume = (' --size %d %d %d ' % tuple(size.tolist())) + \
                (' --chunk %d %d %d ' % tuple(cur_chunk.tolist())) + \
                (' --offset %d %d %d ' % tuple(left_offset.tolist()))
            if self.filemodulators_overlap_on:
                str_volume += (' --overlap %d %d %d ' % tuple(cur_ovlp.tolist()))
            # suffix/affix use is gated per flag by --filenames-suffixes / --filepaths-affixes
            str_inputs = self.flagsToString(self.fileflags, self.filepaths, self.fileprefixes, self.filepostfixes,
                [x if y else '' for x,y in zip(suffixes, self.filenames_suffixes)],
                [x if y else '' for x,y in zip(affixes, self.filepaths_affixes)])
            str_cmd = ccmd + (''if self.no_volume_flags else str_volume) + str_inputs
            if self.pre_cmd: str_cmd = self.pre_cmd + ';' + str_cmd
            if self.post_cmd: str_cmd = str_cmd + ';' + self.post_cmd
            print(str_cmd)
            cnt += 1
    @classmethod
    def cubeIterGen(cls, volume_range_beg, volume_range_end, overlap, cube_size,
        left_remainder_size=None, right_remainder_size=None, chunksize=None, leave_edge=None):
        """Programmatic constructor: build an instance without a real command line.

        Synthesizes an argv string from the given xyz triples and round-trips
        it through argparse so defaults and parsing stay in one place.
        """
        parser = argparse.ArgumentParser(description='cubeIterGen:dpCubeIter',
            formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        dpCubeIter.addArgs(parser); arg_str = ''
        arg_str += ' --volume_range_beg %d %d %d ' % tuple(volume_range_beg)
        arg_str += ' --volume_range_end %d %d %d ' % tuple(volume_range_end)
        arg_str += ' --overlap %d %d %d ' % tuple(overlap)
        arg_str += ' --cube_size %d %d %d ' % tuple(cube_size)
        if left_remainder_size is not None: arg_str += ' --left_remainder_size %d %d %d ' % tuple(left_remainder_size)
        # missing leading space is harmless: the previous token ends with one
        if right_remainder_size is not None: arg_str += '--right_remainder_size %d %d %d ' % tuple(right_remainder_size)
        if chunksize is not None: arg_str += ' --use-chunksize %d %d %d ' % tuple(chunksize)
        if leave_edge: arg_str += ' --leave_edge '
        args = parser.parse_args(arg_str.split())
        return cls(args)
    @staticmethod
    def addArgs(p):
        """Register all cube-iteration options on ArgumentParser *p*.

        NOTE(review): --cmdfile/--cmd/--pre-cmd/--post-cmd use nargs=1, so
        argparse stores a one-element list when the option is supplied, while
        the default is a plain string ('') -- confirm the consuming code
        unwraps these values when copying args onto the instance.
        """
        # adds arguments required for this object to specified ArgumentParser object
        p.add_argument('--cmdfile', nargs=1, type=str, default='',
            help='Full name and path of text file containing command')
        p.add_argument('--cmd', nargs=1, type=str, default='', help='Specify command on command line as string')
        p.add_argument('--pre-cmd', nargs=1, type=str, default='',
            help='Semi-colon delimited command to print before generated command')
        p.add_argument('--post-cmd', nargs=1, type=str, default='',
            help='Semi-colon delimited command to print after generated command')
        # arguments that modulate each parameter that is being iterated by cubeiter
        p.add_argument('--fileflags', nargs='*', type=str, default=[],
            help='in/out files command line switches (0 for none)')
        p.add_argument('--filepaths', nargs='*', type=str, default=[], help='in/out files paths (0 for none)')
        p.add_argument('--fileprefixes', nargs='*', type=str, default=[], help='in/out files filename prefixes')
        p.add_argument('--filepostfixes', nargs='*', type=str, default=[], help='in/out files filename postfixes')
        p.add_argument('--filemodulators', nargs='*', type=int, default=[],
            help='Allows for supervolumes at multiples of cube_size (x0 y0 z0 x1 y1 z1 ...)')
        p.add_argument('--filemodulators-overlap', nargs='*', type=int, default=[],
            help='Optional overlap (in voxels) for LAST modulator (x0 y0 z0 x1 y1 z1 ...)')
        p.add_argument('--filepaths-affixes', nargs='*', type=str, default=[],
            help='Whether to append suffix to each filepath (knossos-style, default false)')
        p.add_argument('--filenames-suffixes', nargs='*', type=str, default=[],
            help='Whether to append suffix to each filename (default true)')
        p.add_argument('--volume_range_beg', nargs=3, type=int, default=[0,0,0], metavar=('X', 'Y', 'Z'),
            help='Starting range in chunks for total volume')
        p.add_argument('--volume_range_end', nargs=3, type=int, default=[0,0,0], metavar=('X', 'Y', 'Z'),
            help='Ending range in chunks for total volume (python style)')
        p.add_argument('--overlap', nargs=3, type=int, default=[0,0,0], metavar=('X', 'Y', 'Z'),
            help='Amount of overlap in each direction')
        p.add_argument('--cube_size', nargs=3, type=int, default=[0,0,0], metavar=('X', 'Y', 'Z'),
            help='Size in chunks of iterate volume (superchunk)')
        p.add_argument('--left_remainder_size', nargs=3, type=int, default=[0,0,0], metavar=('X', 'Y', 'Z'),
            help='Size in voxels of "left" remainder volumes')
        p.add_argument('--right_remainder_size', nargs=3, type=int, default=[0,0,0], metavar=('X', 'Y', 'Z'),
            help='Size in voxels of "right" remainder volumes')
        p.add_argument('--use-chunksize', nargs=3, type=int, default=[128,128,128], metavar=('X', 'Y', 'Z'),
            help='Size of chunks in voxels')
        p.add_argument('--leave_edge', action='store_true', help='Specify to leave overlap at edges of volume range')
        p.add_argument('--no_volume_flags', action='store_true',
            help='Do not include chunk, size and offset flags in output')
if __name__ == '__main__':
    # Script entry point: parse the cubeiter options and emit one command
    # line per cube on stdout (intended for piping into a job scheduler).
    parser = argparse.ArgumentParser(description='Generate command lines for parallelized cube processing',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    dpCubeIter.addArgs(parser)
    args = parser.parse_args()
    ci = dpCubeIter(args)
    ci.printCmds()
|
mit
| 3,392,337,723,787,381,000
| 60.378125
| 120
| 0.611832
| false
| 3.722001
| false
| false
| false
|
jeeyoungk/exercise
|
python/calendar.py
|
1
|
1671
|
DAYS_IN_MONTH = [31,28,31,30,31,30,31,31,30,31,30,31]
DAYS_IN_MONTH_CUMULATIVE = [None for i in range(12)]
DOW = ['sun', 'mon', 'tues', 'wed', 'thurs', 'fri', 'sat']
# pre-calculate, for each month, the total days in all earlier months
# (non-leap year; the entry for January is therefore 0)
_running = 0
for i in range(12):
    DAYS_IN_MONTH_CUMULATIVE[i] = _running
    _running += DAYS_IN_MONTH[i]
def year_component(year):
    """Total days contributed by all complete years before *year*.

    Counts 365 days per elapsed year plus one leap day per Gregorian leap
    year (every 4th year, minus centuries, plus every 400th year).

    Fix: uses floor division (//) so the arithmetic is integral under both
    Python 2 and Python 3 -- the original '/' produced floats (and thus a
    wrong float result) when run under Python 3.
    """
    year = year - 1  # don't count this year.
    years = year
    years4 = year // 4
    years100 = year // 100
    years400 = year // 400
    nonleaps = years - years4 + years100 - years400
    leaps = years - nonleaps
    days = years * 365 + leaps
    return days
def month_component(month):
    """Days in all months preceding the 1-based *month* (non-leap table)."""
    zero_based = month - 1
    return DAYS_IN_MONTH_CUMULATIVE[zero_based]
def day_component(day):
    """Days contributed by the day-of-month (identity; kept for symmetry)."""
    return day
def is_leap_year(y):
    """Gregorian leap-year rule: every 4th year, except centuries,
    unless the century is divisible by 400."""
    return y % 4 == 0 and (y % 100 != 0 or y % 400 == 0)
def weekday(year, month, day):
    """Return the day-of-week name ('sun'..'sat') for a Gregorian date."""
    total = year_component(year) + month_component(month) + day_component(day)
    # the month table assumes non-leap years; add Feb 29 once we're past it
    if is_leap_year(year) and month > 2:
        total += 1
    return DOW[total % 7]
# Spot checks against known weekdays; each line should print True.
# Fix: parenthesized print so the file is valid under both Python 2 and
# Python 3 (with a single argument the output is identical in both).
print(weekday(1301, 1, 1) == 'sat')
print(weekday(1701, 1, 1) == 'sat')
print(weekday(1799, 1, 1) == 'tues')
print(weekday(1801, 1, 1) == 'thurs')
print(weekday(1899, 1, 1) == 'sun')
print(weekday(1901, 1, 1) == 'tues')
print(weekday(1998, 1, 1) == 'thurs')
print(weekday(1999, 1, 1) == 'fri')
print(weekday(2013, 11, 1) == 'fri')
print(weekday(2013, 1, 1) == 'tues')
print(weekday(2017, 1, 31) == 'tues')
print(weekday(2017, 2, 1) == 'wed')
print(weekday(2017, 2, 2) == 'thurs')
|
mit
| -414,533,418,909,324,800
| 32.42
| 90
| 0.624177
| false
| 2.748355
| false
| false
| false
|
feureau/Small-Scripts
|
Blender/Blender config/2.91/scripts/addons/bricker_v2-2-1/functions/brick/mesh_generators/tile.py
|
1
|
4378
|
# Copyright (C) 2020 Christopher Gearhart
# chris@bblanimation.com
# http://bblanimation.com/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# System imports
import bpy
import bmesh
import math
import numpy as np
# Blender imports
from mathutils import Vector
# Module imports
from .generator_utils import *
def make_tile(dimensions:dict, brick_type:str, brick_size:list, circle_verts:int=None, type:str=None, detail:str="LOW", bme:bmesh=None):
    """
    create tile brick with bmesh

    Keyword Arguments:
    dimensions  -- dictionary containing brick dimensions
    brick_type  -- cm.brick_type
    brick_size  -- size of brick (e.g. standard 2x4 -> [2, 4, 3])
    circle_verts -- number of vertices per circle of cylinders
    type        -- type of tile in ('TILE', 'TILE_GRILL')
    detail      -- level of brick detail (options: ('FLAT', 'LOW', 'HIGH'))
    bme         -- bmesh object in which to create verts
    """
    # create new bmesh object
    bme = bmesh.new() if not bme else bme
    # get halfScale
    d = Vector((dimensions["half_width"], dimensions["half_width"], dimensions["half_height"]))
    d.z = d.z * (brick_size[2] if flat_brick_type(brick_type) else 1)
    # get scalar for d in positive xyz directions
    scalar = Vector((brick_size[0] * 2 - 1,
                     brick_size[1] * 2 - 1,
                     1))
    d_scaled = vec_mult(d, scalar)
    # get thickness of brick from inside to outside
    # bug fix: detail values are upper case ('FLAT'/'LOW'/'HIGH'), so the old
    # check `"High" in detail` never matched and tick_depth was never applied
    thick_xy = dimensions["thickness"] - (dimensions["tick_depth"] if "HIGH" in detail and min(brick_size) != 1 else 0)
    thick = Vector((thick_xy, thick_xy, dimensions["thickness"]))
    # create cube
    if "GRILL" in type:
        # grill tiles only need the open top rectangle; the grill geometry is added below
        coord1 = -d
        coord1.z += dimensions["slit_height"]
        coord2 = d_scaled
        coord2.z = coord1.z
        v1, v4, v3, v2 = make_rectangle(coord1, coord2, face=False, bme=bme)[1]
    else:
        # omit the bottom face unless the brick is completely flat underneath
        sides = [1, 1 if detail == "FLAT" else 0, 1, 1, 1, 1]
        coord1 = -d
        coord1.z += dimensions["slit_height"]
        coord2 = d_scaled
        v1, v2, v3, v4, v5, v6, v7, v8 = make_cube(coord1, coord2, sides, bme=bme)[1]
    # make verts for slit
    slit_depth = Vector([dimensions["slit_depth"]]*2)
    coord1 = -d
    coord1.xy += slit_depth
    coord2 = Vector((d_scaled.x, d_scaled.y, -d.z + dimensions["slit_height"]))
    coord2.xy -= slit_depth
    v9, v10, v11, v12, v13, v14, v15, v16 = make_cube(coord1, coord2, [0, 1 if detail == "FLAT" and "GRILL" not in type else 0, 1, 1, 1, 1], bme=bme)[1]
    # connect slit to outer cube
    bme.faces.new((v14, v4, v1, v13))
    bme.faces.new((v15, v3, v4, v14))
    bme.faces.new((v16, v2, v3, v15))
    bme.faces.new((v13, v1, v2, v16))
    # add details
    if "GRILL" in type:
        # orient the grill along the brick's long axis
        if brick_size[0] < brick_size[1]:
            add_grill_details(dimensions, brick_size, thick, scalar, d, v4, v1, v2, v3, v9, v10, v11, v12, bme)
        else:
            add_grill_details(dimensions, brick_size, thick, scalar, d, v1, v2, v3, v4, v9, v10, v11, v12, bme)
    elif detail != "FLAT":
        # making verts for hollow portion
        coord1 = -d + Vector((thick.x, thick.y, 0))
        coord2 = vec_mult(d, scalar) - thick
        v17, v18, v19, v20, v21, v22, v23, v24 = make_cube(coord1, coord2, [1, 0, 1, 1, 1, 1], flip_normals=True, bme=bme)[1]
        # connect hollow portion to verts for slit
        bme.faces.new((v18, v17, v9, v10))
        bme.faces.new((v19, v18, v10, v11))
        bme.faces.new((v20, v19, v11, v12))
        bme.faces.new((v17, v20, v12, v9))
        # add supports
        if max(brick_size[:2]) > 2:
            add_supports(dimensions, dimensions["height"], brick_size, brick_type, circle_verts, type, detail, d, scalar, thick, bme)
    return bme
|
gpl-3.0
| -8,467,515,662,449,953
| 39.165138
| 152
| 0.620603
| false
| 3.02557
| false
| false
| false
|
lmjohns3/manifold-experiment
|
en-dee.py
|
1
|
6947
|
import climate
import lmj.plot
import logging
import numpy as np
import sklearn.decomposition
import theanets
# Command-line options registered with the climate argument framework.
climate.add_arg('--dimensions', type=int, default=10, metavar='D')
climate.add_arg('--samples', type=int, default=1000, metavar='N')
climate.add_arg('--clusters', type=int, default=20, metavar='K')
climate.add_arg('--features', type=int, default=20, metavar='F')
climate.add_arg('--viscosity', type=float, default=0.9, metavar='V')
# which axes (principal components / raw dims / learned features) to plot
climate.add_arg('--plot-pcs', type=int, nargs='+', metavar='K')
climate.add_arg('--plot-dimensions', type=int, nargs='+', metavar='D')
climate.add_arg('--plot-features', type=int, nargs='+', metavar='F')
climate.add_arg('--seed', type=int, metavar='S')
# autoencoder regularization / training knobs (forwarded to theanets)
climate.add_arg('--hidden-l1', type=float, default=0, metavar='V')
climate.add_arg('--input-noise', type=float, default=0, metavar='V')
climate.add_arg('--hidden-dropout', type=float, default=0, metavar='V')
climate.add_arg('--activation', default='relu', metavar='A')
# full circle in radians (tau = 2*pi)
TAU = 2 * np.pi
def angle_between(sumsq, radius):
    """Angle (radians) whose cosine is sqrt(sumsq) / radius.

    *sumsq* is a squared projected length, so sumsq / sqrt(sumsq) is the
    projected length itself; dividing by *radius* gives the cosine.
    """
    cosine = sumsq / np.sqrt(sumsq) / radius
    return abs(np.arccos(cosine))
def create_dataset(args):
    """Sample points along a smoothly-wandering piecewise-linear path.

    A unit velocity vector is steered by a viscosity-smoothed random
    acceleration; each of args.clusters steps extends the path by one
    segment, and Gaussian samples (isotropic, variance 0.2) are drawn
    around the segment midpoint. Returns a list of D-dimensional points.
    """
    covariance = 0.2 * np.eye(args.dimensions)
    # random unit-length initial acceleration and velocity
    accel = np.random.randn(args.dimensions)
    accel /= np.linalg.norm(accel)
    velocity = np.random.randn(args.dimensions)
    velocity /= np.linalg.norm(velocity)
    centers = [np.zeros(args.dimensions)]
    points = []
    per_cluster = args.samples // args.clusters
    for _ in range(args.clusters):
        # blend the previous acceleration with a fresh random direction
        accel *= 1 - args.viscosity
        accel += args.viscosity * np.random.randn(args.dimensions)
        accel /= np.linalg.norm(accel)
        velocity += accel
        velocity /= np.linalg.norm(velocity)
        prev_center = centers[-1]
        next_center = centers[-1] + velocity
        # sample around the midpoint of the new segment
        points.extend(np.random.multivariate_normal(
            (prev_center + next_center) / 2, covariance, per_cluster))
        centers.append(next_center)
    return points
def plot_feature(ax, xs, ys, ux, uy, bias, length, name):
    """Draw one hidden-unit decision line ``ux*x + uy*y + bias = 0`` on *ax*.

    (ux, uy) are the feature plane's components along the two plotted axes,
    *length* is the feature's full norm, and *name* is annotated next to the
    line. Features tilted more than 45 degrees out of the plotted plane are
    skipped. The annotation offset is randomized, so output varies per call.
    """
    theta = angle_between(ux * ux + uy * uy, length)
    if theta > TAU / 8:
        return  # feature is mostly out of this 2-D projection; don't draw
    # red for positive bias, orange for negative
    c = ['#d62728', '#ff7f0e'][bias < 0]
    ax.plot(xs, (ux * xs + bias) / -uy, '-', color=c, lw=2, alpha=0.9)
    if abs(ux / uy) < 1:
        # shallow line: anchor the label near one horizontal end
        z1, z2 = np.random.random(2) < 0.5
        x = xs[[-5, 5][z1]]
        y = (ux * x + bias) / -uy
        dx = [-20, 20][z1]
        dy = [-20, 20][z2]
        rad = z1 ^ z2
    else:
        # steep line: anchor the label near one vertical end
        z1, z2 = np.random.random(2) < 0.5
        y = ys[[-5, 5][z1]]
        x = (uy * y + bias) / -ux
        dy = [-20, 20][z1]
        dx = [-20, 20][z2]
        rad = not (z1 ^ z2)
    ax.annotate(str(name), xy=(x, y), xytext=(dx, dy),
                textcoords='offset points', ha='center',
                color='#111111', alpha=0.5,
                arrowprops=dict(
                    arrowstyle='->', color='#111111', alpha=0.5,
                    connectionstyle='arc3,rad={}0.5'.format('+-'[rad])))
class IdentityEncoder:
    """No-op encoder: plots raw data dimensions directly."""

    label = 'Dim'

    def __init__(self, axes):
        self.axes = axes

    def __call__(self, n, x):
        # nothing to encode -- hand the data back untouched
        return x
class FeatureEncoder:
    """Encoder that maps data through a trained network's hidden layer."""

    label = 'Feature'

    def __init__(self, axes, network):
        self.axes = axes
        self.network = network

    def __call__(self, n, x):
        # cast to float32 before encoding (what the network expects)
        codes = self.network.encode(x.astype('f'))
        logging.info('%s %s -> %s', n, x.shape, codes.shape)
        return codes
class PcaEncoder:
    """Encoder that projects data onto principal components fitted on *dataset*."""

    label = 'PC'

    def __init__(self, axes, dataset):
        self.axes = axes
        # fit enough components to cover the highest requested axis
        self.pca = sklearn.decomposition.PCA(1 + max(self.axes))
        self.pca.fit(dataset)
        logging.info('PCA variance %s', self.pca.explained_variance_.round(2))

    def __call__(self, n, x):
        """Project *x* onto the fitted PCs; pass through arrays that cannot be projected."""
        try:
            # Bug fix: sklearn's PCA exposes transform(), not encode(). The
            # old call raised AttributeError on every invocation and the bare
            # `except:` silently returned x, so the PCA was never applied.
            z = self.pca.transform(x)
            logging.info('%s %s -> %s', n, x.shape, z.shape)
            return z
        except Exception:
            # best-effort fallback (kept from the original's intent) for
            # arrays whose feature count doesn't match the fitted data
            return x
def plot(args, encode, dataset, net, plot_features=False):
    """Scatter-matrix of the data in the encoder's axes, with denoising arrows.

    For every pair of axes from encode.axes, plots the dataset, optionally
    each hidden unit's decision line (plot_features), and an arrow from each
    noised point toward its autoencoder reconstruction. *encode* maps raw
    arrays into the plotted space (identity / PCA / network features).
    """
    # weight matrices and biases of the first hidden layer and output layer
    encoders = net.find('hid1', 'w').get_value().T
    decoders = net.find('out', 'w').get_value()
    biases = net.find('hid1', 'b').get_value()
    norms = np.sqrt((encoders * encoders).sum(axis=1))
    # unit-variance noise; the arrows show how the net "denoises" each point
    noisy = dataset + np.random.randn(*dataset.shape)
    shift = net.predict(noisy.astype('f')) - noisy
    sizes = np.sqrt((shift * shift).sum(axis=1))
    # map everything into the plotted coordinate system
    encoders_ = encode('encode', encoders)
    decoders_ = encode('decode', decoders)
    dataset_ = encode('data', dataset)
    noisy_ = encode('noisy', noisy)
    shift_ = encode('shift', shift)
    last = len(encode.axes) - 1
    for row, i in enumerate(encode.axes[1:]):
        # only the bottom row / left column get axis spines and labels
        bottom = row == last - 1 and ' bottom' or ''
        ymin = noisy_[:, i].min()
        ymax = noisy_[:, i].max()
        ypad = (ymax - ymin) * 0.2
        ys = np.linspace(ymin - ypad, ymax + ypad, 127)
        for col, j in enumerate(encode.axes[:-1]):
            if col > row:
                continue  # lower-triangular layout only
            left = col == 0 and ' left' or ''
            pl = last, last, row * last + col + 1
            ax = lmj.plot.create_axes(pl, spines=left + bottom)
            #ax.plot(mus[:, j], mus[:, i])
            ax.scatter(dataset_[:, j], dataset_[:, i], marker='.', alpha=0.1)
            xmin = noisy_[:, j].min()
            xmax = noisy_[:, j].max()
            xpad = (xmax - xmin) * 0.2
            xs = np.linspace(xmin - xpad, xmax + xpad, 127)
            #for f, u in enumerate(decoders_):
            #    ax.arrow(0, 0, u[j], u[i], color='#2ca02c', lw=2)
            if plot_features:
                for name, (plane, bias, norm) in enumerate(zip(encoders_, biases, norms)):
                    plot_feature(ax, xs, ys, plane[j], plane[i], bias, norm, name)
            style = dict(arrowstyle='->', color='#1f77b4', alpha=0.3)
            for source, delta, norm in zip(noisy_, shift_, sizes):
                sx, sy = source[j], source[i]
                dx, dy = delta[j], delta[i]
                # only draw arrows mostly within this 2-D projection
                if angle_between(dx * dx + dy * dy, norm) < TAU / 8:
                    ax.annotate('', xy=(sx + dx, sy + dy), xytext=(sx, sy), arrowprops=style)
            ax.set_xlim(xs[0], xs[-1])
            if bottom: ax.set_xlabel('{} {}'.format(encode.label, j + 1))
            ax.set_ylim(ys[0], ys[-1])
            if left: ax.set_ylabel('{} {}'.format(encode.label, i + 1))
def main(args):
    """Build the dataset, train a one-hidden-layer autoencoder, and plot it."""
    if args.seed:
        np.random.seed(args.seed)
    dataset = np.asarray(create_dataset(args), 'f')
    dataset -= dataset.mean(axis=0)  # center the data
    D = args.dimensions
    # D -> features -> D autoencoder with the chosen activation
    e = theanets.Experiment(
        theanets.Autoencoder,
        layers=[D, (args.features, args.activation), D])
    # choose plot coordinates: raw dims, learned features, or PCA (default)
    encode = None
    if args.plot_dimensions:
        encode = IdentityEncoder(args.plot_dimensions)
    elif args.plot_features:
        encode = FeatureEncoder(args.plot_features, e.network)
    else:
        encode = PcaEncoder(args.plot_pcs or list(range(args.dimensions)), dataset)
    # train to completion; remaining command-line args become training kwargs
    for i, _ in enumerate(e.itertrain(dataset, **vars(args))):
        pass
    plot(args, encode, dataset, e.network,
         plot_features=not isinstance(encode, FeatureEncoder))
    lmj.plot.show()
if __name__ == '__main__':
    # climate parses the registered args and invokes main(args)
    climate.call(main)
|
mit
| 2,385,183,796,207,257,000
| 31.462617
| 93
| 0.549158
| false
| 3.169252
| false
| false
| false
|
fgaudin/aemanager
|
accounts/migrations/0012_auto__add_field_expense_supplier.py
|
1
|
12652
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the nullable ``supplier`` CharField to Expense."""

    def forwards(self, orm):
        """Apply: add the accounts_expense.supplier column."""
        # Adding field 'Expense.supplier'
        db.add_column('accounts_expense', 'supplier', self.gf('django.db.models.fields.CharField')(max_length=70, null=True, blank=True), keep_default=False)

    def backwards(self, orm):
        """Revert: drop the accounts_expense.supplier column."""
        # Deleting field 'Expense.supplier'
        db.delete_column('accounts_expense', 'supplier')

    # Frozen ORM snapshot South uses to build the fake ORM for this
    # migration; auto-generated -- do not edit by hand.
    models = {
        'accounts.expense': {
            'Meta': {'object_name': 'Expense', '_ormbases': ['core.OwnedObject']},
            'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
            'date': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'ownedobject_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.OwnedObject']", 'unique': 'True', 'primary_key': 'True'}),
            'payment_type': ('django.db.models.fields.IntegerField', [], {}),
            'reference': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'supplier': ('django.db.models.fields.CharField', [], {'max_length': '70', 'null': 'True', 'blank': 'True'})
        },
        'accounts.invoice': {
            'Meta': {'ordering': "['invoice_id']", 'object_name': 'Invoice', '_ormbases': ['core.OwnedObject']},
            'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
            'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contact.Contact']", 'null': 'True', 'blank': 'True'}),
            'discount_conditions': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'edition_date': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
            'execution_begin_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'execution_end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'invoice_id': ('django.db.models.fields.IntegerField', [], {}),
            'ownedobject_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.OwnedObject']", 'unique': 'True', 'primary_key': 'True'}),
            'paid_date': ('django.db.models.fields.DateField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
            'payment_date': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
            'payment_type': ('django.db.models.fields.IntegerField', [], {}),
            'penalty_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'penalty_rate': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '4', 'decimal_places': '2', 'blank': 'True'}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '1', 'db_index': 'True'})
        },
        'accounts.invoicerow': {
            'Meta': {'object_name': 'InvoiceRow'},
            'amount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
            'balance_payments': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'category': ('django.db.models.fields.IntegerField', [], {}),
            'invoice': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invoice_rows'", 'to': "orm['accounts.Invoice']"}),
            'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'ownedobject_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.OwnedObject']", 'unique': 'True', 'primary_key': 'True'}),
            'proposal': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invoice_rows'", 'to': "orm['project.Proposal']"}),
            'quantity': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '1'}),
            'unit_price': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'})
        },
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contact.address': {
            'Meta': {'object_name': 'Address', '_ormbases': ['core.OwnedObject']},
            'city': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contact.Country']", 'null': 'True', 'blank': 'True'}),
            'ownedobject_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.OwnedObject']", 'unique': 'True', 'primary_key': 'True'}),
            'street': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
            'zipcode': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '10', 'blank': 'True'})
        },
        'contact.contact': {
            'Meta': {'object_name': 'Contact', '_ormbases': ['core.OwnedObject']},
            'address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contact.Address']"}),
            'company_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'}),
            'contact_type': ('django.db.models.fields.IntegerField', [], {}),
            'contacts': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'contacts_rel_+'", 'null': 'True', 'to': "orm['contact.Contact']"}),
            'email': ('django.db.models.fields.EmailField', [], {'default': "''", 'max_length': '75', 'blank': 'True'}),
            'firstname': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'function': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'legal_form': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'ownedobject_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.OwnedObject']", 'unique': 'True', 'primary_key': 'True'}),
            'representative': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'representative_function': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'})
        },
        'contact.country': {
            'Meta': {'ordering': "['country_name']", 'object_name': 'Country'},
            'country_code2': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
            'country_code3': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
            'country_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'core.ownedobject': {
            'Meta': {'object_name': 'OwnedObject'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'project.project': {
            'Meta': {'object_name': 'Project', '_ormbases': ['core.OwnedObject']},
            'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contact.Contact']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'ownedobject_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.OwnedObject']", 'unique': 'True', 'primary_key': 'True'}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '1', 'db_index': 'True'})
        },
        'project.proposal': {
            'Meta': {'ordering': "['begin_date', 'update_date']", 'object_name': 'Proposal', '_ormbases': ['core.OwnedObject']},
            'amount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
            'begin_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'contract_content': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
            'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'expiration_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'ownedobject_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.OwnedObject']", 'unique': 'True', 'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['project.Project']"}),
            'reference': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '1', 'db_index': 'True'}),
            'update_date': ('django.db.models.fields.DateField', [], {})
        }
    }

    complete_apps = ['accounts']
|
agpl-3.0
| 5,774,475,095,478,383,000
| 81.155844
| 183
| 0.551138
| false
| 3.649264
| false
| false
| false
|
laurentb/weboob
|
modules/lyricsmode/module.py
|
1
|
1723
|
# -*- coding: utf-8 -*-
# Copyright(C) 2016 Julien Veyssier
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from weboob.capabilities.lyrics import CapLyrics, SongLyrics
from weboob.tools.backend import Module
from weboob.tools.compat import quote_plus
from .browser import LyricsmodeBrowser
__all__ = ['LyricsmodeModule']
class LyricsmodeModule(Module, CapLyrics):
    """Weboob module exposing lyrics hosted on lyricsmode.com."""

    NAME = 'lyricsmode'
    MAINTAINER = u'Julien Veyssier'
    EMAIL = 'eneiluj@gmx.fr'
    VERSION = '2.1'
    DESCRIPTION = 'Lyricsmode.com lyrics website'
    LICENSE = 'AGPLv3+'
    BROWSER = LyricsmodeBrowser

    def get_lyrics(self, id):
        """Fetch a single SongLyrics object by its site identifier."""
        return self.browser.get_lyrics(id)

    def iter_lyrics(self, criteria, pattern):
        """Search for lyrics; the pattern is URL-quoted before being sent."""
        quoted_pattern = quote_plus(pattern.encode('utf-8'))
        return self.browser.iter_lyrics(criteria, quoted_pattern)

    def fill_songlyrics(self, songlyrics, fields):
        """Lazily complete the 'content' field of a SongLyrics object."""
        if 'content' not in fields:
            return songlyrics
        songlyrics.content = self.get_lyrics(songlyrics.id).content
        return songlyrics

    # Map capability objects to their lazy-fill callback.
    OBJECTS = {
        SongLyrics: fill_songlyrics
    }
|
lgpl-3.0
| -4,821,468,968,850,700,000
| 31.509434
| 86
| 0.713871
| false
| 3.589583
| false
| false
| false
|
LIMXTEC/BitCore
|
contrib/seeds/generate-seeds.py
|
1
|
4341
|
#!/usr/bin/env python3
# Copyright (c) 2014-2017 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD, 0x87, 0xD8, 0x7E, 0xEB, 0x43])

def name_to_ipv6(addr):
    """Convert an address string to its canonical 16-byte IPv6 form.

    Accepts a <name>.onion address, a dotted IPv4, a colon-separated IPv6,
    or a little-endian 0x-prefixed IPv4. Raises ValueError for anything
    else or for an onion address of the wrong length.
    """
    if len(addr) > 6 and addr.endswith('.onion'):
        vchAddr = b32decode(addr[0:-6], True)
        if len(vchAddr) != 16 - len(pchOnionCat):
            # Fix: the original raised "ValueError('Invalid onion %s' % s)"
            # with an undefined name 's', producing a NameError instead of
            # the intended message.
            raise ValueError('Invalid onion %s' % addr)
        return pchOnionCat + vchAddr
    elif '.' in addr:  # IPv4
        return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
    elif ':' in addr:  # IPv6
        sub = [[], []]  # prefix, suffix
        x = 0
        addr = addr.split(':')
        for i, comp in enumerate(addr):
            if comp == '':
                if i == 0 or i == (len(addr) - 1):  # skip empty component at beginning or end
                    continue
                x += 1  # :: skips to suffix
                assert(x < 2)
            else:  # two bytes per component
                val = int(comp, 16)
                sub[x].append(val >> 8)
                sub[x].append(val & 0xff)
        # Pad the gap introduced by '::' with zero bytes.
        nullbytes = 16 - len(sub[0]) - len(sub[1])
        assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
        return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
    elif addr.startswith('0x'):  # IPv4-in-little-endian
        return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
    else:
        raise ValueError('Could not parse address %s' % addr)
def parse_spec(s, defaultport):
    """Split an address spec into (16-byte host, port).

    Handles '[ipv6]:port', bare ipv6, and 'host[:port]'; *defaultport*
    is used when no port is present.
    """
    bracketed = re.match('\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
    if bracketed:  # ipv6 in brackets, optional port
        host, port = bracketed.group(1), bracketed.group(2)
    elif s.count(':') > 1:  # bare ipv6, no port possible
        host, port = s, ''
    else:
        host, _, port = s.partition(':')
    port = int(port) if port else defaultport
    return (name_to_ipv6(host), port)
def process_nodes(g, f, structname, defaultport):
    """Read node specs from file *f* and write a C SeedSpec6 array named
    *structname* to stream *g*. Lines may carry '#' comments; blank lines
    are skipped."""
    g.write('static SeedSpec6 %s[] = {\n' % structname)
    need_comma = False
    for line in f:
        # Drop any trailing comment, then surrounding whitespace.
        line = line.split('#', 1)[0].strip()
        if not line:
            continue
        if need_comma:
            g.write(',\n')
        need_comma = True
        host, port = parse_spec(line, defaultport)
        hoststr = ','.join('0x%02x' % b for b in host)
        g.write('    {{%s}, %i}' % (hoststr, port))
    g.write('\n};\n')
def main():
    """Entry point: read nodes_main.txt/nodes_test.txt from the directory
    named on the command line and emit chainparamsseeds.h on stdout."""
    if len(sys.argv) < 2:
        print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
        exit(1)
    g = sys.stdout
    indir = sys.argv[1]
    # Fixed file header, written line by line.
    header_lines = [
        '#ifndef BITCORE_CHAINPARAMSSEEDS_H\n',
        '#define BITCORE_CHAINPARAMSSEEDS_H\n',
        '/**\n',
        ' * List of fixed seed nodes for the bitcore network\n',
        ' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n',
        ' *\n',
        ' * Each line contains a 16-byte IPv6 address and a port.\n',
        ' * IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n',
        ' */\n',
    ]
    for chunk in header_lines:
        g.write(chunk)
    with open(os.path.join(indir, 'nodes_main.txt'), 'r') as f:
        process_nodes(g, f, 'pnSeed6_main', 8555)
    g.write('\n')
    with open(os.path.join(indir, 'nodes_test.txt'), 'r') as f:
        process_nodes(g, f, 'pnSeed6_test', 19335)
    g.write('#endif // BITCORE_CHAINPARAMSSEEDS_H\n')

if __name__ == '__main__':
    main()
|
mit
| -4,901,292,662,621,745,000
| 30.456522
| 98
| 0.571067
| false
| 3.175567
| false
| false
| false
|
cardmaster/makeclub
|
controlers/url.py
|
1
|
3202
|
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
'''
We use this rule:
/clubs -> list clubs, or give more search query
/club/<slug> display one club(slug=<slug>), if not exists, return 404
/club/<slug>/edit edit club with slubg=<slug>, if not exists, create one
/club/<slug>/delete delete club with slubg=<slug>, if not exists, create one
/member/<slug>[/<user>] edit membership of club(slug=<slug>), user=<user>(current_user if omitted),
when post data to a non-exits membership, will cause a create.
/activity/<slug>/<aid> display activity of a club(slug=<slug>, aid=<aid)
/activity/<slug>/<aid>/edit edit activity of a club(slug=<slug>, aid=<aid)
/activity/<slug>/<aid>/(join|quit|confirm) join an activity of a club(slug=<slug>, aid=<aid), if specify an 'targetUser'
field in request data, will cause this targetUser join this activity
'''
import re
import os.path
pathjoin = os.path.join
def extPattern(base):
    """Extend a URL pattern so it also matches any sub-path of *base*."""
    return '%s($|/.*)' % base
class ModuleUrlConf(object):
    """URL configuration for one module.

    *base* is a format string whose '%s' placeholders stand for the
    variable parts of the path; *pattern* is the regex used to recognise
    such a path (derived from *base* when not given).
    """

    @staticmethod
    def generatePattern(base):
        # Substitute the '%s' of the base with a capturing group.
        # NOTE: only works for a base with exactly one placeholder; bases
        # with several must pass an explicit pattern.
        return (base % '(\S+)')

    def __init__(self, base, pattern=''):  # Base url must have a %s, to specify the variable part
        self.base = base
        # Derive the matching pattern from the base unless one was given.
        self.pattern = pattern if pattern else ModuleUrlConf.generatePattern(base)

    def path(self, *args):
        """Build a concrete path by filling the base with *args*."""
        return self.base % tuple(str(arg) for arg in args)

    def analyze(self, path):
        """Return the captured groups of *path*, or [] when it does not match.

        Fix: removed the bare ``except:`` that wrapped ``mat.groups()`` --
        ``groups()`` cannot raise here, and a bare except would only mask
        real programming errors.
        """
        mat = re.compile(self.pattern).match(path)
        return mat.groups() if mat else []
from helper import splitPath
class MemberUrlConf(ModuleUrlConf):
    """URL configuration for membership paths: <stub>/<slug>[/<user>]."""

    def __init__(self, stub):
        base = stub + '/%s/%s'
        super(MemberUrlConf, self).__init__(base, stub + '/.*')
        self.stub = stub

    def path(self, slug, user=''):
        """Build a membership path; *user* defaults to the empty string."""
        return ModuleUrlConf.path(self, slug, user)

    def analyze(self, path):
        """Extract (slug, user) by splitting *path* after the stub."""
        return splitPath(path, self.stub, 2)
# Central URL routing table: maps a handler name to the ModuleUrlConf that
# can both build a concrete path and parse an incoming request path.
urldict = dict (
    ClubList = ModuleUrlConf('/clubs', extPattern('(/$|/clubs)') ),
    ClubView = ModuleUrlConf('/club/%s', '/club/(\S+)/?$'),
    ClubEdit = ModuleUrlConf('/club/%s/edit', '/club/(\S+)/edit/?$'),
    Member = MemberUrlConf('/member'),
    # NOTE(review): 'Privilige' is misspelled but kept -- other modules key on it.
    ClubPrivilige = MemberUrlConf('/priv'),
    ActivityView = ModuleUrlConf('/act/id/%s', '/act/id/(\d+)/?$'),
    ActivityEdit = ModuleUrlConf('/act/id/%s/edit', '/act/id/(\d+)/edit/?$'),
    ActivityParticipate = ModuleUrlConf('/act/id/%s/%s', '/act/id/(\d+)/(join|quit|confirm|bill|rebill)/?$'),
    ActivityNew = ModuleUrlConf('/act/new/%s', '/act/new/(\S+)/?$'),
    Test = ModuleUrlConf('/test/%s', extPattern('/test'))
)
|
agpl-3.0
| -1,901,133,220,814,521,600
| 35.804598
| 120
| 0.682698
| false
| 3.133072
| false
| false
| false
|
JoeJimFlood/RugbyPredictifier
|
2020SuperRugby/matchup.py
|
1
|
16562
|
import os
os.chdir(os.path.dirname(__file__))
import sim_util
import sys
import pandas as pd
import numpy as np
from numpy.random import poisson, uniform
from numpy import mean
import time
import math
# Play-off flag: when True, simulations disallow draws.
po = False
# Home venue of each team, indexed by team name (CSV has no header row).
team_homes = pd.read_csv(os.path.join(os.path.split(__file__)[0], 'TeamHomes.csv'), header = None, index_col = 0)
# Latitude/longitude of each stadium, indexed by venue name.
stadium_locs = pd.read_csv(os.path.join(os.path.split(__file__)[0], 'StadiumLocs.csv'), index_col = 0)
# Directory holding one score-table CSV per team.
teamsheetpath = os.path.join(os.path.split(__file__)[0], 'Score Tables')
compstat = {'TF': 'TA', 'TA': 'TF', #Dictionary to use to compare team stats with opponent stats
            'CF': 'CA', 'CA': 'CF',
            'CON%F': 'CON%A', 'CON%A': 'CON%F',
            'PF': 'PA', 'PA': 'PF',
            'DGF': 'DGA', 'DGA': 'DGF'}
def weighted_variance(data, weights):
    """Weighted sample variance of *data* with reliability weights."""
    assert len(data) == len(weights), 'Data and weights must be same length'
    mu = np.average(data, weights = weights)
    total = weights.sum()
    total_sq = np.square(weights).sum()
    # Reliability-weights correction: divide by V1 - V2/V1 instead of N - 1.
    return (weights * np.square(data - mu)).sum() / (total - (total_sq / total))
def get_opponent_stats(opponent, venue): #Gets summaries of statistics for opponent each week
    """Return a dict of the opponent's season stats averaged with travel weights.

    Each of the opponent's past matches is weighted by how similar its travel
    distance was to the trip from the opponent's home to *venue*. Conversion
    rates CON%F/CON%A fall back to 0.75 when the try counts are all zero.
    """
    opponent_stats = {}
    global teamsheetpath, stadium_locs, team_homes
    # NOTE(review): pd.DataFrame.from_csv was deprecated and later removed;
    # this only runs on an old pandas -- consider pd.read_csv(..., index_col=0).
    opp_stats = pd.DataFrame.from_csv(os.path.join(teamsheetpath, opponent + '.csv'))
    opponent_home = team_homes[1][opponent]
    (venue_lat, venue_lng) = stadium_locs.loc[venue, ['Lat', 'Long']]
    (opponent_home_lat, opponent_home_lng) = stadium_locs.loc[opponent_home, ['Lat', 'Long']]
    # Distance the opponent must travel to this venue; used as the reference
    # when weighting their past matches.
    opponent_reference_distance = geodesic_distance(opponent_home_lat, opponent_home_lng, venue_lat, venue_lng)
    def get_opponent_weight(location):
        # Weight of one past match: travel profile close to this trip -> ~1.
        return get_travel_weight(location, opponent_home_lat, opponent_home_lng, opponent_reference_distance)
    opp_stats['Weight'] = opp_stats['VENUE'].apply(get_opponent_weight)
    # Weighted average of every numeric stat column.
    for stat in opp_stats.columns:
        if stat != 'VENUE':
            if stat != 'OPP':
                opponent_stats.update({stat: np.average(opp_stats[stat], weights = opp_stats['Weight'])})
    try:
        opponent_stats.update({'CON%F': float((opp_stats['CF']*opp_stats['Weight']).sum())/(opp_stats['TF']*opp_stats['Weight']).sum()})
    except ZeroDivisionError:
        opponent_stats.update({'CON%F': 0.75})
    try:
        opponent_stats.update({'CON%A': float((opp_stats['CA']*opp_stats['Weight']).sum())/(opp_stats['TA']*opp_stats['Weight']).sum()})
    except ZeroDivisionError:
        opponent_stats.update({'CON%A': 0.75})
    return opponent_stats
def get_residual_performance(score_df): #Get how each team has done compared to the average performance of their opponents
    """Compute residual (relative-to-opponent-average) stats for one team.

    Mutates *score_df* in place (adds OPP_*, CON%* and R_* columns) and
    returns (residual_stats dict, residual_variances Series).
    """
    global teamsheetpath, team_homes, stadium_locs
    #score_df = pd.DataFrame.from_csv(os.path.join(teamsheetpath, team + '.csv'))
    residual_stats = {}
    residual_variances = {}
    score_df['CON%F'] = np.nan
    score_df['CON%A'] = np.nan
    for week in score_df.index:
        opponent_stats = get_opponent_stats(score_df['OPP'][week], score_df['VENUE'][week])
        for stat in opponent_stats:
            if week == score_df.index.tolist()[0]:
                # First week: create the opponent-stat column before filling.
                score_df['OPP_' + stat] = np.nan
            score_df['OPP_' + stat][week] = opponent_stats[stat]
        score_df['CON%F'][week] = float(score_df['CF'][week]) / score_df['TF'][week]
        score_df['CON%A'][week] = float(score_df['CA'][week]) / score_df['TA'][week]
    # Residual = own stat minus the opponent's complementary stat.
    for stat in opponent_stats:
        if stat == 'Weight':
            continue
        score_df['R_' + stat] = score_df[stat] - score_df['OPP_' + compstat[stat]]
        if stat in ['TF', 'PF', 'DGF', 'TA', 'PA', 'DGA']:
            residual_stats.update({stat: np.average(score_df['R_' + stat], weights = score_df['Weight'])})
            residual_variances[stat] = weighted_variance(score_df['R_' + stat], score_df['Weight'])
        elif stat == 'CON%F':
            try:
                residual_stats.update({stat: (score_df['R_CON%F'].multiply(score_df['TF'])*score_df['Weight']).sum() / (score_df['TF']*score_df['Weight']).sum()})
            except ZeroDivisionError:
                residual_stats.update({stat: 0})
        elif stat == 'CON%A':
            try:
                residual_stats.update({stat: (score_df['R_CON%A'].multiply(score_df['TA'])*score_df['Weight']).sum() / (score_df['TA']*score_df['Weight']).sum()})
            # Fix: original said 'except ZeroDisivionError' (typo), which
            # would itself raise NameError if the division ever failed.
            except ZeroDivisionError:
                residual_stats.update({stat: 0})
    return residual_stats, pd.Series(residual_variances)
#def get_score(expected_scores): #Get the score for a team based on expected scores
# score = 0
# if expected_scores['T'] > 0:
# tries = poisson(expected_scores['T'])
# else:
# tries = poisson(0.01)
# score = score + 6 * tries
# if expected_scores['P'] > 0:
# fgs = poisson(expected_scores['P'])
# else:
# fgs = poisson(0.01)
# score = score + 3 * fgs
# if expected_scores['DG'] > 0:
# sfs = poisson(expected_scores['DG'])
# else:
# sfs = poisson(0.01)
# score = score + 2 * sfs
# for t in range(tries):
# successful_con_determinant = uniform(0, 1)
# if successful_con_determinant <= expected_scores['CONPROB']:
# score += 2
# else:
# continue
# #if tries >= 4:
# # bp = True
# #else:
# # bp = False
# return (score, tries)
#def game(team_1, team_2,
# expected_scores_1, expected_scores_2,
# playoff = False): #Get two scores and determine a winner
# (score_1, tries_1) = get_score(expected_scores_1)
# (score_2, tries_2) = get_score(expected_scores_2)
# if tries_1 - tries_2 >= 3:
# bp1 = True
# bp2 = False
# elif tries_2 - tries_1 >= 3:
# bp1 = False
# bp2 = True
# else:
# bp1 = False
# bp2 = False
# if score_1 > score_2:
# win_1 = 1
# win_2 = 0
# draw_1 = 0
# draw_2 = 0
# if bp1:
# bpw1 = 1
# else:
# bpw1 = 0
# if bp2:
# bpl2 = 1
# else:
# bpl2 = 0
# bpl1 = 0
# bpw2 = 0
# bpd1 = 0
# bpd2 = 0
# lbp1 = 0
# if score_1 - score_2 <= 7:
# lbp2 = 1
# else:
# lbp2 = 0
# elif score_2 > score_1:
# win_1 = 0
# win_2 = 1
# draw_1 = 0
# draw_2 = 0
# if bp1:
# bpl1 = 1
# else:
# bpl1 = 0
# if bp2:
# bpw2 = 1
# else:
# bpw2 = 0
# bpw1 = 0
# bpl2 = 0
# bpd1 = 0
# bpd2 = 0
# lbp2 = 0
# if score_2 - score_1 <= 7:
# lbp1 = 1
# else:
# lbp1 = 0
# else:
# if playoff:
# win_1 = 0.5
# win_2 = 0.5
# draw_1 = 0
# draw_2 = 0
# bpw1 = 0
# bpw2 = 0
# bpd1 = 0
# bpd2 = 0
# bpl1 = 0
# bpl2 = 0
# lbp1 = 0
# lbp2 = 0
# else:
# win_1 = 0
# win_2 = 0
# draw_1 = 1
# draw_2 = 1
# bpw1 = 0
# bpw2 = 0
# bpl1 = 0
# bpl2 = 0
# lbp1 = 0
# lbp2 = 0
# if bp1:
# bpd1 = 1
# else:
# bpd1 = 0
# if bp2:
# bpd2 = 1
# else:
# bpd2 = 0
# summary = {team_1: [win_1, draw_1, score_1, bpw1, bpd1, bpl1, lbp1]}
# summary.update({team_2: [win_2, draw_2, score_2, bpw2, bpd2, bpl2, lbp2]})
# return summary
def get_expected_scores(team_1_stats, team_2_stats, team_1_df, team_2_df): #Get the expected scores for a matchup based on the previous teams' performances
    """Blend team 1's residual attack with team 2's residual defence.

    Returns a dict with expected tries 'T', penalties 'P', drop goals 'DG'
    and conversion probability 'CONPROB' (clipped to [0.01, 0.99], with a
    0.75 fallback when undefined).
    """
    expected_scores = {}
    #print('\n')
    #print('Residual Stats')
    #print(team_1_stats)
    #print(team_2_stats)
    #print('\n')
    # NOTE(review): the loop variable 'stat' is unused -- every iteration
    # recomputes the same three entries, so the loop only wastes work.
    for stat in team_1_stats:
        expected_scores.update({'T': mean([team_1_stats['TF'] + np.average(team_2_df['TA'], weights = team_2_df['Weight']),
                                           team_2_stats['TA'] + np.average(team_1_df['TF'], weights = team_1_df['Weight'])])})
        expected_scores.update({'P': mean([team_1_stats['PF'] + np.average(team_2_df['PA'], weights = team_2_df['Weight']),
                                           team_2_stats['PA'] + np.average(team_1_df['PF'], weights = team_1_df['Weight'])])})
        expected_scores.update({'DG': mean([team_1_stats['DGF'] + np.average(team_2_df['DGA'], weights = team_2_df['Weight']),
                                            team_2_stats['DGA'] + np.average(team_1_df['DGF'], weights = team_1_df['Weight'])])})
    #expected_scores['T'] = max(expected_scores['T'], 0)
    #expected_scores['P'] = max(expected_scores['P'], 0)
    # Drop goals cannot be negative on average.
    expected_scores['DG'] = max(expected_scores['DG'], 0)
    #print mean([team_1_stats['PAT1%F'] + team_2_df['PAT1AS'].astype('float').sum() / team_2_df['PAT1AA'].sum(),
    # team_2_stats['PAT1%A'] + team_1_df['PAT1FS'].astype('float').sum() / team_1_df['PAT1FA'].sum()])
    try:
        conprob = mean([team_1_stats['CON%F'] + (team_2_df['CA']*team_2_df['Weight']).sum() / (team_2_df['TA']*team_2_df['Weight']).sum(),
                        team_2_stats['CON%A'] + (team_1_df['CF']*team_1_df['Weight']).sum() / (team_1_df['TF']*team_1_df['Weight']).sum()])
    except ZeroDivisionError:
        conprob = 0.75
    if not math.isnan(conprob):
        # Keep the probability strictly inside (0, 1) for the binomial draw.
        conprob = min(max(conprob, 0.01), 0.99)
        expected_scores.update({'CONPROB': conprob})
    else:
        expected_scores.update({'CONPROB': 0.75})
    #print(expected_scores['PAT1PROB'])
    #print(expected_scores)
    return expected_scores
def geodesic_distance(olat, olng, dlat, dlng):
    '''
    Returns geodesic distance in percentage of half the earth's circumference between two points on the earth's surface
    '''
    # Haversine formula, rescaled so that antipodal points are exactly 1.0
    # (i.e. great-circle distance divided by half the circumference).
    olat, olng, dlat, dlng = (math.radians(v) for v in (olat, olng, dlat, dlng))
    half_dlat = (dlat - olat) / 2
    half_dlng = (dlng - olng) / 2
    a = math.sin(half_dlat) ** 2 + math.cos(olat) * math.cos(dlat) * math.sin(half_dlng) ** 2
    return 4 * math.atan2(math.sqrt(a), math.sqrt(1 - a)) / math.tau
def get_travel_weight(venue, home_lat, home_lng, reference_distance):
    '''
    Gets the travel weight based on a venue, a team's home lat/long coordinates, and a reference distance
    '''
    global stadium_locs
    venue_lat, venue_lng = stadium_locs.loc[venue, ['Lat', 'Long']]
    distance_travelled = geodesic_distance(home_lat, home_lng, venue_lat, venue_lng)
    # Weight peaks at 1 when the past trip matches the reference trip.
    return 1 - abs(distance_travelled - reference_distance)
def get_score(expected_scores, score_array, n_sim, return_tries = True):
    """Simulate n_sim match scores from expected try/penalty/drop-goal
    (mean, variance) pairs and a conversion probability 'C'."""
    tries = sim_util.sim(expected_scores['T'][0], expected_scores['T'][1], n_sim)
    conversions = np.random.binomial(tries, expected_scores['C'])
    penalties = sim_util.sim(expected_scores['P'][0], expected_scores['P'][1], n_sim)
    drop_goals = sim_util.sim(expected_scores['DG'][0], expected_scores['DG'][1], n_sim)
    total = sim_util.calculate_score((tries, conversions, penalties, drop_goals), score_array)
    if return_tries:
        return total, tries
    return total
def matchup(team_1, team_2, venue = None):
    """Simulate team_1 vs team_2 at *venue* (team_1's home when omitted).

    Returns a dict with win probabilities ('ProbWin'), score distribution
    summaries ('Scores') and bonus-point probabilities ('Bonus Points').
    """
    ts = time.time()
    global team_homes, stadium_locs
    team_1_home = team_homes[1][team_1]
    team_2_home = team_homes[1][team_2]
    if venue is None:
        venue = team_homes[1][team_1]
    (venue_lat, venue_lng) = stadium_locs.loc[venue, ['Lat', 'Long']]
    (team_1_home_lat, team_1_home_lng) = stadium_locs.loc[team_1_home, ['Lat', 'Long']]
    (team_2_home_lat, team_2_home_lng) = stadium_locs.loc[team_2_home, ['Lat', 'Long']]
    team_1_reference_distance = geodesic_distance(team_1_home_lat, team_1_home_lng, venue_lat, venue_lng)
    team_2_reference_distance = geodesic_distance(team_2_home_lat, team_2_home_lng, venue_lat, venue_lng)
    def get_team_1_weight(location):
        return get_travel_weight(location, team_1_home_lat, team_1_home_lng, team_1_reference_distance)
    def get_team_2_weight(location):
        return get_travel_weight(location, team_2_home_lat, team_2_home_lng, team_2_reference_distance)
    # NOTE(review): DataFrame.from_csv requires an old pandas version.
    team_1_season = pd.DataFrame.from_csv(os.path.join(teamsheetpath, team_1 + '.csv'))
    team_2_season = pd.DataFrame.from_csv(os.path.join(teamsheetpath, team_2 + '.csv'))
    team_1_season['Weight'] = team_1_season['VENUE'].apply(get_team_1_weight)
    team_2_season['Weight'] = team_2_season['VENUE'].apply(get_team_2_weight)
    stats_1, variances_1 = get_residual_performance(team_1_season)
    stats_2, variances_2 = get_residual_performance(team_2_season)
    expected_scores_1 = get_expected_scores(stats_1, stats_2, team_1_season, team_2_season)
    expected_scores_2 = get_expected_scores(stats_2, stats_1, team_2_season, team_1_season)
    # Fix: the variance labels must match the keys produced by
    # get_residual_performance ('DGF', not 'DF'); the original 'DF' lookup
    # could never find a drop-goal variance.
    var_1 = pd.Series(0.25*(variances_1.loc[['TF', 'PF', 'DGF']].values + variances_2.loc[['TA', 'PA', 'DGA']].values), ['T', 'P', 'DG'])
    var_2 = pd.Series(0.25*(variances_2.loc[['TF', 'PF', 'DGF']].values + variances_1.loc[['TA', 'PA', 'DGA']].values), ['T', 'P', 'DG'])
    # Fall back to the mean when a variance is undefined (single match etc.).
    for stat in var_1.index:
        if math.isnan(var_1[stat]):
            var_1[stat] = expected_scores_1[stat]
        if math.isnan(var_2[stat]):
            var_2[stat] = expected_scores_2[stat]
    # Points for try, conversion, penalty, drop goal.
    score_array = [5, 2, 3, 3]
    n_sim = int(5e6)
    expected_scores_1a = {'T': (expected_scores_1['T'], var_1['T']),
                          'C': expected_scores_1['CONPROB'],
                          'P': (expected_scores_1['P'], var_1['P']),
                          'DG': (expected_scores_1['DG'], var_1['DG'])}
    expected_scores_2a = {'T': (expected_scores_2['T'], var_2['T']),
                          'C': expected_scores_2['CONPROB'],
                          'P': (expected_scores_2['P'], var_2['P']),
                          'DG': (expected_scores_2['DG'], var_2['DG'])}
    print(expected_scores_1a)
    print(expected_scores_2a)
    # Fix: the original reassigned 'ts' here, so the elapsed time printed at
    # the end only covered the simulation step; use a separate timer.
    sim_start = time.time()
    (team_1_scores, team_1_tries) = get_score(expected_scores_1a, score_array, n_sim)
    (team_2_scores, team_2_tries) = get_score(expected_scores_2a, score_array, n_sim)
    print(time.time() - sim_start)
    (team_1_wins, team_2_wins, draws) = sim_util.eval_results(team_1_scores, team_2_scores, po)
    (team_1_tb, team_2_tb) = sim_util.eval_try_bonus(team_1_tries, team_2_tries, 3)
    (team_1_lb, team_2_lb) = sim_util.eval_losing_bonus(team_1_scores, team_2_scores, 7)
    team_1_prob = team_1_wins.mean()
    team_2_prob = team_2_wins.mean()
    draw_prob = draws.mean()
    team_1_bpw_prob = (team_1_tb * team_1_wins).mean()
    team_1_bpd_prob = (team_1_tb * draws).mean()
    team_1_bpl_prob = (team_1_tb * team_2_wins).mean()
    team_1_lbp_prob = (team_1_lb).mean()
    team_2_bpw_prob = (team_2_tb * team_2_wins).mean()
    team_2_bpd_prob = (team_2_tb * draws).mean()
    team_2_bpl_prob = (team_2_tb * team_1_wins).mean()
    team_2_lbp_prob = (team_2_lb).mean()
    # NOTE(review): DataFrame.from_items is removed in modern pandas.
    games = pd.DataFrame.from_items([(team_1, team_1_scores), (team_2, team_2_scores)])
    pre_summaries = games.describe(percentiles = list(np.linspace(0.05, 0.95, 19)))
    summaries = pd.DataFrame(columns = pre_summaries.columns)
    summaries.loc['mean'] = pre_summaries.loc['mean']
    # Keep only the mean and the percentile rows, renaming e.g. '5.0%' -> '5%'.
    for i in pre_summaries.index:
        try:
            percentile = int(round(float(i[:-1])))
            summaries.loc['{}%'.format(percentile)] = pre_summaries.loc[i]
        except ValueError:
            continue
    summaries = summaries.reset_index()
    for item in summaries.index:
        try:
            summaries['index'][item] = str(int(float(summaries['index'][item][:-1]))) + '%'
        except ValueError:
            continue
    bonus_points = pd.DataFrame(index = ['4-Try Bonus Point with Win',
                                         '4-Try Bonus Point with Draw',
                                         '4-Try Bonus Point with Loss',
                                         'Losing Bonus Point'])
    bonus_points[team_1] = [team_1_bpw_prob, team_1_bpd_prob, team_1_bpl_prob, team_1_lbp_prob]
    bonus_points[team_2] = [team_2_bpw_prob, team_2_bpd_prob, team_2_bpl_prob, team_2_lbp_prob]
    summaries = summaries.set_index('index')
    summaries = summaries.groupby(level = 0).last()
    output = {'ProbWin': {team_1: team_1_prob, team_2: team_2_prob}, 'Scores': summaries, 'Bonus Points': bonus_points}
    print(team_1 + '/' + team_2 + ' score distributions computed in ' + str(round(time.time() - ts, 1)) + ' seconds')
    return output
|
mit
| 3,946,143,101,696,154,000
| 39.29927
| 162
| 0.55899
| false
| 2.941218
| false
| false
| false
|
pyokagan/gyp
|
pylib/gyp/simple_copy.py
|
1
|
1385
|
# Copyright 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A clone of the default copy.deepcopy that doesn't handle cyclic
structures or complex types except for dicts and lists. This is
because gyp copies so large structure that small copy overhead ends up
taking seconds in a project the size of Chromium."""
import sys

# Integer/string compatibility aliases so the dispatch table below can be
# built identically on Python 2 and Python 3.
_PYTHON3 = sys.version_info >= (3, 0, 0)
if _PYTHON3:
    long = int
    unicode = str


class Error(Exception):
    """Raised when deepcopy() meets a type it does not support."""
    pass


__all__ = ["Error", "deepcopy"]


def deepcopy(x):
    """Deep copy operation on gyp objects such as strings, ints, dicts
    and lists. More than twice as fast as copy.deepcopy but much less
    generic."""
    try:
        return _deepcopy_dispatch[type(x)](x)
    except KeyError:
        # Fix: the original applied '%' to the second string literal only
        # ("..." + "..." % type(x)); since that literal has no conversion
        # specifier, the error path raised TypeError instead of Error.
        raise Error('Unsupported type %s for deepcopy. Use copy.deepcopy '
                    'or expand simple_copy support.' % type(x))

_deepcopy_dispatch = d = {}


def _deepcopy_atomic(x):
    # Immutable scalars can be shared rather than copied.
    return x

for x in (type(None), int, long, float,
          bool, str, unicode, type):
    d[x] = _deepcopy_atomic


def _deepcopy_list(x):
    return [deepcopy(a) for a in x]
d[list] = _deepcopy_list


def _deepcopy_dict(x):
    y = {}
    for key, value in x.items():
        y[deepcopy(key)] = deepcopy(value)
    return y
d[dict] = _deepcopy_dict

# Drop the short alias; the table stays reachable as _deepcopy_dispatch.
del d
# vim: expandtab tabstop=2 shiftwidth=2:
|
bsd-3-clause
| 4,918,599,992,610,725,000
| 22.474576
| 72
| 0.680866
| false
| 3.479899
| false
| false
| false
|
mice-software/maus
|
bin/user/simulate_mice.py
|
1
|
2151
|
#!/usr/bin/env python
"""
Simulate the MICE experiment
This will simulate MICE spills through the entirety of MICE using Geant4, then
digitize and reconstruct TOF and tracker hits to space points.
"""
import io # generic python library for I/O
import MAUS # MAUS libraries
def run():
    """ Run the macro
    """
    # This input generates empty spills, to be filled by the beam maker later on
    my_input = MAUS.InputPySpillGenerator()
    # Build the processing chain in execution order: beam generation and
    # GEANT4 tracking first, then per-detector digitisation/reconstruction.
    my_map = MAUS.MapPyGroup()
    mappers = [
        MAUS.MapPyBeamMaker(),                 # beam construction
        MAUS.MapCppSimulation(),               # geant4 simulation
        MAUS.MapCppMCReconSetup(),             # pre-detector recon set up
        # TOF
        MAUS.MapCppTOFMCDigitizer(),           # TOF MC Digitizer
        MAUS.MapCppTOFSlabHits(),              # TOF MC Slab Hits
        MAUS.MapCppTOFSpacePoints(),           # TOF Space Points
        # KL
        MAUS.MapCppKLMCDigitizer(),            # KL MC Digitizer
        MAUS.MapCppKLCellHits(),               # KL CellHit Reco
        # SciFi
        MAUS.MapCppTrackerMCDigitization(),    # SciFi electronics model
        MAUS.MapCppTrackerRecon(),             # SciFi Recon
        # EMR
        MAUS.MapCppEMRMCDigitization(),        # EMR MC Digitizer
        MAUS.MapCppEMRSpacePoints(),           # EMR Space Points
        MAUS.MapCppEMRRecon(),                 # EMR Recon
        # Ckov
        MAUS.MapCppCkovMCDigitizer(),
    ]
    for mapper in mappers:
        my_map.append(mapper)
    # Then construct a MAUS output component - filename comes from datacards
    my_output = MAUS.OutputCppRoot()
    # can specify datacards here or by using appropriate command line calls
    datacards = io.StringIO(u"")
    # The Go() drives all the components you pass in, then check the file
    # (default simulation.out) for output
    MAUS.Go(my_input, my_map, MAUS.ReducePyDoNothing(), my_output, datacards)

if __name__ == '__main__':
    run()
|
gpl-3.0
| -5,412,434,296,677,713,000
| 30.632353
| 80
| 0.699675
| false
| 3.172566
| false
| false
| false
|
ibm-security-intelligence/api-samples
|
data_classification/02_LowLevelCategories.py
|
1
|
2598
|
#!/usr/bin/env python3
# This sample script demonstrates how to
# 1. get a list of low level categories
# 2. get a single low level category by its id
import importlib
import json
import os
import sys
sys.path.append(os.path.realpath('../modules'))
client_module = importlib.import_module('RestApiClient')
SampleUtilities = importlib.import_module('SampleUtilities')
def main():
    """Demonstrate the data_classification low-level-category endpoints:
    list the categories under one high-level category, then fetch a single
    category by its id. Exits with status 1 if the list request fails.
    """
    # create the api client
    client = client_module.RestApiClient(version='7.0')
    # -------------------------------------------------------------------------
    # 1. get a list of low level categories
    endpoint_url = 'data_classification/low_level_categories'
    http_method = 'GET'
    # 'fields' is used to limit the fields returned for each record
    fields = 'id, name'
    # 'query_filter' is used to filter the list returned based on field values
    # low_level_category_id can be used in the filter to get a list of low
    # level categories belonging to the specified high level category
    query_filter = 'high_level_category_id = 4000'
    # 'sort' is used to sort list based on applicable fields
    sort = '+id'
    # populate the optional parameters to be used in request
    params = {'fields': fields, 'filter': query_filter, 'sort': sort}
    # send the request
    response = client.call_api(endpoint_url, http_method, params=params,
                               print_request=True)
    # check response and handle any error
    if response.code == 200:
        # extract records from response
        low_level_categories = json.loads(response.read().decode('utf-8'))
        print(low_level_categories)
    else:
        print('Failed to get the list of low level categories')
        SampleUtilities.pretty_print_response(response)
        # abort: the second demo call is pointless if the first failed
        sys.exit(1)
    # -------------------------------------------------------------------------
    # 2. get a single low level category by its id
    low_level_category_id = 3001
    endpoint_url = ('data_classification/low_level_categories' + '/' +
                    str(low_level_category_id))
    # send the request
    response = client.call_api(endpoint_url, http_method, print_request=True)
    # check response and handle any error
    if response.code == 200:
        # extract record from response
        low_level_category = json.loads(response.read().decode('utf-8'))
        print(low_level_category)
    else:
        print('Failed to get the low level category with id=' +
              str(low_level_category_id))
        SampleUtilities.pretty_print_response(response)

if __name__ == "__main__":
    main()
|
apache-2.0
| 4,357,113,796,614,164,000
| 32.74026
| 79
| 0.624326
| false
| 4.22439
| false
| false
| false
|
lukaasp/libs
|
aws_xray_sdk/ext/django/middleware.py
|
1
|
3458
|
import logging
import traceback
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.models import http
from aws_xray_sdk.core.models.trace_header import TraceHeader
log = logging.getLogger(__name__)
# Django will rewrite some http request headers.
# WSGI/Django request.META keys for the raw HTTP headers this middleware reads.
USER_AGENT_KEY = 'HTTP_USER_AGENT'
X_FORWARDED_KEY = 'HTTP_X_FORWARDED_FOR'
REMOTE_ADDR_KEY = 'REMOTE_ADDR'
XRAY_HEADER_KEY = 'HTTP_X_AMZN_TRACE_ID'
HOST_KEY = 'HTTP_HOST'
# Response header used to record the payload size on the segment.
CONTENT_LENGTH_KEY = 'content-length'
class XRayMiddleware(object):
    """
    Middleware that wraps each incoming request to a segment.
    """
    def __init__(self, get_response):
        # Next handler in the Django middleware chain.
        self.get_response = get_response

    # hooks for django version >= 1.10
    def __call__(self, request):
        """Open an X-Ray segment for the request, record HTTP metadata,
        run the rest of the chain, then close the segment."""
        # a segment name is required
        name = xray_recorder.service
        xray_header = self._get_tracing_header(request)
        if not xray_header:
            xray_header = TraceHeader()
        sampling_decision = None
        meta = request.META
        # sampling decision from incoming request's header has highest precedence
        if xray_header.sampled is not None:
            sampling_decision = xray_header.sampled
        elif not xray_recorder.sampling:
            # sampling disabled: trace everything
            sampling_decision = 1
        elif xray_recorder.sampler.should_trace(
            service_name=meta.get(HOST_KEY),
            method=request.method,
            path=request.path,
        ):
            sampling_decision = 1
        else:
            sampling_decision = 0
        segment = xray_recorder.begin_segment(
            name=name,
            traceid=xray_header.root,
            parent_id=xray_header.parent,
            sampling=sampling_decision,
        )
        segment.put_http_meta(http.URL, request.build_absolute_uri())
        segment.put_http_meta(http.METHOD, request.method)
        if meta.get(USER_AGENT_KEY):
            segment.put_http_meta(http.USER_AGENT, meta.get(USER_AGENT_KEY))
        if meta.get(X_FORWARDED_KEY):
            # X_FORWARDED_FOR may come from untrusted source so we
            # need to set the flag to true as additional information
            segment.put_http_meta(http.CLIENT_IP, meta.get(X_FORWARDED_KEY))
            segment.put_http_meta(http.X_FORWARDED_FOR, True)
        elif meta.get(REMOTE_ADDR_KEY):
            segment.put_http_meta(http.CLIENT_IP, meta.get(REMOTE_ADDR_KEY))
        response = self.get_response(request)
        status_code = int(response.status_code)
        segment.apply_status_code(status_code)
        segment.put_http_meta(http.STATUS, status_code)
        if response.has_header(CONTENT_LENGTH_KEY):
            length = int(response[CONTENT_LENGTH_KEY])
            segment.put_http_meta(http.CONTENT_LENGTH, length)
        xray_recorder.end_segment()
        return response

    def process_exception(self, request, exception):
        """
        Add exception information and fault flag to the
        current segment.
        """
        segment = xray_recorder.current_segment()
        segment.add_fault_flag()
        stack = traceback.extract_stack(limit=xray_recorder._max_trace_back)
        segment.add_exception(exception, stack)

    def _get_tracing_header(self, request):
        """Return the request's TraceHeader, or None when absent."""
        # Prefer the canonical header name; fall back to the Django/WSGI
        # rewritten META key.
        header = request.META.get(http.XRAY_HEADER)
        if not header:
            header = request.META.get(XRAY_HEADER_KEY)
        if not header:
            return None
        return TraceHeader.from_header_str(header)
|
unlicense
| -9,111,171,738,209,037,000
| 31.622642
| 81
| 0.633892
| false
| 3.787514
| false
| false
| false
|
yujikato/DIRAC
|
src/DIRAC/WorkloadManagementSystem/DB/PilotAgentsDB.py
|
1
|
43047
|
""" PilotAgentsDB class is a front-end to the Pilot Agent Database.
This database keeps track of all the submitted grid pilot jobs.
It also registers the mapping of the DIRAC jobs to the pilot
agents.
Available methods are:
addPilotTQReference()
setPilotStatus()
deletePilot()
clearPilots()
setPilotDestinationSite()
storePilotOutput()
getPilotOutput()
setJobForPilot()
getPilotsSummary()
getGroupedPilotSummary()
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__RCSID__ = "$Id$"
import six
import threading
import decimal
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Base.DB import DB
import DIRAC.Core.Utilities.Time as Time
from DIRAC.Core.Utilities import DErrno
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getCESiteMapping
from DIRAC.ConfigurationSystem.Client.Helpers.Registry import getUsernameForDN, getDNForUsername, getVOForGroup
from DIRAC.ResourceStatusSystem.Client.SiteStatus import SiteStatus
from DIRAC.Core.Utilities.MySQL import _quotedList
class PilotAgentsDB(DB):
  def __init__(self):
    """Initialise the DB front-end and a lock used to serialise pilot updates."""
    super(PilotAgentsDB, self).__init__('PilotAgentsDB', 'WorkloadManagement/PilotAgentsDB')
    # Lock available to callers needing to serialise access to this DB object
    self.lock = threading.Lock()
##########################################################################################
def addPilotTQReference(self, pilotRef, taskQueueID, ownerDN, ownerGroup, broker='Unknown',
gridType='DIRAC', pilotStampDict={}):
""" Add a new pilot job reference """
err = 'PilotAgentsDB.addPilotTQReference: Failed to retrieve a new Id.'
for ref in pilotRef:
stamp = ''
if ref in pilotStampDict:
stamp = pilotStampDict[ref]
res = self._escapeString(ownerDN)
if not res['OK']:
return res
escapedOwnerDN = res['Value']
req = "INSERT INTO PilotAgents( PilotJobReference, TaskQueueID, OwnerDN, " + \
"OwnerGroup, Broker, GridType, SubmissionTime, LastUpdateTime, Status, PilotStamp ) " + \
"VALUES ('%s',%d,%s,'%s','%s','%s',UTC_TIMESTAMP(),UTC_TIMESTAMP(),'Submitted','%s')" % \
(ref, int(taskQueueID), escapedOwnerDN, ownerGroup, broker, gridType, stamp)
result = self._update(req)
if not result['OK']:
return result
if 'lastRowId' not in result:
return S_ERROR('%s' % err)
return S_OK()
##########################################################################################
  def setPilotStatus(self, pilotRef, status, destination=None,
                     statusReason=None, gridSite=None, queue=None,
                     benchmark=None, currentJob=None,
                     updateTime=None, conn=False):
    """ Set pilot job status

        Builds a single UPDATE statement including only the columns for which
        a value was supplied.

        :param str pilotRef: pilot job reference selecting the row to update
        :param str status: new pilot status
        :param str destination: destination CE; if given and gridSite is not,
                                the grid site is looked up from the CE mapping
        :param str statusReason: reason for the status change ("Not given" by default)
        :param str gridSite: grid site name
        :param str queue: CE queue name
        :param benchmark: benchmark value (stored as float)
        :param currentJob: current job ID (stored as int)
        :param updateTime: explicit LastUpdateTime; defaults to UTC_TIMESTAMP()
        :param conn: optional DB connection
        :return: S_OK() / S_ERROR()
    """
    setList = []
    setList.append("Status='%s'" % status)
    if updateTime:
      setList.append("LastUpdateTime='%s'" % updateTime)
    else:
      setList.append("LastUpdateTime=UTC_TIMESTAMP()")
    if not statusReason:
      statusReason = "Not given"
    setList.append("StatusReason='%s'" % statusReason)
    if gridSite:
      setList.append("GridSite='%s'" % gridSite)
    if queue:
      setList.append("Queue='%s'" % queue)
    if benchmark:
      setList.append("BenchMark='%s'" % float(benchmark))
    if currentJob:
      setList.append("CurrentJobID='%s'" % int(currentJob))
    if destination:
      setList.append("DestinationSite='%s'" % destination)
      # Derive the grid site from the destination CE when not given explicitly
      if not gridSite:
        res = getCESiteMapping(destination)
        if res['OK'] and res['Value']:
          setList.append("GridSite='%s'" % res['Value'][destination])
    set_string = ','.join(setList)
    req = "UPDATE PilotAgents SET " + set_string + " WHERE PilotJobReference='%s'" % pilotRef
    result = self._update(req, conn=conn)
    if not result['OK']:
      return result
    return S_OK()
# ###########################################################################################
# FIXME: this can't work ATM because of how the DB table is made. Maybe it would be useful later.
# def setPilotStatusBulk(self, pilotRefsStatusDict=None, statusReason=None,
# conn=False):
# """ Set pilot job status in a bulk
# """
# if not pilotRefsStatusDict:
# return S_OK()
# # Building the request with "ON DUPLICATE KEY UPDATE"
# reqBase = "INSERT INTO PilotAgents (PilotJobReference, Status, StatusReason) VALUES "
# for pilotJobReference, status in pilotRefsStatusDict.items():
# req = reqBase + ','.join("('%s', '%s', '%s')" % (pilotJobReference, status, statusReason))
# req += " ON DUPLICATE KEY UPDATE Status=VALUES(Status),StatusReason=VALUES(StatusReason)"
# return self._update(req, conn)
##########################################################################################
def selectPilots(self, condDict, older=None, newer=None, timeStamp='SubmissionTime',
orderAttribute=None, limit=None):
""" Select pilot references according to the provided criteria. "newer" and "older"
specify the time interval in minutes
"""
condition = self.buildCondition(condDict, older, newer, timeStamp)
if orderAttribute:
orderType = None
orderField = orderAttribute
if orderAttribute.find(':') != -1:
orderType = orderAttribute.split(':')[1].upper()
orderField = orderAttribute.split(':')[0]
condition = condition + ' ORDER BY ' + orderField
if orderType:
condition = condition + ' ' + orderType
if limit:
condition = condition + ' LIMIT ' + str(limit)
req = "SELECT PilotJobReference from PilotAgents"
if condition:
req += " %s " % condition
result = self._query(req)
if not result['OK']:
return result
pilotList = []
if result['Value']:
pilotList = [x[0] for x in result['Value']]
return S_OK(pilotList)
##########################################################################################
def countPilots(self, condDict, older=None, newer=None, timeStamp='SubmissionTime'):
""" Select pilot references according to the provided criteria. "newer" and "older"
specify the time interval in minutes
"""
condition = self.buildCondition(condDict, older, newer, timeStamp)
req = "SELECT COUNT(PilotID) from PilotAgents"
if condition:
req += " %s " % condition
result = self._query(req)
if not result['OK']:
return result
return S_OK(result['Value'][0][0])
#########################################################################################
def getPilotGroups(self, groupList=['Status', 'OwnerDN', 'OwnerGroup', 'GridType'], condDict={}):
"""
Get all exisiting combinations of groupList Values
"""
cmd = 'SELECT %s from PilotAgents ' % ', '.join(groupList)
condList = []
for cond in condDict:
condList.append('%s in ( "%s" )' % (cond, '", "'.join([str(y) for y in condDict[cond]])))
# the conditions should be escaped before hand, so it is not really nice to expose it this way...
if condList:
cmd += ' WHERE %s ' % ' AND '.join(condList)
cmd += ' GROUP BY %s' % ', '.join(groupList)
return self._query(cmd)
##########################################################################################
def deletePilots(self, pilotIDs, conn=False):
""" Delete Pilots with IDs in the given list from the PilotAgentsDB """
if not isinstance(pilotIDs, list):
return S_ERROR('Input argument is not a List')
failed = []
for table in ['PilotOutput', 'JobToPilotMapping', 'PilotAgents']:
idString = ','.join([str(pid) for pid in pilotIDs])
req = "DELETE FROM %s WHERE PilotID in ( %s )" % (table, idString)
result = self._update(req, conn=conn)
if not result['OK']:
failed.append(table)
if failed:
return S_ERROR('Failed to remove pilot from %s tables' % ', '.join(failed))
return S_OK(pilotIDs)
##########################################################################################
def deletePilot(self, pilotRef, conn=False):
""" Delete Pilot with the given reference from the PilotAgentsDB """
if isinstance(pilotRef, six.string_types):
pilotID = self.__getPilotID(pilotRef)
else:
pilotID = pilotRef
return self.deletePilots([pilotID], conn=conn)
##########################################################################################
  def clearPilots(self, interval=30, aborted_interval=7):
    """ Delete all the pilot references submitted before <interval> days.

        Aborted pilots are cleaned up earlier, after <aborted_interval> days.

        :param int interval: age in days after which any pilot is removed
        :param int aborted_interval: age in days after which aborted pilots are removed
        :return: S_OK(idList) where idList are the IDs selected by the last
                 successful query (None when nothing matched)
    """
    reqList = []
    reqList.append(
        "SELECT PilotID FROM PilotAgents WHERE SubmissionTime < DATE_SUB(UTC_TIMESTAMP(),INTERVAL %d DAY)" %
        interval)
    reqList.append(
        "SELECT PilotID FROM PilotAgents WHERE Status='Aborted' \
          AND SubmissionTime < DATE_SUB(UTC_TIMESTAMP(),INTERVAL %d DAY)" %
        aborted_interval)
    idList = None
    for req in reqList:
      result = self._query(req)
      if not result['OK']:
        self.log.warn('Error while clearing up pilots')
      else:
        if result['Value']:
          # Best effort: a deletion failure is logged but does not abort the loop
          idList = [x[0] for x in result['Value']]
          result = self.deletePilots(idList)
          if not result['OK']:
            self.log.warn('Error while deleting pilots')
    return S_OK(idList)
##########################################################################################
def getPilotInfo(self, pilotRef=False, parentId=False, conn=False, paramNames=[], pilotID=False):
""" Get all the information for the pilot job reference or reference list
"""
parameters = ['PilotJobReference', 'OwnerDN', 'OwnerGroup', 'GridType', 'Broker',
'Status', 'DestinationSite', 'BenchMark', 'ParentID', 'OutputReady', 'AccountingSent',
'SubmissionTime', 'PilotID', 'LastUpdateTime', 'TaskQueueID', 'GridSite', 'PilotStamp',
'Queue']
if paramNames:
parameters = paramNames
cmd = "SELECT %s FROM PilotAgents" % ", ".join(parameters)
condSQL = []
if pilotRef:
if isinstance(pilotRef, list):
condSQL.append("PilotJobReference IN (%s)" % ",".join(['"%s"' % x for x in pilotRef]))
else:
condSQL.append("PilotJobReference = '%s'" % pilotRef)
if pilotID:
if isinstance(pilotID, list):
condSQL.append("PilotID IN (%s)" % ",".join(['%s' % x for x in pilotID]))
else:
condSQL.append("PilotID = '%s'" % pilotID)
if parentId:
if isinstance(parentId, list):
condSQL.append("ParentID IN (%s)" % ",".join(['%s' % x for x in parentId]))
else:
condSQL.append("ParentID = %s" % parentId)
if condSQL:
cmd = "%s WHERE %s" % (cmd, " AND ".join(condSQL))
result = self._query(cmd, conn=conn)
if not result['OK']:
return result
if not result['Value']:
msg = "No pilots found"
if pilotRef:
msg += " for PilotJobReference(s): %s" % pilotRef
if parentId:
msg += " with parent id: %s" % parentId
return S_ERROR(DErrno.EWMSNOPILOT, msg)
resDict = {}
pilotIDs = []
for row in result['Value']:
pilotDict = {}
for i in range(len(parameters)):
pilotDict[parameters[i]] = row[i]
if parameters[i] == 'PilotID':
pilotIDs.append(row[i])
resDict[row[0]] = pilotDict
result = self.getJobsForPilot(pilotIDs)
if not result['OK']:
return S_OK(resDict)
jobsDict = result['Value']
for pilotRef in resDict:
pilotInfo = resDict[pilotRef]
pilotID = pilotInfo['PilotID']
if pilotID in jobsDict:
pilotInfo['Jobs'] = jobsDict[pilotID]
return S_OK(resDict)
##########################################################################################
def setPilotDestinationSite(self, pilotRef, destination, conn=False):
""" Set the pilot agent destination site
"""
gridSite = 'Unknown'
res = getCESiteMapping(destination)
if res['OK'] and res['Value']:
gridSite = res['Value'][destination]
req = "UPDATE PilotAgents SET DestinationSite='%s', GridSite='%s' WHERE PilotJobReference='%s'"
req = req % (destination, gridSite, pilotRef)
return self._update(req, conn=conn)
##########################################################################################
def setPilotBenchmark(self, pilotRef, mark):
""" Set the pilot agent benchmark
"""
req = "UPDATE PilotAgents SET BenchMark='%f' WHERE PilotJobReference='%s'" % (mark, pilotRef)
result = self._update(req)
return result
##########################################################################################
def setAccountingFlag(self, pilotRef, mark='True'):
""" Set the pilot AccountingSent flag
"""
req = "UPDATE PilotAgents SET AccountingSent='%s' WHERE PilotJobReference='%s'" % (mark, pilotRef)
result = self._update(req)
return result
##########################################################################################
  def storePilotOutput(self, pilotRef, output, error):
    """ Store standard output and error for a pilot with pilotRef

        :param str pilotRef: pilot job reference
        :param str output: pilot standard output
        :param str error: pilot standard error
        :return: S_OK() / S_ERROR()
    """
    pilotID = self.__getPilotID(pilotRef)
    if not pilotID:
      return S_ERROR('Pilot reference not found %s' % pilotRef)
    # Escape both strings before embedding them in the INSERT statement
    result = self._escapeString(output)
    if not result['OK']:
      return S_ERROR('Failed to escape output string')
    e_output = result['Value']
    result = self._escapeString(error)
    if not result['OK']:
      return S_ERROR('Failed to escape error string')
    e_error = result['Value']
    req = "INSERT INTO PilotOutput (PilotID,StdOutput,StdError) VALUES (%d,%s,%s)" % (pilotID, e_output, e_error)
    result = self._update(req)
    if not result['OK']:
      return result
    # Flag the pilot record so clients know its output can be retrieved
    req = "UPDATE PilotAgents SET OutputReady='True' where PilotID=%d" % pilotID
    return self._update(req)
##########################################################################################
def getPilotOutput(self, pilotRef):
""" Retrieve standard output and error for pilot with pilotRef
"""
req = "SELECT StdOutput, StdError FROM PilotOutput,PilotAgents WHERE "
req += "PilotOutput.PilotID = PilotAgents.PilotID AND PilotAgents.PilotJobReference='%s'" % pilotRef
result = self._query(req)
if not result['OK']:
return result
else:
if result['Value']:
stdout = result['Value'][0][0]
error = result['Value'][0][1]
if stdout == '""':
stdout = ''
if error == '""':
error = ''
return S_OK({'StdOut': stdout, 'StdErr': error})
else:
return S_ERROR('PilotJobReference ' + pilotRef + ' not found')
##########################################################################################
  def __getPilotID(self, pilotRef):
    """ Get Pilot ID for the given pilot reference or a list of references

        :param pilotRef: a single reference (str) or a list of references
        :return: for a single reference: int pilot ID, 0 when not found or on
                 query failure; for a list: list of IDs (possibly empty)
    """
    if isinstance(pilotRef, six.string_types):
      req = "SELECT PilotID from PilotAgents WHERE PilotJobReference='%s'" % pilotRef
      result = self._query(req)
      if not result['OK']:
        return 0
      else:
        if result['Value']:
          return int(result['Value'][0][0])
        return 0
    else:
      refString = ','.join(["'" + ref + "'" for ref in pilotRef])
      req = "SELECT PilotID from PilotAgents WHERE PilotJobReference in ( %s )" % refString
      result = self._query(req)
      if not result['OK']:
        return []
      if result['Value']:
        return [x[0] for x in result['Value']]
      return []
##########################################################################################
def setJobForPilot(self, jobID, pilotRef, site=None, updateStatus=True):
""" Store the jobID of the job executed by the pilot with reference pilotRef
"""
pilotID = self.__getPilotID(pilotRef)
if pilotID:
if updateStatus:
reason = 'Report from job %d' % int(jobID)
result = self.setPilotStatus(pilotRef, status='Running', statusReason=reason,
gridSite=site)
if not result['OK']:
return result
req = "INSERT INTO JobToPilotMapping (PilotID,JobID,StartTime) VALUES (%d,%d,UTC_TIMESTAMP())" % (pilotID, jobID)
return self._update(req)
else:
return S_ERROR('PilotJobReference ' + pilotRef + ' not found')
##########################################################################################
def setCurrentJobID(self, pilotRef, jobID):
""" Set the pilot agent current DIRAC job ID
"""
req = "UPDATE PilotAgents SET CurrentJobID=%d WHERE PilotJobReference='%s'" % (jobID, pilotRef)
return self._update(req)
##########################################################################################
def getJobsForPilot(self, pilotID):
""" Get IDs of Jobs that were executed by a pilot
"""
cmd = "SELECT pilotID,JobID FROM JobToPilotMapping "
if isinstance(pilotID, list):
cmd = cmd + " WHERE pilotID IN (%s)" % ",".join(['%s' % x for x in pilotID])
else:
cmd = cmd + " WHERE pilotID = %s" % pilotID
result = self._query(cmd)
if not result['OK']:
return result
resDict = {}
for row in result['Value']:
if not row[0] in resDict:
resDict[row[0]] = []
resDict[row[0]].append(row[1])
return S_OK(resDict)
##########################################################################################
def getPilotsForTaskQueue(self, taskQueueID, gridType=None, limit=None):
""" Get IDs of Pilot Agents that were submitted for the given taskQueue,
specify optionally the grid type, results are sorted by Submission time
an Optional limit can be set.
"""
if gridType:
req = "SELECT PilotID FROM PilotAgents WHERE TaskQueueID=%s AND GridType='%s' " % (taskQueueID, gridType)
else:
req = "SELECT PilotID FROM PilotAgents WHERE TaskQueueID=%s " % taskQueueID
req += 'ORDER BY SubmissionTime DESC '
if limit:
req += 'LIMIT %s' % limit
result = self._query(req)
if not result['OK']:
return result
else:
if result['Value']:
pilotList = [x[0] for x in result['Value']]
return S_OK(pilotList)
return S_ERROR('PilotJobReferences for TaskQueueID %s not found' % taskQueueID)
##########################################################################################
def getPilotsForJobID(self, jobID):
""" Get ID of Pilot Agent that is running a given JobID
"""
result = self._query('SELECT PilotID FROM JobToPilotMapping WHERE JobID=%s' % jobID)
if not result['OK']:
self.log.error("getPilotsForJobID failed", result['Message'])
return result
if result['Value']:
pilotList = [x[0] for x in result['Value']]
return S_OK(pilotList)
self.log.warn('PilotID for job %d not found: not matched yet?' % jobID)
return S_OK([])
##########################################################################################
def getPilotCurrentJob(self, pilotRef):
""" The job ID currently executed by the pilot
"""
req = "SELECT CurrentJobID FROM PilotAgents WHERE PilotJobReference='%s' " % pilotRef
result = self._query(req)
if not result['OK']:
return result
if result['Value']:
jobID = int(result['Value'][0][0])
return S_OK(jobID)
self.log.warn('Current job ID for pilot %s is not known: pilot did not match jobs yet?' % pilotRef)
return S_OK()
##########################################################################################
# FIXME: investigate it getPilotSummaryShort can replace this method
  def getPilotSummary(self, startdate='', enddate=''):
    """ Get summary of the pilot jobs status by site

        :param startdate: optional lower bound on SubmissionTime
        :param enddate: optional upper bound on SubmissionTime
        :return: S_OK({site: {status: count, ...}, 'Total': {...}}) / S_ERROR
    """
    st_list = ['Aborted', 'Running', 'Done', 'Submitted', 'Ready', 'Scheduled', 'Waiting']
    summary_dict = {}
    summary_dict['Total'] = {}
    # One GROUP BY query per status, accumulating per-site and total counts
    for st in st_list:
      summary_dict['Total'][st] = 0
      req = "SELECT DestinationSite,count(DestinationSite) FROM PilotAgents " + \
            "WHERE Status='%s' " % st
      if startdate:
        req = req + " AND SubmissionTime >= '%s'" % startdate
      if enddate:
        req = req + " AND SubmissionTime <= '%s'" % enddate
      req = req + " GROUP BY DestinationSite"
      result = self._query(req)
      if not result['OK']:
        return result
      else:
        if result['Value']:
          for res in result['Value']:
            site = res[0]
            count = res[1]
            if site:
              if site not in summary_dict:
                summary_dict[site] = {}
              summary_dict[site][st] = int(count)
              summary_dict['Total'][st] += int(count)
    # Get aborted pilots in the last hour, day
    req = "SELECT DestinationSite,count(DestinationSite) FROM PilotAgents WHERE Status='Aborted' AND "
    reqDict = {}
    reqDict['Aborted_Hour'] = req + " LastUpdateTime >= DATE_SUB(UTC_TIMESTAMP(), INTERVAL 1 HOUR)"
    reqDict['Aborted_Day'] = req + " LastUpdateTime >= DATE_SUB(UTC_TIMESTAMP(), INTERVAL 1 DAY)"
    for key, req in reqDict.items():
      result = self._query(req)
      if not result['OK']:
        break
      if result['Value']:
        for res in result['Value']:
          site = res[0]
          count = res[1]
          if site:
            # Only sites already seen above are updated with the recent counts
            if site in summary_dict:
              summary_dict[site][key] = int(count)
    return S_OK(summary_dict)
# def getPilotSummaryShort( self, startTimeWindow = None, endTimeWindow = None, ce = '' ):
# """
# Spin off the method getPilotSummary. It is doing things in such a way that
# do not make much sense. This method returns the pilots that were updated in the
# time window [ startTimeWindow, endTimeWindow ), if they are present.
# """
#
# sqlSelect = 'SELECT DestinationSite,Status,count(Status) FROM PilotAgents'
#
# whereSelect = []
#
# if startTimeWindow is not None:
# whereSelect.append( ' LastUpdateTime >= "%s"' % startTimeWindow )
# if endTimeWindow is not None:
# whereSelect.append( ' LastUpdateTime < "%s"' % endTimeWindow )
# if ce:
# whereSelect.append( ' DestinationSite = "%s"' % ce )
#
# if whereSelect:
# sqlSelect += ' WHERE'
# sqlSelect += ' AND'.join( whereSelect )
#
# sqlSelect += ' GROUP BY DestinationSite,Status'
#
# resSelect = self._query( sqlSelect )
# if not resSelect[ 'OK' ]:
# return resSelect
#
# result = { 'Total' : collections.defaultdict( int ) }
#
# for row in resSelect[ 'Value' ]:
#
# ceName, statusName, statusCount = row
#
# if not ceName in result:
# result[ ceName ] = {}
# result[ ceName ][ statusName ] = int( statusCount )
#
# result[ 'Total' ][ statusName ] += int( statusCount )
#
# return S_OK( result )
##########################################################################################
def getGroupedPilotSummary(self, selectDict, columnList):
"""
The simplified pilot summary based on getPilotSummaryWeb method. It calculates pilot efficiency
based on the same algorithm as in the Web version, basically takes into account Done and
Aborted pilots only from the last day. The selection is done entirely in SQL.
:param dict selectDict: A dictionary to pass additional conditions to select statements, i.e.
it allows to define start time for Done and Aborted Pilots.
:param list columnList: A list of column to consider when grouping to calculate efficiencies.
e.g. ['GridSite', 'DestinationSite'] is used to calculate efficiencies
for sites and CEs. If we want to add an OwnerGroup it would be:
['GridSite', 'DestinationSite', 'OwnerGroup'].
:return: S_OK/S_ERROR with a dict containing the ParameterNames and Records lists.
"""
table = PivotedPilotSummaryTable(columnList)
sqlQuery = table.buildSQL()
self.logger.debug("SQL query : ")
self.logger.debug("\n" + sqlQuery)
res = self._query(sqlQuery)
if not res['OK']:
return res
self.logger.info(res)
# TODO add site or CE status, while looping
rows = []
columns = table.getColumnList()
try:
groupIndex = columns.index('OwnerGroup')
# should probably change a column name to VO here as well to avoid confusion
except ValueError:
groupIndex = None
result = {'ParameterNames': columns}
multiple = False
# If not grouped by CE:
if 'CE' not in columns:
multiple = True
for row in res['Value']:
lrow = list(row)
if groupIndex:
lrow[groupIndex] = getVOForGroup(row[groupIndex])
if multiple:
lrow.append('Multiple')
for index, value in enumerate(row):
if isinstance(value, decimal.Decimal):
lrow[index] = float(value)
# get the value of the Total column
if 'Total' in columnList:
total = lrow[columnList.index('Total')]
else:
total = 0
if 'PilotJobEff' in columnList:
eff = lrow[columnList.index('PilotJobEff')]
else:
eff = 0.
lrow.append(self._getElementStatus(total, eff))
rows.append(list(lrow))
# If not grouped by CE and more then 1 CE in the result:
if multiple:
columns.append('CE') # 'DestinationSite' re-mapped to 'CE' already
columns.append('Status')
result['Records'] = rows
return S_OK(result)
def _getElementStatus(self, total, eff):
"""
Assign status to a site or resource based on pilot efficiency.
:param total: number of pilots to assign the status, otherwise 'Idle'
:param eff: efficiency in %
:return: status string
"""
# Evaluate the quality status of the Site/CE
if total > 10:
if eff < 25.:
return 'Bad'
elif eff < 60.:
return 'Poor'
elif eff < 85.:
return 'Fair'
else:
return 'Good'
else:
return 'Idle'
  def getPilotSummaryWeb(self, selectDict, sortList, startItem, maxItems):
    """ Get summary of the pilot jobs status by CE/site in a standard structure

        NOTE(review): selectDict is mutated in place (keys are deleted and
        'Status'/'CurrentJobID' are overwritten for the auxiliary queries) —
        callers should not rely on it afterwards.

        :param dict selectDict: selection criteria (special keys:
                                LastUpdateTime, GridSite, Status, ExpandSite)
        :param sortList: unused here
        :param int startItem: first record index for pagination
        :param int maxItems: page size; 0 returns all records
        :return: S_OK(dict) with TotalRecords, ParameterNames, Records, Extras
    """
    stateNames = ['Submitted', 'Ready', 'Scheduled', 'Waiting', 'Running', 'Done', 'Aborted', 'Failed']
    allStateNames = stateNames + ['Done_Empty', 'Aborted_Hour']
    paramNames = ['Site', 'CE'] + allStateNames
    # Extract the special selection keys, normalising scalars to lists
    last_update = None
    if 'LastUpdateTime' in selectDict:
      last_update = selectDict['LastUpdateTime']
      del selectDict['LastUpdateTime']
    site_select = []
    if 'GridSite' in selectDict:
      site_select = selectDict['GridSite']
      if not isinstance(site_select, list):
        site_select = [site_select]
      del selectDict['GridSite']
    status_select = []
    if 'Status' in selectDict:
      status_select = selectDict['Status']
      if not isinstance(status_select, list):
        status_select = [status_select]
      del selectDict['Status']
    expand_site = ''
    if 'ExpandSite' in selectDict:
      expand_site = selectDict['ExpandSite']
      site_select = [expand_site]
      del selectDict['ExpandSite']
    # Get all the data from the database with various selections
    result = self.getCounters('PilotAgents',
                              ['GridSite', 'DestinationSite', 'Status'],
                              selectDict, newer=last_update, timeStamp='LastUpdateTime')
    if not result['OK']:
      return result
    # Aborted pilots within the last hour
    last_update = Time.dateTime() - Time.hour
    selectDict['Status'] = 'Aborted'
    resultHour = self.getCounters('PilotAgents',
                                  ['GridSite', 'DestinationSite', 'Status'],
                                  selectDict, newer=last_update, timeStamp='LastUpdateTime')
    if not resultHour['OK']:
      return resultHour
    # Aborted/Done pilots within the last day
    last_update = Time.dateTime() - Time.day
    selectDict['Status'] = ['Aborted', 'Done']
    resultDay = self.getCounters('PilotAgents',
                                 ['GridSite', 'DestinationSite', 'Status'],
                                 selectDict, newer=last_update, timeStamp='LastUpdateTime')
    if not resultDay['OK']:
      return resultDay
    # Done pilots that never picked up a job (CurrentJobID == 0)
    selectDict['CurrentJobID'] = 0
    selectDict['Status'] = 'Done'
    resultDayEmpty = self.getCounters('PilotAgents',
                                      ['GridSite', 'DestinationSite', 'Status'],
                                      selectDict, newer=last_update, timeStamp='LastUpdateTime')
    if not resultDayEmpty['OK']:
      return resultDayEmpty
    ceMap = {}
    resMap = getCESiteMapping()
    if resMap['OK']:
      ceMap = resMap['Value']
    # Sort out different counters
    resultDict = {}
    resultDict['Unknown'] = {}
    for attDict, count in result['Value']:
      site = attDict['GridSite']
      ce = attDict['DestinationSite']
      state = attDict['Status']
      # Recover the site from the CE mapping when it was not recorded
      if site == 'Unknown' and ce != "Unknown" and ce != "Multiple" and ce in ceMap:
        site = ceMap[ce]
      if site not in resultDict:
        resultDict[site] = {}
      if ce not in resultDict[site]:
        resultDict[site][ce] = {}
        for p in allStateNames:
          resultDict[site][ce][p] = 0
      resultDict[site][ce][state] = count
    # Overwrite Done/Aborted with the last-day figures
    for attDict, count in resultDay['Value']:
      site = attDict['GridSite']
      ce = attDict['DestinationSite']
      state = attDict['Status']
      if site == 'Unknown' and ce != "Unknown" and ce in ceMap:
        site = ceMap[ce]
      if state == "Done":
        resultDict[site][ce]["Done"] = count
      if state == "Aborted":
        resultDict[site][ce]["Aborted"] = count
    for attDict, count in resultDayEmpty['Value']:
      site = attDict['GridSite']
      ce = attDict['DestinationSite']
      state = attDict['Status']
      if site == 'Unknown' and ce != "Unknown" and ce in ceMap:
        site = ceMap[ce]
      if state == "Done":
        resultDict[site][ce]["Done_Empty"] = count
    for attDict, count in resultHour['Value']:
      site = attDict['GridSite']
      ce = attDict['DestinationSite']
      state = attDict['Status']
      if site == 'Unknown' and ce != "Unknown" and ce in ceMap:
        site = ceMap[ce]
      if state == "Aborted":
        resultDict[site][ce]["Aborted_Hour"] = count
    # Build one record per CE (or a 'Multiple' row per site) with efficiencies
    records = []
    siteSumDict = {}
    for site in resultDict:
      sumDict = {}
      for state in allStateNames:
        if state not in sumDict:
          sumDict[state] = 0
      sumDict['Total'] = 0
      for ce in resultDict[site]:
        itemList = [site, ce]
        total = 0
        for state in allStateNames:
          itemList.append(resultDict[site][ce][state])
          sumDict[state] += resultDict[site][ce][state]
          if state == "Done":
            done = resultDict[site][ce][state]
          if state == "Done_Empty":
            empty = resultDict[site][ce][state]
          if state == "Aborted":
            aborted = resultDict[site][ce][state]
          # Aborted_Hour and Done_Empty are informational, not part of Total
          if state != "Aborted_Hour" and state != "Done_Empty":
            total += resultDict[site][ce][state]
        sumDict['Total'] += total
        # Add the total number of pilots seen in the last day
        itemList.append(total)
        # Add pilot submission efficiency evaluation
        if (done - empty) > 0:
          eff = done / (done - empty)
        elif done == 0:
          eff = 0.
        elif empty == done:
          eff = 99.
        else:
          eff = 0.
        itemList.append('%.2f' % eff)
        # Add pilot job efficiency evaluation
        if total > 0:
          eff = (total - aborted) / total * 100
        else:
          eff = 100.
        itemList.append('%.2f' % eff)
        # Evaluate the quality status of the CE
        if total > 10:
          if eff < 25.:
            itemList.append('Bad')
          elif eff < 60.:
            itemList.append('Poor')
          elif eff < 85.:
            itemList.append('Fair')
          else:
            itemList.append('Good')
        else:
          itemList.append('Idle')
        if len(resultDict[site]) == 1 or expand_site:
          records.append(itemList)
      if len(resultDict[site]) > 1 and not expand_site:
        itemList = [site, 'Multiple']
        for state in allStateNames + ['Total']:
          if state in sumDict:
            itemList.append(sumDict[state])
          else:
            itemList.append(0)
        done = sumDict["Done"]
        empty = sumDict["Done_Empty"]
        aborted = sumDict["Aborted"]
        total = sumDict["Total"]
        # Add pilot submission efficiency evaluation
        if (done - empty) > 0:
          eff = done / (done - empty)
        elif done == 0:
          eff = 0.
        elif empty == done:
          eff = 99.
        else:
          eff = 0.
        itemList.append('%.2f' % eff)
        # Add pilot job efficiency evaluation
        if total > 0:
          eff = (total - aborted) / total * 100
        else:
          eff = 100.
        itemList.append('%.2f' % eff)
        # Evaluate the quality status of the Site
        if total > 10:
          if eff < 25.:
            itemList.append('Bad')
          elif eff < 60.:
            itemList.append('Poor')
          elif eff < 85.:
            itemList.append('Fair')
          else:
            itemList.append('Good')
        else:
          itemList.append('Idle')
        records.append(itemList)
      for state in allStateNames + ['Total']:
        if state not in siteSumDict:
          siteSumDict[state] = sumDict[state]
        else:
          siteSumDict[state] += sumDict[state]
    # Perform site selection
    if site_select:
      new_records = []
      for r in records:
        if r[0] in site_select:
          new_records.append(r)
      records = new_records
    # Perform status selection
    if status_select:
      new_records = []
      for r in records:
        # r[14] is the per-record Status column computed above
        if r[14] in status_select:
          new_records.append(r)
      records = new_records
    # Get the Site Mask data
    result = SiteStatus().getUsableSites()
    if result['OK']:
      siteMask = result['Value']
      for r in records:
        if r[0] in siteMask:
          r.append('Yes')
        else:
          r.append('No')
    else:
      for r in records:
        r.append('Unknown')
    finalDict = {}
    finalDict['TotalRecords'] = len(records)
    finalDict['ParameterNames'] = paramNames + \
        ['Total', 'PilotsPerJob', 'PilotJobEff', 'Status', 'InMask']
    # Return all the records if maxItems == 0 or the specified number otherwise
    if maxItems:
      finalDict['Records'] = records[startItem:startItem + maxItems]
    else:
      finalDict['Records'] = records
    # Global summary over all sites, exposed in 'Extras'
    done = siteSumDict["Done"]
    empty = siteSumDict["Done_Empty"]
    aborted = siteSumDict["Aborted"]
    total = siteSumDict["Total"]
    # Add pilot submission efficiency evaluation
    if (done - empty) > 0:
      eff = done / (done - empty)
    elif done == 0:
      eff = 0.
    elif empty == done:
      eff = 99.
    else:
      eff = 0.
    siteSumDict['PilotsPerJob'] = '%.2f' % eff
    # Add pilot job efficiency evaluation
    if total > 0:
      eff = (total - aborted) / total * 100
    else:
      eff = 100.
    siteSumDict['PilotJobEff'] = '%.2f' % eff
    # Evaluate the overall quality status
    if total > 100:
      if eff < 25.:
        siteSumDict['Status'] = 'Bad'
      elif eff < 60.:
        siteSumDict['Status'] = 'Poor'
      elif eff < 85.:
        siteSumDict['Status'] = 'Fair'
      else:
        siteSumDict['Status'] = 'Good'
    else:
      siteSumDict['Status'] = 'Idle'
    finalDict['Extras'] = siteSumDict
    return S_OK(finalDict)
##########################################################################################
def getPilotMonitorSelectors(self):
""" Get distinct values for the Pilot Monitor page selectors
"""
paramNames = ['OwnerDN', 'OwnerGroup', 'GridType', 'Broker',
'Status', 'DestinationSite', 'GridSite']
resultDict = {}
for param in paramNames:
result = self.getDistinctAttributeValues('PilotAgents', param)
if result['OK']:
resultDict[param] = result['Value']
else:
resultDict = []
if param == "OwnerDN":
userList = []
for dn in result['Value']:
resultUser = getUsernameForDN(dn)
if resultUser['OK']:
userList.append(resultUser['Value'])
resultDict["Owner"] = userList
return S_OK(resultDict)
##########################################################################################
  def getPilotMonitorWeb(self, selectDict, sortList, startItem, maxItems):
    """ Get summary of the pilot job information in a standard structure

        NOTE(review): selectDict is mutated in place (special keys are deleted,
        'Owner' is translated into 'OwnerDN').

        :param dict selectDict: selection criteria (special keys: Owner,
                                FromDate, ToDate, LastUpdateTime)
        :param list sortList: at most one (field, direction) pair
        :param int startItem: first record index for pagination
        :param int maxItems: page size
        :return: S_OK(dict) with TotalRecords, ParameterNames, Records
    """
    resultDict = {}
    if 'LastUpdateTime' in selectDict:
      del selectDict['LastUpdateTime']
    # Translate user names into the DN list used by the DB selection
    if 'Owner' in selectDict:
      userList = selectDict['Owner']
      if not isinstance(userList, list):
        userList = [userList]
      dnList = []
      for uName in userList:
        uList = getDNForUsername(uName)['Value']
        dnList += uList
      selectDict['OwnerDN'] = dnList
      del selectDict['Owner']
    startDate = selectDict.get('FromDate', None)
    if startDate:
      del selectDict['FromDate']
    # For backward compatibility
    if startDate is None:
      startDate = selectDict.get('LastUpdateTime', None)
      if startDate:
        del selectDict['LastUpdateTime']
    endDate = selectDict.get('ToDate', None)
    if endDate:
      del selectDict['ToDate']
    # Sorting instructions. Only one for the moment.
    if sortList:
      orderAttribute = sortList[0][0] + ":" + sortList[0][1]
    else:
      orderAttribute = None
    # Select pilots for the summary
    result = self.selectPilots(
        selectDict,
        orderAttribute=orderAttribute,
        newer=startDate,
        older=endDate,
        timeStamp='LastUpdateTime')
    if not result['OK']:
      return S_ERROR('Failed to select pilots: ' + result['Message'])
    pList = result['Value']
    nPilots = len(pList)
    resultDict['TotalRecords'] = nPilots
    if nPilots == 0:
      return S_OK(resultDict)
    # Clip the requested page to the available records
    ini = startItem
    last = ini + maxItems
    if ini >= nPilots:
      return S_ERROR('Item number out of range')
    if last > nPilots:
      last = nPilots
    pilotList = pList[ini:last]
    paramNames = ['PilotJobReference', 'OwnerDN', 'OwnerGroup', 'GridType', 'Broker',
                  'Status', 'DestinationSite', 'BenchMark', 'ParentID',
                  'SubmissionTime', 'PilotID', 'LastUpdateTime', 'CurrentJobID', 'TaskQueueID',
                  'GridSite']
    result = self.getPilotInfo(pilotList, paramNames=paramNames)
    if not result['OK']:
      return S_ERROR('Failed to get pilot info: ' + result['Message'])
    pilotDict = result['Value']
    records = []
    for pilot in pilotList:
      parList = []
      for parameter in paramNames:
        # Integers are kept as-is; everything else is stringified
        if not isinstance(pilotDict[pilot][parameter], six.integer_types):
          parList.append(str(pilotDict[pilot][parameter]))
        else:
          parList.append(pilotDict[pilot][parameter])
        if parameter == 'GridSite':
          gridSite = pilotDict[pilot][parameter]
      # If the Grid Site is unknown try to recover it in the last moment
      if gridSite == "Unknown":
        ce = pilotDict[pilot]['DestinationSite']
        result = getCESiteMapping(ce)
        if result['OK']:
          gridSite = result['Value'].get(ce)
          del parList[-1]
          parList.append(gridSite)
      records.append(parList)
    resultDict['ParameterNames'] = paramNames
    resultDict['Records'] = records
    return S_OK(resultDict)
class PivotedPilotSummaryTable:
    """
    The class creates a 'pivoted' table by combining records with the same group
    of self.columnList into a single row. It allows an easy calculation of pilot
    efficiencies.
    """

    # Pilot states; each becomes one aggregated count column in the pivoted table.
    pstates = ['Submitted', 'Done', 'Failed', 'Aborted',
               'Running', 'Waiting', 'Scheduled', 'Ready']

    def __init__(self, columnList):
        """
        Initialise a table with columns to be grouped by.

        :param columnList: i.e. ['GridSite', 'DestinationSite']
        :return:
        """
        self.columnList = columnList
        # we want 'Site' and 'CE' in the final result
        colMap = {'GridSite': 'Site', 'DestinationSite': 'CE'}
        self._columns = [colMap.get(val, val) for val in columnList]
        self._columns += self.pstates  # MySQL._query() does not give us column names, sadly.

    def buildSQL(self, selectDict=None):
        """
        Build an SQL query to create a table with all status counts in one row,
        ("pivoted") grouped by columns in the column list.

        :param dict selectDict: currently unused; kept for interface compatibility
        :return: SQL query string
        """
        # Only pilots finished within the last day (or still active) are counted.
        lastUpdate = Time.dateTime() - Time.day
        pvtable = 'pivoted'
        # Inner query: per-(group, Status) counts, plus a count of pilots that
        # never picked up a job (CurrentJobID=0).
        innerGroupBy = "(SELECT %s, Status,\n " \
                       "count(CASE WHEN CurrentJobID=0 THEN 1 END) AS Empties," \
                       " count(*) AS qty FROM PilotAgents\n " \
                       "WHERE Status NOT IN ('Done', 'Aborted') OR (Status in ('Done', 'Aborted') \n" \
                       " AND \n" \
                       " LastUpdateTime > '%s')" \
                       " GROUP by %s, Status)\n AS %s" % (
                           _quotedList(self.columnList), lastUpdate,
                           _quotedList(self.columnList), pvtable)

        # pivoted table: combine records with the same group of self.columnList into a single row.
        pivotedQuery = "SELECT %s,\n" % ', '.join([pvtable + '.' + item for item in self.columnList])
        lineTemplate = " SUM(if (pivoted.Status={state!r}, pivoted.qty, 0)) AS {state}"
        pivotedQuery += ',\n'.join(lineTemplate.format(state=state) for state in self.pstates)
        pivotedQuery += ",\n SUM(if (%s.Status='Done', %s.Empties,0)) AS Done_Empty,\n" \
                        " SUM(%s.qty) AS Total " \
                        "FROM\n" % (pvtable, pvtable, pvtable)

        outerGroupBy = " GROUP BY %s) \nAS pivotedEff;" % _quotedList(self.columnList)

        # add efficiency columns using aliases defined in the pivoted table
        effCase = "(CASE\n WHEN pivotedEff.Done - pivotedEff.Done_Empty > 0 \n" \
                  " THEN pivotedEff.Done/(pivotedEff.Done-pivotedEff.Done_Empty) \n" \
                  " WHEN pivotedEff.Done=0 THEN 0 \n" \
                  " WHEN pivotedEff.Done=pivotedEff.Done_Empty \n" \
                  " THEN 99.0 ELSE 0.0 END) AS PilotsPerJob,\n" \
                  " (pivotedEff.Total - pivotedEff.Aborted)/pivotedEff.Total*100.0 AS PilotJobEff \nFROM \n("
        effSelectTemplate = " CAST(pivotedEff.{state} AS UNSIGNED) AS {state} "
        # now select the columns + states:
        pivotedEff = "SELECT %s,\n" % ', '.join(['pivotedEff' + '.' + item for item in self.columnList]) + \
                     ', '.join(effSelectTemplate.format(state=state) for state in self.pstates + ['Total']) + ", \n"

        finalQuery = pivotedEff + effCase + pivotedQuery + innerGroupBy + outerGroupBy

        # Bug fix: extend the column list at most once, so repeated buildSQL()
        # calls no longer keep appending duplicate column names.
        for col in ('Total', 'PilotsPerJob', 'PilotJobEff'):
            if col not in self._columns:
                self._columns.append(col)
        return finalQuery

    def getColumnList(self):
        """Return the column names of the pivoted result table."""
        return self._columns
|
gpl-3.0
| 8,148,434,752,963,911,000
| 34.197874
| 119
| 0.572793
| false
| 3.855876
| false
| false
| false
|
jtotto/sooper-jack-midi-looper
|
src/GUI/jack_midi_looper_gui/engine_manager.py
|
1
|
10951
|
# JACK MIDI LOOPER
# Copyright (C) 2014 Joshua Otto
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import liblo
import logging
from jack_midi_looper_gui.models import MIDIMappingInfo
from jack_midi_looper_gui.subject import Subject
import subprocess
import threading
import time
class IEngineManager(Subject):
    """Interface for the engine manager."""

    #: Subscription keys every engine manager exposes.
    _KEYS = ("loops", "mappings", "shutdown")

    def __init__(self):
        """Construct an engine manager and register its subscription keys."""
        Subject.__init__(self)
        for key in self._KEYS:
            self.add_key(key)

    @staticmethod
    def perform_notify(key, callback, data):
        """
        Implement the Subject's notify functionality.

        There is NO guarantee that the provided callbacks will be invoked from
        the same thread, so they should be written accordingly.
        """
        if key == "shutdown":
            callback()
            return
        # "loops" and "mappings" carry a (change_type, change_content) pair.
        change_type, change_content = data
        callback(change_type, change_content)

    def initialize_subscribers(self):
        """Retrieve the initial state of the engine."""
        raise NotImplementedError

    def cleanup(self):
        """Wrap up interaction with the engine."""
        raise NotImplementedError

    def new_loop(self, name):
        """
        Request that the engine create a new loop.

        Args:
            name (str): name of the loop to be created.
        Returns:
            void
        """
        raise NotImplementedError

    def remove_loops(self, names):
        """
        Request that the engine remove the given loops.

        Args:
            names (list[str]): string names of the loops to be removed.
        Returns:
            void
        """
        raise NotImplementedError

    def new_mapping(self, mapping_info):
        """
        Request that the engine create a new MIDI mapping with the given
        characteristics.

        Args:
            mapping_info (MIDIMappingInfo): the mapping for the engine to create.
        Returns:
            void
        """
        raise NotImplementedError

    def remove_mappings(self, mapping_infos):
        """
        Request that the engine remove all of the specified MIDI mappings.

        Args:
            mapping_infos (list[MIDIMappingInfo]): mappings to be removed.
        Returns:
            void
        """
        raise NotImplementedError
def IEngineManagerFactory(engine_port, engine_host, our_port, fail_on_not_found,
                          quit_on_shutdown):
    """Simply construct an appropriate IEngineManager."""
    return EngineManager(
        engine_port, engine_host, our_port, fail_on_not_found, quit_on_shutdown)
class EngineManager(IEngineManager):
    """Default implementation of engine management using OSC."""

    class NoEngineError(Exception):
        """Raised when no engine instance answers our ping."""
        pass

    def __init__(self, engine_port, engine_host, our_port, fail_on_not_found,
                 quit_on_shutdown):
        """
        Initialize by establishing communication with an existing engine, or
        spawning a new one if required.

        Args:
            engine_port (int): The port on which to communicate with the engine.
            engine_host (str): The host on which to look for the engine.
            our_port (int): The port on which our OSC server communicates, or
                None to let liblo pick one.
            fail_on_not_found (bool): Determines whether or not we should attempt
                to spawn an engine instance in the case that the given one does
                not respond.
            quit_on_shutdown (bool): Whether cleanup() should tell the engine to
                quit instead of just unsubscribing.

        Raises:
            EngineManager.NoEngineError: no engine responded (and, when
                spawning was allowed, the spawned engine did not respond either).
        """
        IEngineManager.__init__(self)
        self._quit_on_shutdown = quit_on_shutdown
        try:
            if our_port is None:
                self._server_thread = liblo.ServerThread()
            else:
                self._server_thread = liblo.ServerThread(our_port)
        except liblo.ServerError:
            print( "Problem setting up OSC!" )
            raise

        # Register OSC handlers before starting the server thread so no
        # message can arrive unhandled.
        self._server_thread.add_method( "/pingack", "ssi", self._pingack_callback )
        self._server_thread.add_method(
            "/loop/update", "ss", self._loop_change_callback )
        self._server_thread.add_method(
            "/mapping/update", "ss", self._mapping_change_callback )
        self._server_thread.add_method(
            "/shutdown", "ss", self._shutdown_callback )
        self._server_thread.start()
        print( "GUI OSC Server at {0}".format( self._server_thread.get_url() ) )

        # _received_pingack is written from the OSC server thread and read
        # here, hence the lock.
        self._received_pingack = False
        self._pingack_lock = threading.Lock()
        self._engine_address = liblo.Address( engine_host, engine_port )
        liblo.send(
            self._engine_address, "/ping", self._server_thread.get_url(), "/pingack" )

        # Wait for the pingack.
        time.sleep( 0.7 )
        with self._pingack_lock:
            received = self._received_pingack

        if not received:
            if fail_on_not_found:
                # TODO: something a little friendlier
                raise EngineManager.NoEngineError
            # No engine answered: spawn one locally and ping it again.
            subprocess.Popen( ["jack_midi_looper", "-p", str( engine_port )] )
            self._engine_address = liblo.Address( "localhost", engine_port )
            time.sleep( 0.3 )  # Maybe a bit hacky...
            liblo.send( self._engine_address, "/ping", self._server_thread.get_url(),
                "/pingack" )
            time.sleep( 0.7 )
            # Bug fix: read the flag inside the lock but raise outside it, so
            # the lock is no longer left permanently held on failure.
            with self._pingack_lock:
                received = self._received_pingack
            if not received:
                raise EngineManager.NoEngineError

    def initialize_subscribers( self ):
        """
        Requests that the engine send us update information necessary to bring us up
        to its current state.
        """
        liblo.send( self._engine_address, "/loop_list",
            self._server_thread.get_url(), "/loop/update" )
        liblo.send( self._engine_address, "/midi_binding_list",
            self._server_thread.get_url(), "/mapping/update" )
        liblo.send( self._engine_address, "/register_auto_update", "loops",
            self._server_thread.get_url(), "/loop/update" )
        liblo.send( self._engine_address, "/register_auto_update", "mappings",
            self._server_thread.get_url(), "/mapping/update" )
        liblo.send( self._engine_address, "/register_auto_update", "shutdown",
            self._server_thread.get_url(), "/shutdown" )

    def cleanup( self ):
        """
        Conclude interaction with the engine by unsubscribing and potentially
        quitting.
        """
        if self._quit_on_shutdown:
            liblo.send( self._engine_address, "/quit" )
        else:
            liblo.send( self._engine_address, "/unregister_auto_update", "loops",
                self._server_thread.get_url(), "/loop/update" )
            liblo.send( self._engine_address, "/unregister_auto_update", "mappings",
                self._server_thread.get_url(), "/mapping/update" )

    def _pingack_callback( self, path, args ):
        # Runs on the OSC server thread; only the flag update needs the lock.
        host_url, version, loopcount = args
        print( "Received pingack from engine on host {0} running version {1}."
            .format( host_url, version ) )
        print( "The engine currently has {0} loops.".format( loopcount ) )
        with self._pingack_lock:
            self._received_pingack = True

    def _shutdown_callback( self, path, args ):
        # Forward engine shutdown notifications to our subscribers.
        self.notify( "shutdown", args )

    def _loop_change_callback( self, path, args ):
        logging.info( "loop change callback" )
        for arg in args:
            logging.info( " %s", arg )
        self.notify( "loops", args )

    # Wire format <-> UI label tables for MIDI event types and loop actions.
    type_serializations = {
        "Note On":"on",
        "Note Off":"off",
        "CC On":"cc_on",
        "CC Off":"cc_off"
    }

    type_deserializations = {
        "on":"Note On",
        "off":"Note Off",
        "cc_on":"CC On",
        "cc_off":"CC Off"
    }

    action_serializations = {
        "Toggle Playback":"toggle_playback",
        "Toggle Recording":"toggle_recording"
    }

    action_deserializations = {
        "toggle_playback":"Toggle Playback",
        "toggle_recording":"Toggle Recording"
    }

    @staticmethod
    def _serialize_mapping( mapping_info ):
        """Encode a MIDIMappingInfo as the engine's space-separated wire string."""
        return "{0} {1} {2} {3} {4}".format( mapping_info.channel,
            EngineManager.type_serializations[mapping_info.midi_type], mapping_info.value,
            EngineManager.action_serializations[mapping_info.loop_action], mapping_info.loop_name )

    @staticmethod
    def _deserialize_mapping( mapping_serialization ):
        """Decode the engine's wire string back into a MIDIMappingInfo."""
        data = mapping_serialization.split( " " )
        channel = int( data[0] )
        midi_type = EngineManager.type_deserializations[data[1]]
        value = int( data[2] )
        loop_action = EngineManager.action_deserializations[data[3]]
        loop_name = data[4]
        return MIDIMappingInfo( channel, midi_type, value, loop_name, loop_action )

    def _mapping_change_callback( self, path, args ):
        logging.info( "mapping change callback" )
        for arg in args:
            logging.info( " %s", arg )
        change, serialization = args
        deserialized = ( change, self._deserialize_mapping( serialization ) )
        self.notify( "mappings", deserialized )

    def new_loop( self, name ):
        """Ask the engine to create a loop named *name*."""
        liblo.send( self._engine_address, "/loop_add", name )

    def remove_loops( self, names ):
        """Ask the engine to delete each loop in *names*."""
        for name in names:
            liblo.send( self._engine_address, "/loop_del", name )

    def new_mapping( self, mapping_info ):
        """Ask the engine to add the given MIDI binding."""
        serialization = self._serialize_mapping( mapping_info )
        liblo.send( self._engine_address, "/add_midi_binding", serialization )

    def remove_mappings( self, mapping_infos ):
        """Ask the engine to remove each of the given MIDI bindings."""
        for info in mapping_infos:
            serialization = self._serialize_mapping( info )
            liblo.send( self._engine_address, "/remove_midi_binding", serialization )
|
gpl-2.0
| -1,342,625,881,783,382,500
| 35.748322
| 99
| 0.601863
| false
| 4.152825
| false
| false
| false
|
ramansbach/cluster_analysis
|
clustering/visualize.py
|
1
|
2797
|
from __future__ import absolute_import, division, print_function
import numpy as np
import gsd.hoomd
import sklearn
import scipy.optimize as opt
import os
import pdb
from sklearn.neighbors import BallTree
from sklearn.neighbors import radius_neighbors_graph
from scipy.spatial.distance import cdist
from scipy.special import erf
from scipy.sparse.csgraph import connected_components
#from .due import due, Doi
from .smoluchowski import massAvSize
#from mpi4py import MPI
from cdistances import conOptDistanceCython,alignDistancesCython
__all__ = ['writeFrameForVMD','writeFramesFromCIDFile']
def writeFrameForVMD(clusterIDs, molno, atomno, frameout):
    """ Function that writes out a single frame for coloring by cluster

    Parameters
    ----------
    clusterIDs: list of ints for the frame corresponding to each molecule's
        cluster index
    molno: int
        number of molecules in frame
    atomno: int
        number of atoms per molecule
    frameout: string
        name of output file

    Notes
    -----
    Format of output file has a line for each cluster consisting of a set
    of ints. The first int is the colorID, and the rest are the atom indices
    that should be set to that color. By assumption, there are 16 available
    different colors.

    First line of file contains the total number of subsequent lines
    (# of clusters)
    """
    nclusts = max(clusterIDs) + 1
    # Fix: use a context manager so the file is closed even if an exception
    # is raised mid-write (the original left the handle open on error).
    with open(frameout, 'w') as framefile:
        framefile.write('{0}\n'.format(nclusts))
        for cID in range(nclusts):
            # The original kept a separate running index, but it always equals
            # cID, so the color cycles directly over the cluster index.
            colorID = cID % 16
            line = str(colorID) + ' '
            # molecules belonging to this cluster
            molinds = np.where(cID == np.array(clusterIDs))[0]
            ainds = molinds.copy()
            # first molno entries index the molecules themselves; the remaining
            # per-molecule atoms are laid out after them in blocks of atomno-1
            for molind in molinds:
                ainds = np.concatenate((ainds,
                                        molno + molind * (atomno - 1)
                                        + np.arange(0, atomno - 1)))
            for aind in ainds:
                line += str(aind) + ' '
            line += '\n'
            framefile.write(line)
def writeFramesFromCIDFile(cIDfile, molno, atomno, frameoutbase):
    """ Function that writes out multiple frames for coloring by cluster

    Parameters
    ----------
    cIDfile: string
        name of file containing cluster IDs, one whitespace-separated
        frame per line
    molno: int
        number of molecules in frame
    atomno: int
        number of atoms per molecule
    frameoutbase: string
        base name of output files; frame i is written to
        ``frameoutbase + str(i) + '.dat'``
    """
    # Fix: use a context manager (the original re-bound the cIDfile parameter
    # to the open handle and relied on a manual close).
    with open(cIDfile) as infile:
        lines = infile.readlines()
    for ind, line in enumerate(lines):
        # values may be written as floats (e.g. "1.0"); convert via float
        cIDs = [int(float(c)) for c in line.split()]
        writeFrameForVMD(cIDs, molno, atomno, frameoutbase + str(ind) + '.dat')
|
mit
| -3,627,616,678,007,215,000
| 30.784091
| 78
| 0.632463
| false
| 3.950565
| false
| false
| false
|
ethereum/pydevp2p
|
devp2p/multiplexer.py
|
1
|
21218
|
from gevent.queue import Queue
from collections import OrderedDict
import rlp
from rlp.utils import str_to_bytes, is_integer
import struct
import sys
sys.setrecursionlimit(10000) # frames are generated recursively
# chunked-0: rlp.list(protocol-type, sequence-id, total-packet-size)
header_data_sedes = rlp.sedes.List([rlp.sedes.big_endian_int] * 3, strict=False)
def ceil16(x):
    """Round *x* up to the next multiple of 16 (identity on exact multiples)."""
    return x + (-x % 16)
def rzpad16(data):
    """Right-pad *data* with NUL bytes up to the next 16-byte boundary."""
    shortfall = -len(data) % 16
    if shortfall:
        data += b'\x00' * shortfall
    return data
class MultiplexerError(Exception):
    """Base class for all errors raised by the multiplexer module."""
    pass
class DeserializationError(MultiplexerError):
    """Raised when incoming frame header data cannot be RLP-decoded."""
    pass
class FormatError(MultiplexerError):
    """Raised for structurally invalid frame data."""
    pass
class FrameCipherBase(object):
    """Base interface for frame ciphers: fixed-size header plus MAC-delimited body.

    Sizes are in bytes and match the RLPx frame layout used by Frame below.
    """
    mac_len = 16
    header_len = 32
    # NOTE(review): this is a str, not bytes -- looks like a Python-2 leftover;
    # confirm against the code that consumes dummy_mac.
    dummy_mac = '\x00' * mac_len
    block_size = 16

    def encrypt(self, header, frame):
        # NOTE(review): ``self.mac`` is not defined on this base class (only
        # ``dummy_mac`` is) -- presumably subclasses provide it; verify before
        # using this base implementation directly.
        assert len(header) == self.header_len
        assert len(frame) % self.block_size == 0
        return header + self.mac + frame + self.mac

    def decrypt_header(self, data):
        # Returns the leading header_len bytes; requires at least one byte of
        # body plus both MACs to be present.
        assert len(data) >= self.header_len + self.mac_len + 1 + self.mac_len
        return data[:self.header_len]

    def decrypt_body(self, data, body_size):
        # Body starts after header and header-MAC.
        assert len(data) >= self.header_len + self.mac_len + body_size + self.mac_len
        frame_offset = self.header_len + self.mac_len
        return data[frame_offset:frame_offset + body_size]
class Frame(object):
    """
    When sending a packet over RLPx, the packet will be framed.
    The frame provides information about the size of the packet and the packet's
    source protocol. There are three slightly different frames, depending on whether
    or not the frame is delivering a multi-frame packet. A multi-frame packet is a
    packet which is split (aka chunked) into multiple frames because it's size is
    larger than the protocol window size (pws; see Multiplexing). When a packet is
    chunked into multiple frames, there is an implicit difference between the first
    frame and all subsequent frames.
    Thus, the three frame types are
    normal, chunked-0 (first frame of a multi-frame packet),
    and chunked-n (subsequent frames of a multi-frame packet).

    Single-frame packet:
    header || header-mac || frame || mac

    Multi-frame packet:
    header || header-mac || frame-0 ||
    [ header || header-mac || frame-n || ... || ]
    header || header-mac || frame-last || mac
    """
    header_size = 16
    mac_size = 16
    padding = 16
    is_chunked_0 = False
    total_payload_size = None  # only used with chunked_0
    frame_cipher = None
    cipher_called = False

    def __init__(self, protocol_id, cmd_id, payload, sequence_id, window_size,
                 is_chunked_n=False, frames=None, frame_cipher=None):
        # memoryview lets chunking below slice the payload without copying
        payload = memoryview(payload)
        assert is_integer(window_size)
        assert window_size % self.padding == 0
        assert isinstance(cmd_id, int) and cmd_id < 256
        self.cmd_id = cmd_id
        self.payload = payload
        if frame_cipher:
            self.frame_cipher = frame_cipher
        # shared list accumulating this frame and any chunk continuations
        self.frames = frames or []
        assert protocol_id < 2**16
        self.protocol_id = protocol_id
        assert sequence_id is None or sequence_id < 2**16
        self.sequence_id = sequence_id
        self.is_chunked_n = is_chunked_n
        self.frames.append(self)

        # chunk payloads resulting in frames exceeding window_size
        fs = self.frame_size()
        if fs > window_size:
            if not is_chunked_n:
                self.is_chunked_0 = True
                self.total_payload_size = self.body_size()
            # chunk payload
            # window_size - fs is negative here, so this slice trims the
            # overflowing tail; the remainder becomes a chunked-n frame built
            # recursively below (appending itself to self.frames).
            self.payload = payload[:window_size - fs]
            assert self.frame_size() <= window_size
            remain = payload[len(self.payload):]
            assert len(remain) + len(self.payload) == len(payload)
            Frame(protocol_id, cmd_id, remain, sequence_id, window_size,
                  is_chunked_n=True,
                  frames=self.frames,
                  frame_cipher=frame_cipher)
        assert self.frame_size() <= window_size

    def __repr__(self):
        return '<Frame(%s, len=%d sid=%r)>' % \
            (self._frame_type(), self.frame_size(), self.sequence_id)

    def _frame_type(self):
        # str * bool yields '' for False, so the `or` chain picks the first
        # matching type name
        return 'normal' * self.is_normal or 'chunked_0' * self.is_chunked_0 or 'chunked_n'

    def body_size(self, padded=False):
        # frame-size: 3-byte integer size of frame, big endian encoded (excludes padding)
        # frame relates to body w/o padding w/o mac
        l = len(self.enc_cmd_id) + len(self.payload)
        if padded:
            l = ceil16(l)
        return l

    def frame_size(self):
        # header16 || mac16 || dataN + [padding] || mac16
        return self.header_size + self.mac_size + self.body_size(padded=True) + self.mac_size

    @property
    def is_normal(self):
        # a frame that is neither a chunk continuation nor a chunk head
        return not self.is_chunked_n and not self.is_chunked_0

    @property
    def header(self):
        """
        header: frame-size || header-data || padding
        frame-size: 3-byte integer size of frame, big endian encoded
        header-data:
            normal: rlp.list(protocol-type[, sequence-id])
            chunked-0: rlp.list(protocol-type, sequence-id, total-packet-size)
            chunked-n: rlp.list(protocol-type, sequence-id)
            normal, chunked-n: rlp.list(protocol-type[, sequence-id])
            values:
                protocol-type: < 2**16
                sequence-id: < 2**16 (this value is optional for normal frames)
                total-packet-size: < 2**32
        padding: zero-fill to 16-byte boundary
        """
        assert self.protocol_id < 2**16
        assert self.sequence_id is None or self.sequence_id < 2**16
        l = [self.protocol_id]
        if self.is_chunked_0:
            assert self.sequence_id is not None
            l.append(self.sequence_id)
            l.append(self.total_payload_size)
        elif self.sequence_id is not None:  # normal, chunked_n
            l.append(self.sequence_id)
        header_data = rlp.encode(l, sedes=header_data_sedes)
        # round-trip sanity check of the header encoding
        assert tuple(l) == rlp.decode(header_data, sedes=header_data_sedes, strict=False)
        # write body_size to header
        # frame-size: 3-byte integer size of frame, big endian encoded (excludes padding)
        # frame relates to body w/o padding w/o mac
        body_size = self.body_size()
        assert body_size < 256**3
        # pack as 4-byte big-endian, drop the leading zero byte -> 3 bytes
        header = struct.pack('>I', body_size)[1:] + header_data
        header = rzpad16(header)  # padding
        assert len(header) == self.header_size
        return header

    @property
    def enc_cmd_id(self):
        # only the first frame of a packet carries the command id
        if not self.is_chunked_n:
            return rlp.encode(self.cmd_id, sedes=rlp.sedes.big_endian_int)  # unsigned byte
        return b''

    @property
    def body(self):
        """
        frame:
            normal: rlp(packet-type) [|| rlp(packet-data)] || padding
            chunked-0: rlp(packet-type) || rlp(packet-data...)
            chunked-n: rlp(...packet-data) || padding
        padding: zero-fill to 16-byte boundary (only necessary for last frame)
        """
        b = self.enc_cmd_id  # packet-type
        length = len(b) + len(self.payload)
        assert isinstance(self.payload, memoryview)
        return b + self.payload.tobytes() + b'\x00' * (ceil16(length) - length)

    def get_frames(self):
        """Return this frame plus any chunk continuations created with it."""
        return self.frames

    def as_bytes(self):
        """Serialize the frame, encrypting it if a frame cipher is set."""
        assert not self.cipher_called  # must only be called once
        if not self.frame_cipher:
            assert len(self.header) == 16 == self.header_size
            assert len(self.body) == self.body_size(padded=True)
            dummy_mac = b'\x00' * self.mac_size
            r = self.header + dummy_mac + self.body + dummy_mac
            assert len(r) == self.frame_size()
            return r
        else:
            self.cipher_called = True
            e = self.frame_cipher.encrypt(self.header, self.body)
            assert len(e) == self.frame_size()
            return e
class Packet(object):
    """
    Packets are emitted and received by subprotocols
    """

    def __init__(self, protocol_id=0, cmd_id=0, payload=b'', prioritize=False):
        self.protocol_id = protocol_id
        self.cmd_id = cmd_id
        self.payload = payload
        self.prioritize = prioritize

    def __repr__(self):
        return 'Packet(%r)' % dict(protocol_id=self.protocol_id,
                                   cmd_id=self.cmd_id,
                                   payload_len=len(self.payload),
                                   prioritize=self.prioritize)

    def __eq__(self, other):
        # equality ignores the prioritize flag
        mine = {k: v for k, v in self.__dict__.items() if k != 'prioritize'}
        theirs = {k: v for k, v in other.__dict__.items() if k != 'prioritize'}
        return mine == theirs

    def __len__(self):
        return len(self.payload)
class Multiplexer(object):
    """
    Multiplexing of protocols is performed via dynamic framing and fair queueing.
    Dequeuing packets is performed in a cycle which dequeues one or more packets
    from the queue(s) of each active protocol. The multiplexor determines the
    amount of bytes to send for each protocol prior to each round of dequeuing packets.

    If the size of an RLP-encoded packet is less than 1 KB then the protocol may
    request that the network layer prioritize the delivery of the packet.
    This should be used if and only if the packet must be delivered before all other packets.

    The network layer maintains two queues and three buffers per protocol:
    a queue for normal packets, a queue for priority packets,
    a chunked-frame buffer, a normal-frame buffer, and a priority-frame buffer.

    Implemented Variant:

    each sub protocol has three queues
        prio
        normal
        chunked

    protocols are queried round robin
    """

    max_window_size = 8 * 1024
    max_priority_frame_size = 1024
    max_payload_size = 10 * 1024**2
    frame_cipher = None
    _cached_decode_header = None

    def __init__(self, frame_cipher=None):
        if frame_cipher:
            # assert isinstance(frame_cipher, FrameCipherBase)
            self.frame_cipher = frame_cipher
        self.queues = OrderedDict()  # protocol_id : dict(normal=queue, chunked=queue, prio=queue)
        self.sequence_id = dict()  # protocol_id : counter
        self.last_protocol = None  # last protocol, which sent data to the buffer
        self.chunked_buffers = dict()  # decode: protocol_id: dict(sequence_id: buffer)
        self._decode_buffer = bytearray()

    @property
    def num_active_protocols(self):
        "A protocol is considered active if it's queue contains one or more packets."
        return sum(1 for p_id in self.queues if self.is_active_protocol(p_id))

    def is_active_protocol(self, protocol_id):
        """True if any of the protocol's three queues holds a frame."""
        return True if sum(q.qsize() for q in self.queues[protocol_id].values()) else False

    def protocol_window_size(self, protocol_id=None):
        """
        pws = protocol-window-size = window-size / active-protocol-count
        initial pws = 8kb
        """
        if protocol_id and not self.is_active_protocol(protocol_id):
            # account for the protocol becoming active
            s = self.max_window_size // (1 + self.num_active_protocols)
        else:
            s = self.max_window_size // max(1, self.num_active_protocols)
        return s - s % 16  # should be a multiple of padding size

    def add_protocol(self, protocol_id):
        """Register a protocol: create its queues and chunk-reassembly buffer."""
        assert protocol_id not in self.queues
        self.queues[protocol_id] = dict(normal=Queue(),
                                        chunked=Queue(),
                                        priority=Queue())
        self.sequence_id[protocol_id] = 0
        self.chunked_buffers[protocol_id] = dict()
        self.last_protocol = protocol_id

    @property
    def next_protocol(self):
        # round-robin over registered protocols, starting after last_protocol
        protocols = tuple(self.queues.keys())
        if self.last_protocol == protocols[-1]:
            next_protocol = protocols[0]
        else:
            next_protocol = protocols[protocols.index(self.last_protocol) + 1]
        self.last_protocol = next_protocol
        return next_protocol

    def add_packet(self, packet):
        """Frame a packet and enqueue its frame(s) on the owning protocol."""
        # protocol_id, cmd_id, rlp_data, prioritize=False
        sid = self.sequence_id[packet.protocol_id]
        self.sequence_id[packet.protocol_id] = (sid + 1) % 2**16
        frames = Frame(packet.protocol_id, packet.cmd_id, packet.payload,
                       sequence_id=sid,
                       window_size=self.protocol_window_size(packet.protocol_id),
                       frame_cipher=self.frame_cipher
                       ).frames
        queues = self.queues[packet.protocol_id]
        if packet.prioritize:
            # priority packets must fit in a single small frame
            assert len(frames) == 1
            assert frames[0].frame_size() <= self.max_priority_frame_size
            queues['priority'].put(frames[0])
        elif len(frames) == 1:
            queues['normal'].put(frames[0])
        else:
            for f in frames:
                queues['chunked'].put(f)

    def pop_frames_for_protocol(self, protocol_id):
        """
        If priority packet and normal packet exist:
            send up to pws/2 bytes from each (priority first!)
        else if priority packet and chunked-frame exist:
            send up to pws/2 bytes from each
        else
            if normal packet and chunked-frame exist: send up to pws/2 bytes from each
        else
            read pws bytes from active buffer

        If there are bytes leftover -- for example, if the bytes sent is < pws,
        then repeat the cycle.
        """
        pws = self.protocol_window_size()
        queues = self.queues[protocol_id]
        frames = []
        size = 0
        while size < pws:
            frames_added = 0
            for qn in ('priority', 'normal', 'chunked'):
                q = queues[qn]
                if q.qsize():
                    fs = q.peek().frame_size()
                    if size + fs <= pws:
                        frames.append(q.get())
                        size += fs
                        frames_added += 1
                # add no more than two in order to send normal and priority first
                if frames_added == 2:
                    break  # i.e. next is 'priority' again
            # empty queues
            if frames_added == 0:
                break
        # the following can not be guaranteed, as pws might have been different
        # at the time where packets were framed and added to the queues
        # assert sum(f.frame_size() for f in frames) <= pws
        return frames

    def pop_frames(self):
        """
        returns the frames for the next protocol up to protocol window size bytes
        """
        protocols = tuple(self.queues.keys())
        idx = protocols.index(self.next_protocol)
        protocols = protocols[idx:] + protocols[:idx]
        assert len(protocols) == len(self.queues.keys())
        for p in protocols:
            frames = self.pop_frames_for_protocol(p)
            if frames:
                return frames
        return []

    def pop_all_frames(self):
        """Drain every queue, preserving the round-robin ordering."""
        frames = []
        while True:
            r = self.pop_frames()
            frames.extend(r)
            if not r:
                break
        return frames

    def pop_all_frames_as_bytes(self):
        """Drain every queue and serialize the frames into one byte string."""
        return b''.join(f.as_bytes() for f in self.pop_all_frames())

    def decode_header(self, buffer):
        """Return the (decrypted, if ciphered) 16-byte frame header as bytes."""
        assert isinstance(buffer, memoryview)
        assert len(buffer) >= 32
        if self.frame_cipher:
            header = self.frame_cipher.decrypt_header(
                buffer[:Frame.header_size + Frame.mac_size].tobytes())
        else:
            # header: frame-size || header-data || padding
            header = buffer[:Frame.header_size].tobytes()
        return header

    def decode_body(self, buffer, header=None):
        """
        w/o encryption
        peak into buffer for body_size

        return None if buffer is not long enough to decode frame
        """
        assert isinstance(buffer, memoryview)
        if len(buffer) < Frame.header_size:
            return None, buffer
        if not header:
            # Bug fix: decode_header() requires a memoryview and slices the
            # prefix it needs itself; the previous code passed bytes
            # (``buffer[:...].tobytes()``), which always tripped
            # decode_header's isinstance assertion.
            header = self.decode_header(buffer)
        body_size = struct.unpack('>I', b'\x00' + header[:3])[0]
        if self.frame_cipher:
            body = self.frame_cipher.decrypt_body(buffer[Frame.header_size + Frame.mac_size:].tobytes(),
                                                  body_size)
            assert len(body) == body_size
            bytes_read = Frame.header_size + Frame.mac_size + ceil16(len(body)) + Frame.mac_size
        else:
            # header: frame-size || header-data || padding
            header = buffer[:Frame.header_size].tobytes()
            # frame-size: 3-byte integer size of frame, big endian encoded (excludes padding)
            # frame relates to body w/o padding w/o mac
            body_offset = Frame.header_size + Frame.mac_size
            body = buffer[body_offset:body_offset + body_size].tobytes()
            assert len(body) == body_size
            bytes_read = ceil16(body_offset + body_size + Frame.mac_size)
        assert bytes_read % Frame.padding == 0

        # normal, chunked-n: rlp.list(protocol-type[, sequence-id])
        # chunked-0: rlp.list(protocol-type, sequence-id, total-packet-size)
        try:
            header_data = rlp.decode(header[3:], sedes=header_data_sedes, strict=False)
        except rlp.RLPException:
            raise DeserializationError('invalid rlp data')

        if len(header_data) == 3:
            chunked_0 = True
            total_payload_size = header_data[2]
            assert total_payload_size < 2**32
        else:
            chunked_0 = False
            total_payload_size = None

        # protocol-type: < 2**16
        protocol_id = header_data[0]
        assert protocol_id < 2**16
        # sequence-id: < 2**16 (this value is optional for normal frames)
        if len(header_data) > 1:
            sequence_id = header_data[1]
            assert sequence_id < 2**16
        else:
            sequence_id = None

        # build packet
        if protocol_id not in self.chunked_buffers:
            raise MultiplexerError('unknown protocol_id %d' % (protocol_id))
        chunkbuf = self.chunked_buffers[protocol_id]
        if sequence_id in chunkbuf:
            # body chunked-n: packet-data || padding
            packet = chunkbuf[sequence_id]
            if chunked_0:
                raise MultiplexerError('received chunked_0 frame for existing buffer %d of protocol %d' %
                                       (sequence_id, protocol_id))
            if len(body) > packet.total_payload_size - len(packet.payload):
                raise MultiplexerError('too much data for chunked buffer %d of protocol %d' %
                                       (sequence_id, protocol_id))
            # all good
            packet.payload += body
            if packet.total_payload_size == len(packet.payload):
                # packet complete: drop the bookkeeping attribute and buffer slot
                del packet.total_payload_size
                del chunkbuf[sequence_id]
                return packet
            # else: chunk consumed but packet incomplete -> implicitly None
        else:
            # body normal, chunked-0: rlp(packet-type) [|| rlp(packet-data)] || padding
            item, end = rlp.codec.consume_item(body, 0)
            cmd_id = rlp.sedes.big_endian_int.deserialize(item)
            if chunked_0:
                payload = bytearray(body[end:])
                total_payload_size -= end
            else:
                payload = body[end:]
            packet = Packet(protocol_id=protocol_id, cmd_id=cmd_id, payload=payload)
            if chunked_0:
                if total_payload_size < len(payload):
                    raise MultiplexerError('total payload size smaller than initial chunk')
                if total_payload_size == len(payload):
                    return packet  # shouldn't have been chunked, whatever
                assert sequence_id is not None
                packet.total_payload_size = total_payload_size
                chunkbuf[sequence_id] = packet
            else:
                return packet  # normal (non-chunked)

    def decode(self, data=''):
        """Feed raw wire bytes; return the list of completed Packets so far."""
        if data:
            self._decode_buffer.extend(data)
        if not self._cached_decode_header:
            if len(self._decode_buffer) < Frame.header_size + Frame.mac_size:
                return []
            else:
                self._cached_decode_header = self.decode_header(memoryview(self._decode_buffer))
                assert isinstance(self._cached_decode_header, bytes)

        body_size = struct.unpack('>I', b'\x00' + self._cached_decode_header[:3])[0]
        required_len = Frame.header_size + Frame.mac_size + ceil16(body_size) + Frame.mac_size
        if len(self._decode_buffer) >= required_len:
            packet = self.decode_body(memoryview(self._decode_buffer), self._cached_decode_header)
            self._cached_decode_header = None
            self._decode_buffer = self._decode_buffer[required_len:]
            # recurse in case the buffer already holds further complete frames
            if packet:
                return [packet] + self.decode()
            else:
                return self.decode()
        return []
|
mit
| -6,756,658,152,461,528,000
| 37.578182
| 105
| 0.582996
| false
| 3.985349
| false
| false
| false
|
tschaefer/director
|
director/__init__.py
|
1
|
2748
|
# -*- coding: utf-8 -*-
import sys
import os
import argparse
from director.importer import Importer
from director.updater import Updater
from director.service import Service
def stype(bytestring):
    """Decode *bytestring* to text using the filesystem encoding."""
    return bytestring.decode(sys.getfilesystemencoding())
def parse_options():
    """Build and evaluate the command line for the three sub-commands.

    Sub-commands:
      * ``import``  -- one-shot media import (flag attr ``importer``)
      * ``update``  -- incremental update (flag attr ``updater``)
      * ``service`` -- HTTP service (flag attr ``service``)

    Returns:
        argparse.Namespace: the parsed arguments.
    """
    # BUGFIX: `unicode` only exists on Python 2; fall back to `str` so the
    # module no longer dies with NameError under Python 3.
    try:
        text_type = unicode  # noqa: F821 -- Python 2
    except NameError:
        text_type = str
    db = os.path.join(os.path.expanduser('~'), 'director.db')
    db = 'sqlite:///%s' % db
    parser = argparse.ArgumentParser(description='Director')
    parser.add_argument('-d', '--database',
                        type=text_type,
                        default=db,
                        help='database url')
    subparsers = parser.add_subparsers()
    # "import" and "update" take identical arguments; register them in a
    # loop instead of duplicating the blocks.
    for command, flag in (('import', 'importer'), ('update', 'updater')):
        sub = subparsers.add_parser(command)
        sub.set_defaults(**{flag: True})
        sub.add_argument('path',
                         type=stype,
                         help='media path')
        sub.add_argument('-v', '--verbose',
                         action='store_true',
                         help='verbose output')
    parser_service = subparsers.add_parser('service')
    parser_service.set_defaults(service=True)
    parser_service.add_argument('path',
                                type=stype,
                                help='media path')
    parser_service.add_argument('-H', '--host',
                                type=text_type,
                                default='localhost',
                                help='bind to address')
    parser_service.add_argument('-p', '--port',
                                type=int,
                                default=8888,
                                help='listen to port')
    return parser.parse_args()
def run(args):
    """Dispatch to the worker selected on the command line.

    parse_options() sets exactly one of the attributes ``importer``,
    ``updater`` or ``service`` on the namespace; the matching worker is
    built from the parsed arguments and its ``run()`` method invoked.
    """
    if hasattr(args, 'importer'):
        Importer(path=args.path, database=args.database,
                 verbose=args.verbose).run()
    elif hasattr(args, 'updater'):
        Updater(path=args.path, database=args.database,
                verbose=args.verbose).run()
    elif hasattr(args, 'service'):
        Service(host=args.host, port=args.port,
                database=args.database, path=args.path).run()
def main():
    """Console entry point: parse CLI options and run the chosen sub-command."""
    args = parse_options()
    run(args)


if __name__ == '__main__':
    main()
|
bsd-3-clause
| -2,391,396,943,135,285,000
| 32.108434
| 67
| 0.519651
| false
| 4.649746
| false
| false
| false
|
mitodl/bootcamp-ecommerce
|
localdev/seed/app_state_api.py
|
1
|
17130
|
"""API functionality for setting the state of an application"""
import os
import random
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.files import File
from django.db.models import Max
from applications.models import (
VideoInterviewSubmission,
QuizSubmission,
ApplicationStepSubmission,
)
from applications.constants import (
AppStates,
REVIEW_STATUS_REJECTED,
REVIEW_STATUS_PENDING,
REVIEW_STATUS_APPROVED,
ORDERED_UNFINISHED_APP_STATES,
SUBMISSION_REVIEW_COMPLETED_STATES,
SUBMISSION_VIDEO,
SUBMISSION_QUIZ,
SUBMISSION_STATUS_SUBMITTED,
)
from jobma.models import Job, Interview
from jobma.constants import COMPLETED
from ecommerce.models import Order, Line
from ecommerce.api import complete_successful_order
from profiles.models import Profile, LegalAddress
from profiles.api import is_user_info_complete
from main.utils import now_in_utc, get_filename_from_path, partition_around_index
User = get_user_model()
# States this seed API may set: every unfinished state plus COMPLETE.
ALLOWED_STATES = ORDERED_UNFINISHED_APP_STATES + [AppStates.COMPLETE.value]
DUMMY_RESUME_FILEPATH = "localdev/seed/resources/dummy_resume.pdf"
DUMMY_RESUME_FILENAME = get_filename_from_path(DUMMY_RESUME_FILEPATH)
DUMMY_RESUME_ENCODING = "iso-8859-1"
DUMMY_LINKEDIN_URL = "http://example.com/linkedin"
# BUGFIX: a stray trailing comma previously made this a 1-tuple
# ("http://example.com/video",) which was then stored verbatim in
# Interview.interview_url; every sibling URL constant is a plain string.
DUMMY_INTERVIEW_URL = "http://example.com/video"
DUMMY_INTERVIEW_RESULTS_URL = "http://example.com/video-result"
INTERVIEW_TEMPLATE_ID = 123
# Pools of plausible values used to randomly populate seeded profiles.
PROFILE_CHOICES = {
    "company": ("MIT", "Boeing"),
    "gender": ("m", "f", "o"),
    "birth_year": (1950, 1960, 1970, 1980, 1990),
    "job_title": ("Software Developer", "Administrator", "Professor", "Emperor"),
    "industry": ("Tech", "Higher Ed"),
    "job_function": ("Working hard", "Hardly working"),
    "company_size": (9, 99),
    "years_experience": (2, 5, 10),
    "highest_education": ("Doctorate", "Bachelor's degree"),
    "name": (
        "Joseph M. Acaba",
        "Kayla Barron",
        "Raja Chari",
        "Jeanatte J. Epps",
        "Bob Hines",
        "Jonny Kim",
        "Nicole Aunapu Mann",
        "Kathleen Rubins",
        "Mark T. Vande Hei",
    ),
}
# Pools of plausible values used to randomly populate seeded legal addresses.
LEGAL_ADDRESS_CHOICES = {
    "street_address_1": ("1 Main St", "500 Technology Square", "4 Washington Lane"),
    "city": ("Cambridge", "Boston", "Somerville", "Townsville"),
    "country": ("US",),
    "state_or_territory": ("US-MA", "US-CT", "US-VT", "US-NH"),
    "postal_code": ("02139", "02201", "02139"),
}
def fill_out_registration_info(user):
    """Ensures that the user has a fully filled out profile and legal address.

    Args:
        user (User): the user to populate

    Returns:
        tuple: (user, Profile, LegalAddress) after population
    """
    profile, profile_created = Profile.objects.get_or_create(user=user)
    if profile_created or not profile.is_complete:
        # Randomly fill every profile field; "name" is handled separately
        # because it is reused below for the legal address.
        profile.name = random.choice(PROFILE_CHOICES["name"])
        profile_field_values = [
            (field_name, values)
            for field_name, values in PROFILE_CHOICES.items()
            if field_name != "name"
        ]
        for field_name, values in profile_field_values:
            setattr(profile, field_name, random.choice(values))
        profile.save()
    # Safety net: an existing "complete" profile could still lack a name,
    # and the LegalAddress below splits profile.name into first/last.
    if not profile.name:
        profile.name = random.choice(PROFILE_CHOICES["name"])
        profile.save()
    if not hasattr(user, "legal_address"):
        legal_address_props = {
            prop_name: random.choice(prop_values)
            for prop_name, prop_values in LEGAL_ADDRESS_CHOICES.items()
        }
        # First word of the name becomes first_name, the rest last_name.
        legal_address = LegalAddress.objects.create(
            user=user,
            first_name=profile.name.split(" ")[0],
            last_name=" ".join(profile.name.split(" ")[1:]),
            **legal_address_props,
        )
    else:
        legal_address = user.legal_address
    return user, profile, legal_address
def fulfill_video_interview(application, run_application_step):
    """
    Ensures that a user has a completed video interview submission for the given application and step

    Args:
        application (applications.models.BootcampApplication):
        run_application_step (applications.models.BootcampRunApplicationStep):

    Returns:
        ApplicationStepSubmission: The created or updated submission
    """
    # If Job records already exist, use the max job_id value and add 1 for the new job_id. Otherwise use 1.
    job_id = (
        1
        if Job.objects.count() == 0
        else (Job.objects.aggregate(max_job_id=Max("job_id"))["max_job_id"] + 1)
    )
    # One Job per bootcamp run; only created if it does not exist yet.
    job, _ = Job.objects.get_or_create(
        run=application.bootcamp_run,
        defaults=dict(
            job_title=application.bootcamp_run.title,
            job_id=job_id,
            job_code=f"job_run_{application.bootcamp_run.id}",
            interview_template_id=INTERVIEW_TEMPLATE_ID,
        ),
    )
    # A completed dummy Interview for this applicant, with a random
    # 9-digit token.
    interview, _ = Interview.objects.get_or_create(
        job=job,
        applicant=application.user,
        defaults=dict(
            status=COMPLETED,
            interview_url=DUMMY_INTERVIEW_URL,
            results_url=DUMMY_INTERVIEW_RESULTS_URL,
            interview_token="".join([str(random.randint(0, 9)) for _ in range(0, 9)]),
        ),
    )
    submission, _ = VideoInterviewSubmission.objects.get_or_create(interview=interview)
    # Attach the submission to the application step via the generic
    # (content_type, object_id) relation, resetting it to "pending review".
    step_submission, _ = ApplicationStepSubmission.objects.update_or_create(
        bootcamp_application=application,
        run_application_step=run_application_step,
        defaults=dict(
            submitted_date=now_in_utc(),
            review_status=REVIEW_STATUS_PENDING,
            review_status_date=None,
            submission_status=SUBMISSION_STATUS_SUBMITTED,
            content_type=ContentType.objects.get(
                app_label="applications", model=SUBMISSION_VIDEO
            ),
            object_id=submission.id,
        ),
    )
    return step_submission
def fulfill_quiz_interview(application, run_application_step):
    """
    Ensures that a user has a completed quiz interview submission for the given application and step

    Args:
        application (applications.models.BootcampApplication):
        run_application_step (applications.models.BootcampRunApplicationStep):

    Returns:
        ApplicationStepSubmission: The created or updated submission
    """
    submission = QuizSubmission.objects.create(started_date=None)
    # Attach via the generic (content_type, object_id) relation and reset
    # the step to "pending review".
    step_submission, _ = ApplicationStepSubmission.objects.update_or_create(
        bootcamp_application=application,
        run_application_step=run_application_step,
        defaults=dict(
            submitted_date=now_in_utc(),
            review_status=REVIEW_STATUS_PENDING,
            review_status_date=None,
            submission_status=SUBMISSION_STATUS_SUBMITTED,
            content_type=ContentType.objects.get(
                app_label="applications", model=SUBMISSION_QUIZ
            ),
            object_id=submission.id,
        ),
    )
    return step_submission


# Maps a submission type constant to the factory that fulfills it.
SUBMISSION_FACTORIES = {
    SUBMISSION_VIDEO: fulfill_video_interview,
    SUBMISSION_QUIZ: fulfill_quiz_interview,
}
class AppStep:
    """Base class for evaluating/setting an application at a certain state.

    Subclasses set ``state`` and implement ``is_fulfilled``, ``_fulfill``
    and ``_revert``; the public ``fulfill``/``revert`` wrappers keep the
    application's ``state`` field consistent afterwards.
    """

    # The AppStates value this step corresponds to; set by subclasses.
    state = None

    @staticmethod
    def is_fulfilled(application):
        """Returns True if the given application step has been fulfilled"""
        raise NotImplementedError

    @staticmethod
    def _fulfill(application, **kwargs):
        """Performs the necessary data manipulation to fulfill this step of the application"""
        raise NotImplementedError

    @staticmethod
    def _revert(application):
        """
        Performs the necessary data manipulation to ensure that this step of the application has not been fulfilled
        """
        raise NotImplementedError

    @classmethod
    def fulfill(cls, application, **kwargs):
        """
        Performs the necessary data manipulation to fulfill this step of the application, and ensures that the
        application is in the correct state afterwards
        """
        cls._fulfill(application, **kwargs)
        # NOTE: These functions perform some data manipulation on an application that aren't supported by normal
        # functionality, hence the manual setting of the state instead of using state transitions.
        application.refresh_from_db()
        # Advance to the next unfinished state, or COMPLETE if this was
        # the last one.
        state_idx = ORDERED_UNFINISHED_APP_STATES.index(cls.state)
        new_state = (
            AppStates.COMPLETE.value
            if state_idx == len(ORDERED_UNFINISHED_APP_STATES) - 1
            else ORDERED_UNFINISHED_APP_STATES[state_idx + 1]
        )
        application.state = new_state
        application.save()

    @classmethod
    def revert(cls, application):
        """
        Performs the necessary data manipulation to ensure that this step of the application has not been fulfilled,
        and ensures that the application is in the correct state afterwards
        """
        cls._revert(application)
        # NOTE: These functions perform some data manipulation on an application that aren't supported by normal
        # functionality, hence the manual setting of the state instead of using state transitions.
        application.refresh_from_db()
        application.state = cls.state
        application.save()
class AwaitingProfileStep(AppStep):
    """Provides functionality for fulfilling or reverting the 'awaiting profile' step of an application"""

    state = AppStates.AWAITING_PROFILE_COMPLETION.value

    @staticmethod
    def is_fulfilled(application):
        # Fulfilled when profile + legal address info is complete.
        return is_user_info_complete(application.user)

    @staticmethod
    def _fulfill(application, **kwargs):
        fill_out_registration_info(application.user)

    @staticmethod
    def _revert(application):
        # Deleting the legal address is enough to make the info incomplete.
        LegalAddress.objects.filter(user=application.user).delete()
class AwaitingResumeStep(AppStep):
    """Provides functionality for fulfilling or reverting the 'awaiting resume' step of an application"""

    state = AppStates.AWAITING_RESUME.value

    @staticmethod
    def is_fulfilled(application):
        # Fulfilled once an upload date is recorded along with either a
        # resume file or a LinkedIn URL.
        return application.resume_upload_date is not None and (
            application.resume_file is not None or application.linkedin_url is not None
        )

    @staticmethod
    def _fulfill(application, **kwargs):
        # Attach the bundled dummy PDF plus a dummy LinkedIn URL.
        with open(
            os.path.join(settings.BASE_DIR, DUMMY_RESUME_FILEPATH), "rb"
        ) as resume_file:
            application.add_resume(
                resume_file=File(resume_file, name=DUMMY_RESUME_FILENAME),
                linkedin_url=DUMMY_LINKEDIN_URL,
            )
            application.save()

    @staticmethod
    def _revert(application):
        if application.resume_file is not None:
            # Remove the stored file before clearing the field.
            application.resume_file.delete()
        application.resume_file = None
        application.linkedin_url = None
        application.resume_upload_date = None
        application.save()
class AwaitingSubmissionsStep(AppStep):
    """Provides functionality for fulfilling or reverting the 'awaiting submissions' step of an application"""

    state = AppStates.AWAITING_USER_SUBMISSIONS.value

    @staticmethod
    def is_fulfilled(application):
        """Return True when the user has submitted everything this step requires."""
        submissions = list(application.submissions.all())
        submission_review_statuses = [
            submission.review_status for submission in submissions
        ]
        # Any rejected or pending-review submission means the user has
        # already submitted; the step is fulfilled either way.
        if any(
            status in (REVIEW_STATUS_REJECTED, REVIEW_STATUS_PENDING)
            for status in submission_review_statuses
        ):
            return True
        if len(submissions) < application.bootcamp_run.application_steps.count():
            return False
        # BUGFIX: previously fell off the end (implicitly returning None)
        # when every required submission existed and was approved, so the
        # step was wrongly treated as unfulfilled.
        return True

    @staticmethod
    def _fulfill(application, **kwargs):
        """Create submissions for the first ``num_submissions`` run steps (default: all)."""
        num_to_fulfill = kwargs.get("num_submissions", None)
        run_steps = application.bootcamp_run.application_steps.order_by(
            "application_step__step_order"
        ).all()
        num_to_fulfill = num_to_fulfill or len(run_steps)
        if num_to_fulfill and num_to_fulfill > len(run_steps):
            raise ValidationError(
                "{} step(s) exist. Cannot fulfill {}.".format(
                    len(run_steps), num_to_fulfill
                )
            )
        for i, run_step in enumerate(run_steps):
            if i >= num_to_fulfill:
                break
            # Dispatch to the video/quiz factory for this step type.
            submission_factory = SUBMISSION_FACTORIES[
                run_step.application_step.submission_type
            ]
            submission_factory(application, run_step)

    @staticmethod
    def _revert(application):
        application.submissions.all().delete()
class AwaitingReviewStep(AppStep):
    """Provides functionality for fulfilling or reverting the 'awaiting submission review' step of an application"""

    state = AppStates.AWAITING_SUBMISSION_REVIEW.value

    @staticmethod
    def is_fulfilled(application):
        # Fulfilled when at least one submission exists and every one of
        # them has reached a completed review state.
        submissions = list(application.submissions.all())
        submission_review_statuses = [
            submission.review_status for submission in submissions
        ]
        return len(submissions) > 0 and len(submissions) == len(
            [
                status
                for status in submission_review_statuses
                if status in SUBMISSION_REVIEW_COMPLETED_STATES
            ]
        )

    @staticmethod
    def _fulfill(application, **kwargs):
        # Approve the first ``num_reviews`` submissions in step order
        # (default: all of them).
        num_to_fulfill = kwargs.get("num_reviews", None)
        submissions = list(
            application.submissions.order_by(
                "run_application_step__application_step__step_order"
            ).all()
        )
        num_to_fulfill = num_to_fulfill or len(submissions)
        if num_to_fulfill and num_to_fulfill > len(submissions):
            raise ValidationError(
                "{} submission(s) exist. Cannot fulfill {}.".format(
                    len(submissions), num_to_fulfill
                )
            )
        now = now_in_utc()
        for i, submission in enumerate(submissions):
            if i >= num_to_fulfill:
                break
            submission.review_status = REVIEW_STATUS_APPROVED
            submission.review_status_date = now
            submission.save()

    @staticmethod
    def _revert(application):
        # Put every submission back into the pending-review state.
        application.submissions.update(
            review_status=REVIEW_STATUS_PENDING, review_status_date=None
        )
class AwaitingPaymentStep(AppStep):
    """Provides functionality for fulfilling or reverting the 'awaiting payment' step of an application"""

    state = AppStates.AWAITING_PAYMENT.value

    @staticmethod
    def is_fulfilled(application):
        return application.is_paid_in_full

    @staticmethod
    def _fulfill(application, **kwargs):
        # Create a fulfilled order + line covering the full run price, then
        # run the normal post-payment bookkeeping (without a receipt email).
        run = application.bootcamp_run
        total_run_price = run.price
        order, _ = Order.objects.update_or_create(
            user=application.user,
            application=application,
            defaults=dict(status=Order.FULFILLED, total_price_paid=total_run_price),
        )
        Line.objects.update_or_create(
            order=order, bootcamp_run=run, defaults=dict(price=total_run_price)
        )
        complete_successful_order(order, send_receipt=False)

    @staticmethod
    def _revert(application):
        Order.objects.filter(application=application).delete()
# Step handlers in the order an application progresses through them; the
# position in this list mirrors ORDERED_UNFINISHED_APP_STATES.
ORDERED_APPLICATION_STEP_CLASSES = [
    AwaitingProfileStep,
    AwaitingResumeStep,
    AwaitingSubmissionsStep,
    AwaitingReviewStep,
    AwaitingPaymentStep,
]
def set_application_state(application, target_state):
    """
    Manipulates the given application into the target state.

    Args:
        application (BootcampApplication):
        target_state (str): The desired state of the application

    Returns:
        BootcampApplication: The updated application

    Raises:
        ValidationError: if run in a production environment
    """
    # Hard guard: this seed-only API mutates data in ways the normal app
    # never would.
    if settings.ENVIRONMENT in {"prod", "production"}:
        raise ValidationError("This API function cannot be used in production")
    assert target_state in ALLOWED_STATES
    if target_state == AppStates.COMPLETE.value:
        # COMPLETE means every step is fulfilled and none come after.
        previous_step_classes, next_step_classes = (
            ORDERED_APPLICATION_STEP_CLASSES,
            [],
        )
        target_step_cls = None
    else:
        target_state_cls_index = next(
            i
            for i, step_cls in enumerate(ORDERED_APPLICATION_STEP_CLASSES)
            if step_cls.state == target_state
        )
        previous_step_classes, next_step_classes = partition_around_index(
            ORDERED_APPLICATION_STEP_CLASSES, target_state_cls_index
        )
        target_step_cls = ORDERED_APPLICATION_STEP_CLASSES[target_state_cls_index]
    # Revert all steps that come after the target
    for step_cls in reversed(next_step_classes):
        step_cls.revert(application)
    # Apply/fulfill all steps before the target (if not already fulfilled)
    for step_cls in previous_step_classes:
        if not step_cls.is_fulfilled(application):
            step_cls.fulfill(application)
    if target_step_cls:
        # Make sure that the target state hasn't already been fulfilled
        target_step_cls.revert(application)
    return application
|
bsd-3-clause
| 5,803,917,556,540,261,000
| 34.83682
| 116
| 0.648803
| false
| 4.068884
| false
| false
| false
|
TamiaLab/carnetdumaker
|
apps/gender/fields.py
|
1
|
1538
|
"""
Model fields for the gender app.
"""
from django.db import models
from django.utils import six
from django.utils.translation import ugettext_lazy as _
from .constants import (GENDER_CHOICES,
GENDER_UNKNOWN)
class GenderFieldBase(models.CharField):
    """
    This database model field can be used to store the gender of a person.

    A single-character CharField constrained to GENDER_CHOICES, defaulting
    to GENDER_UNKNOWN and allowing blank values.
    """

    description = _('A gender type object')

    # Gender codes are single characters (see GENDER_CHOICES).
    MAX_LENGTH = 1

    def __init__(self, *args, **kwargs):
        # Supply the field defaults but let explicit kwargs win.
        parent_kwargs = {
            'max_length': self.MAX_LENGTH,
            'choices': GENDER_CHOICES,
            'default': GENDER_UNKNOWN,
            'blank': True,
        }
        parent_kwargs.update(kwargs)
        super(GenderFieldBase, self).__init__(*args, **parent_kwargs)

    def deconstruct(self):
        """Strip the kwargs this class injects so migrations stay minimal."""
        name, path, args, kwargs = super(GenderFieldBase, self).deconstruct()
        if kwargs['choices'] == GENDER_CHOICES:
            del kwargs['choices']
        if kwargs['max_length'] == self.MAX_LENGTH:
            del kwargs['max_length']
        if kwargs['default'] == GENDER_UNKNOWN:
            del kwargs['default']
        if kwargs['blank']:
            del kwargs['blank']
        return name, path, args, kwargs

    def get_internal_type(self):
        # Stored as a plain CharField at the database level.
        return "CharField"
# NOTE(review): models.SubfieldBase was removed in Django 1.10 — this only
# works on older Django; confirm the project's pinned version.
class GenderField(six.with_metaclass(models.SubfieldBase,
                                     GenderFieldBase)):
    """
    Database gender field. Can be used to store a gender type.
    See ``GenderFieldBase`` for details.
    """
    pass
|
agpl-3.0
| 8,364,084,219,892,851,000
| 27.481481
| 77
| 0.587776
| false
| 4.179348
| false
| false
| false
|
JaredButcher/dayOfSagittariusIII
|
Server/sockServer.py
|
1
|
8357
|
import asyncio
import dataManagement
from enum import Enum, unique
import html
import json
import threading
import websockets
dataStor = None
def start(port, data):
    """Run the websocket server on *port*, blocking until the loop stops.

    *data* is stored in the module-level ``dataStor`` so connection
    handlers can reach shared state.
    """
    global dataStor
    dataStor = data
    # Dedicated event loop (allows running from a non-main thread).
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        coro = websockets.server.serve(handle_conn, host='', port=port, loop=loop)
        server = loop.run_until_complete(coro)
    except OSError:
        # Typically the port is already in use.
        print("close")
    else:
        # Blocks here; cleanup runs after someone stops the loop.
        loop.run_forever()
        server.close()
        loop.run_until_complete(server.wait_closed())
        loop.close()
async def handle_conn(conn, Uri):
    """Per-connection entry point: wrap the socket and run its receive loop."""
    print("URI: " + Uri)
    user = client(conn)
    await user.beginReceiveLoop()
class client:
    """One connected websocket client and its protocol state machine.

    Messages are JSON objects keyed by the short codes in the ``field``
    enum; the ``action`` field selects the handler. A client must first
    ``init`` with a session id before any other action is accepted.
    """

    def __init__(self, conn):
        self.conn = conn            # the websocket connection
        self.alive = True           # receive loop runs while True
        self.errorCount = 0         # consecutive-error counter (reset below)
        self.user = None            # bound user after a successful init
        self.receiveDele = []
        self.error = False

    async def beginReceiveLoop(self):
        """Receive and dispatch messages until the connection closes."""
        while self.alive:
            global dataStor;
            try:
                data = await self.conn.recv()
            except websockets.exceptions.ConnectionClosed as e:
                self.destory()
                break
            # Start processing and constructing a response
            print("Message: " + data)
            res = {}
            message = None
            try:
                message = json.loads(data)
                if field.action.value in message:
                    # INITAL CONNECTION---------------------------------------------------------
                    if self.user is None:
                        if message[field.action.value] == action.init.value:
                            if field.session.value in message:
                                user = dataStor.getUser(message[field.session.value])
                                if user != None:
                                    user.setSock(self)
                                    self.user = user
                                    self.user.rmGame()
                                    # Echo the stored display name, if any.
                                    if not self.user.getName() is None:
                                        res[field.action.value] = action.name.value;
                                        res[field.name.value] = self.user.getName()
                                        self.send(res)
                        if self.user is None:
                            self.sendError(error.badInit.value)
                    # SET NAME-------------------------------------------------------------------
                    elif message[field.action.value] == action.name.value:
                        if dataStor.setUserName(self.user, message[field.name.value]):
                            res[field.action.value] = action.name.value
                            res[field.name.value] = self.user.getName()
                            self.send(res)
                        else:
                            self.sendError(error.nameUsed.value)
                    # SERVER BROWSER-------------------------------------------------------------
                    elif message[field.action.value] == action.servers.value:
                        self.user.rmGame()
                        res[field.action.value] = action.servers.value
                        res[field.servers.value] = dataStor.getSagInfo()
                        self.send(res)
                    # MAKE GAME--------------------------------------------------------------------
                    elif message[field.action.value] == action.makeGame.value:
                        self.user.rmGame()
                        gameB = message[field.game.value]
                        sagGame = None
                        # Name capped at 30 chars; numeric settings parsed here.
                        try:
                            sagGame = dataStor.makeSagGame(self.user, gameB[game.name.value][:30], int(gameB[game.maxPlayers.value]),
                                int(gameB[game.damage.value]), int(gameB[game.shipPoints.value]))
                        except ValueError:
                            sagGame = None
                        if sagGame is None:
                            self.sendError(error.createFail.value)
                        else:
                            sagGame.addUser(self.user)
                            res[field.action.value] = action.join.value
                            res[field.game.value] = sagGame.getInfo()
                            self.send(res)
                    # JOIN GAME---------------------------------------------------------------------
                    elif message[field.action.value] == action.join.value:
                        self.user.rmGame()
                        sagGame = dataStor.getSagGame(message[field.game.value][game.id.value])
                        if sagGame is None or not sagGame.addUser(self.user):
                            self.sendError(error.joinFail.value)
                        else:
                            res[field.action.value] = action.join.value
                            res[field.game.value] = sagGame.getInfo()
                            self.send(res)
                    # UPDATE--------------------------------------------------------------------------
                    elif message[field.action.value] == action.update.value and self.user.game:
                        self.user.game.recUpdate(self.user, message[field.game.value])
            except json.JSONDecodeError as e:
                print(e.msg)
                self.sendError(error.badRequest)
            # NOTE(review): errorCount is reset here but never incremented in
            # this class — presumably handled elsewhere; confirm.
            if not self.error:
                self.errorCount = 0
            self.error = False

    def sendError(self, errorCode):
        """Send an error-action message carrying *errorCode*."""
        res = {}
        res[field.action.value] = action.error.value
        res[field.error.value] = errorCode
        self.send(res)

    def send(self, data):
        """Serialize *data* to JSON and queue it for sending on the loop."""
        asyncio.get_event_loop().create_task(self._sendHelper(json.dumps(data)))

    async def _sendHelper(self, data):
        try:
            print("Send: " + str(data))
            await self.conn.send(data)
        except websockets.exceptions.ConnectionClosed as e:
            print(e)
            self.destory()

    def destory(self):
        """Tear down the client: stop the loop and detach from user/game."""
        self.alive = False
        if self.user:
            self.user.rmGame()
            self.user.setSock(None)
@unique
class field(Enum):
    """Top-level keys of every JSON message (short codes keep frames small)."""
    action = "0"
    session = "1"
    servers = "2"  # [browser]
    game = "3"  # game
    chatContext = "4"
    chatMessage = "5"
    name = "6"
    error = "7"
@unique
class action(Enum):
    """Values of the ``field.action`` key: what a message asks for/reports."""
    error = "1"
    update = "2"
    init = "3"
    servers = "4"
    join = "5"
    name = "6"
    makeGame = "7"
    chat = "8"
    command = "9"
@unique
class error(Enum):
    """Error codes sent in ``field.error`` via client.sendError."""
    repeat = "0"
    stop = "1"
    badRequest = "2"
    joinFail = "3"
    createFail = "4"
    badInit = "5"
    forbidden = "6"
    nameUsed = "7"
@unique
class game(Enum):
    """Keys of the game-info object carried under ``field.game``."""
    id = "0"
    players = "1"  # [player]
    running = "2"
    winner = "3"
    name = "4"
    owner = "5"
    maxPlayers = "6"
    damage = "7"
    shipPoints = "8"
    mode = "9"
    teams = "10"
    map = "11"
@unique
class player(Enum):
    """Keys of a player object inside ``game.players``."""
    id = "0"
    name = "1"
    team = "2"
    gameObj = "3"  # [gameObj]
    primary = "4"  # weapon
    primaryAmmo = "5"
    secondary = "6"  # weapon
    secondaryAmmo = "7"
    attack = "8"
    defense = "9"
    scout = "10"
    speed = "11"
    isFlagship = "12"
    ships = "13"
    delete = "14"
    ready = "15"
@unique
class transform(Enum):
    """Keys describing an object's position/motion state."""
    id = "0"
    pos = "1"  # {x,y}
    rot = "2"
    targetPos = "3"  # {x,y}
    targetRot = "4"
    posV = "5"  # {x,y}
    rotV = "6"
    hide = "7"
    destory = "8"
@unique
class gameObj(Enum):
    """Keys of an in-game object record."""
    size = "0"
    type = "1"
    transform = "2"  # transform
@unique
class weapon(Enum):
    """Weapon/ability type codes."""
    lazer = "0"
    missle = "1"
    rail = "2"
    mine = "3"
    fighter = "4"
    plazma = "5"
    emc = "6"
    jump = "7"
    repair = "8"
@unique
class chatContext(Enum):
    """Scope of a chat message: global, game-wide, or team-only."""
    free = "0"
    game = "1"
    team = "2"
@unique
class command(Enum):
    """Keys of an in-game command payload."""
    source = "0"  # transform
    fire = "1"  # ammo used if applicable
    target = "2"  # transform
    split = "3"  # size of new fleet
    merge = "4"  # [transform]
    weapon = "5"
@unique
class gameMap(Enum):
    """Keys of the map-dimensions object."""
    height = "0"
    width = "1"
@unique
class objType(Enum):
    """Codes for ``gameObj.type``."""
    fleet = "1"
    scout = "2"
    scoutMove = "3"
    missle = "4"
    plasma = "5"
    rail = "6"
|
mit
| 310,050,558,187,058,500
| 31.905512
| 133
| 0.459495
| false
| 4.039149
| false
| false
| false
|
jumpstarter-io/nova
|
nova/tests/api/openstack/compute/plugins/v3/test_servers.py
|
1
|
134728
|
# Copyright 2010-2011 OpenStack Foundation
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import contextlib
import copy
import datetime
import uuid
import iso8601
import mock
import mox
from oslo.config import cfg
import six.moves.urllib.parse as urlparse
import testtools
import webob
from nova.api.openstack import compute
from nova.api.openstack.compute import plugins
from nova.api.openstack.compute.plugins.v3 import access_ips
from nova.api.openstack.compute.plugins.v3 import ips
from nova.api.openstack.compute.plugins.v3 import keypairs
from nova.api.openstack.compute.plugins.v3 import servers
from nova.api.openstack.compute.schemas.v3 import keypairs as keypairs_schema
from nova.api.openstack.compute.schemas.v3 import servers as servers_schema
from nova.api.openstack.compute import views
from nova.api.openstack import extensions
from nova.compute import api as compute_api
from nova.compute import flavors
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import db
from nova.db.sqlalchemy import models
from nova import exception
from nova.i18n import _
from nova.image import glance
from nova.network import manager
from nova.network.neutronv2 import api as neutron_api
from nova import objects
from nova.objects import instance as instance_obj
from nova.openstack.common import jsonutils
from nova.openstack.common import policy as common_policy
from nova.openstack.common import timeutils
from nova import policy
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
from nova.tests import fake_network
from nova.tests.image import fake
from nova.tests import matchers
from nova import utils as nova_utils
CONF = cfg.CONF
CONF.import_opt('password_length', 'nova.utils')
FAKE_UUID = fakes.FAKE_UUID
INSTANCE_IDS = {FAKE_UUID: 1}
FIELDS = instance_obj.INSTANCE_DEFAULT_FIELDS
def fake_gen_uuid():
    """Stub uuid generator: always returns the well-known test UUID."""
    return FAKE_UUID
def return_servers_empty(context, *args, **kwargs):
    """Stub for instance listings: report zero servers regardless of filters."""
    # Filter/pagination arguments are accepted only to match the real
    # signature; the answer is always an empty list.
    return []
def instance_update_and_get_original(context, instance_uuid, values,
                                     update_cells=True,
                                     columns_to_join=None,
                                     ):
    """Stub for db.instance_update_and_get_original.

    Builds a stubbed instance, merges *values* over it, and returns the
    (old, new) pair — here identical, which is sufficient for the tests.
    """
    inst = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                               name=values.get('display_name'))
    inst = dict(inst, **values)
    return (inst, inst)
def instance_update(context, instance_uuid, values, update_cells=True):
    """Stub for db.instance_update: stubbed instance with *values* merged in."""
    inst = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                               name=values.get('display_name'))
    inst = dict(inst, **values)
    return inst
def fake_compute_api(cls, req, id):
    """Stub compute-API action that always reports success."""
    return True
def fake_start_stop_not_ready(self, context, instance):
    """Stub start/stop that always fails with InstanceNotReady."""
    raise exception.InstanceNotReady(instance_id=instance["uuid"])
def fake_start_stop_invalid_state(self, context, instance):
    """Stub start/stop that always fails with InstanceInvalidState."""
    raise exception.InstanceInvalidState(
        instance_uuid=instance['uuid'], attr='fake_attr',
        method='fake_method', state='fake_state')
def fake_instance_get_by_uuid_not_found(context, uuid,
                                        columns_to_join, use_slave=False):
    """Stub lookup that always raises InstanceNotFound for *uuid*."""
    raise exception.InstanceNotFound(instance_id=uuid)
class MockSetAdminPassword(object):
    """Callable test double for set_admin_password.

    Records the instance id and password it was invoked with so that
    tests can assert on them afterwards.
    """

    def __init__(self):
        # Nothing captured until the double is invoked.
        self.instance_id = None
        self.password = None

    def __call__(self, context, instance_id, password):
        self.instance_id, self.password = instance_id, password
class Base64ValidationTest(test.TestCase):
    """Tests for ServersController._decode_base64 (lenient base64 handling)."""

    def setUp(self):
        super(Base64ValidationTest, self).setUp()
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)

    def test_decode_base64(self):
        # Round-trips plain text.
        value = "A random string"
        result = self.controller._decode_base64(base64.b64encode(value))
        self.assertEqual(result, value)

    def test_decode_base64_binary(self):
        # Round-trips arbitrary binary data, including NUL bytes.
        value = "\x00\x12\x75\x99"
        result = self.controller._decode_base64(base64.b64encode(value))
        self.assertEqual(result, value)

    def test_decode_base64_whitespace(self):
        # Whitespace interleaved in the encoding must be tolerated.
        value = "A random string"
        encoded = base64.b64encode(value)
        white = "\n \n%s\t%s\n" % (encoded[:2], encoded[2:])
        result = self.controller._decode_base64(white)
        self.assertEqual(result, value)

    def test_decode_base64_invalid(self):
        # Non-base64 input decodes to None rather than raising.
        invalid = "A random string"
        result = self.controller._decode_base64(invalid)
        self.assertIsNone(result)

    def test_decode_base64_illegal_bytes(self):
        # Illegal characters embedded in otherwise-valid base64 -> None.
        value = "A random string"
        encoded = base64.b64encode(value)
        white = ">\x01%s*%s()" % (encoded[:2], encoded[2:])
        result = self.controller._decode_base64(white)
        self.assertIsNone(result)
class NeutronV2Subclass(neutron_api.API):
    """Used to ensure that API handles subclasses properly."""
    # Intentionally empty: only the subclass relationship matters.
    pass
class ControllerTest(test.TestCase):
    """Shared fixture: stubbed DB/image/network layers and a fresh controller."""

    def setUp(self):
        super(ControllerTest, self).setUp()
        self.flags(verbose=True, use_ipv6=False)
        fakes.stub_out_rate_limiting(self.stubs)
        fakes.stub_out_key_pair_funcs(self.stubs)
        fake.stub_out_image_service(self.stubs)
        # Canned instance-getters backing the DB stubs below.
        return_server = fakes.fake_instance_get()
        return_servers = fakes.fake_instance_get_all_by_filters()
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       return_servers)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       return_server)
        self.stubs.Set(db, 'instance_update_and_get_original',
                       instance_update_and_get_original)
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)
        self.ips_controller = ips.IPsController()
        # Fresh policy state per test.
        policy.reset()
        policy.init()
        fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
class ServersControllerTest(ControllerTest):
def setUp(self):
super(ServersControllerTest, self).setUp()
CONF.set_override('host', 'localhost', group='glance')
def test_requested_networks_prefix(self):
uuid = 'br-00000000-0000-0000-0000-000000000000'
requested_networks = [{'uuid': uuid}]
res = self.controller._get_requested_networks(requested_networks)
self.assertIn((uuid, None), res.as_tuples())
def test_requested_networks_neutronv2_enabled_with_port(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(None, None, port)], res.as_tuples())
    def test_requested_networks_neutronv2_enabled_with_network(self):
        """A bare network uuid maps to (uuid, None, None) under neutronv2."""
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        requested_networks = [{'uuid': network}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(network, None, None)], res.as_tuples())
    def test_requested_networks_neutronv2_enabled_with_network_and_port(self):
        """When a port is supplied the network uuid is dropped from the tuple."""
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network, 'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port)], res.as_tuples())
    def test_requested_networks_neutronv2_enabled_conflict_on_fixed_ip(self):
        """Supplying both a fixed_ip and a port is rejected as a bad request."""
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        addr = '10.0.0.1'
        requested_networks = [{'uuid': network,
                               'fixed_ip': addr,
                               'port': port}]
        self.assertRaises(
            webob.exc.HTTPBadRequest,
            self.controller._get_requested_networks,
            requested_networks)
    def test_requested_networks_neutronv2_disabled_with_port(self):
        """Requesting a port without the neutronv2 API class is a bad request."""
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'port': port}]
        self.assertRaises(
            webob.exc.HTTPBadRequest,
            self.controller._get_requested_networks,
            requested_networks)
    def test_requested_networks_api_enabled_with_v2_subclass(self):
        """Port requests are honored when the neutronv2 API is configured."""
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network, 'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port)], res.as_tuples())
    def test_requested_networks_neutronv2_subclass_with_port(self):
        """Port requests also work when a neutronv2 *subclass* is configured."""
        cls = 'nova.tests.api.openstack.compute.test_servers.NeutronV2Subclass'
        self.flags(network_api_class=cls)
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port)], res.as_tuples())
    def test_get_server_by_uuid(self):
        """show() returns the server whose id matches the requested uuid."""
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        res_dict = self.controller.show(req, FAKE_UUID)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
    def test_get_server_joins_pci_devices(self):
        """show() must ask the compute API to join the pci_devices attribute."""
        self.expected_attrs = None
        def fake_get(_self, *args, **kwargs):
            # Capture the expected_attrs the controller passed down.
            self.expected_attrs = kwargs['expected_attrs']
            ctxt = context.RequestContext('fake', 'fake')
            return fake_instance.fake_instance_obj(ctxt)
        self.stubs.Set(compute_api.API, 'get', fake_get)
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        self.controller.show(req, FAKE_UUID)
        self.assertIn('pci_devices', self.expected_attrs)
    def test_unique_host_id(self):
        """Create two servers with the same host and different
        project_ids and check that the host_id's are unique.
        """
        def return_instance_with_host(self, *args, **kwargs):
            # Fresh random project_id per DB lookup: hostId is derived from
            # (host, project_id), so each show() should hash differently.
            project_id = str(uuid.uuid4())
            return fakes.stub_instance(id=1, uuid=FAKE_UUID,
                                       project_id=project_id,
                                       host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid',
                       return_instance_with_host)
        self.stubs.Set(db, 'instance_get',
                       return_instance_with_host)
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        server1 = self.controller.show(req, FAKE_UUID)
        server2 = self.controller.show(req, FAKE_UUID)
        self.assertNotEqual(server1['server']['hostId'],
                            server2['server']['hostId'])
def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
status="ACTIVE", progress=100):
return {
"server": {
"id": uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": progress,
"name": "server1",
"status": status,
"hostId": '',
"image": {
"id": "10",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'}
]
},
"metadata": {
"seq": "1",
},
"links": [
{
"rel": "self",
"href": "http://localhost/v3/servers/%s" % uuid,
},
{
"rel": "bookmark",
"href": "http://localhost/servers/%s" % uuid,
},
],
}
}
    def test_get_server_by_id(self):
        """The default fake instance renders as BUILD with zero progress."""
        self.flags(use_ipv6=True)
        image_bookmark = "http://localhost/images/10"
        flavor_bookmark = "http://localhost/flavors/1"
        uuid = FAKE_UUID
        req = fakes.HTTPRequestV3.blank('/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)
        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     status="BUILD",
                                                     progress=0)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
    def test_get_server_with_active_status_by_id(self):
        """An ACTIVE instance renders with the default ACTIVE/100 body."""
        image_bookmark = "http://localhost/images/10"
        flavor_bookmark = "http://localhost/flavors/1"
        new_return_server = fakes.fake_instance_get(
            vm_state=vm_states.ACTIVE, progress=100)
        self.stubs.Set(db, 'instance_get_by_uuid', new_return_server)
        uuid = FAKE_UUID
        req = fakes.HTTPRequestV3.blank('/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)
        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
    def test_get_server_with_id_image_ref_by_id(self):
        """Explicit image_ref/flavor_id still produce the expected body."""
        image_ref = "10"
        image_bookmark = "http://localhost/images/10"
        flavor_id = "1"
        flavor_bookmark = "http://localhost/flavors/1"
        new_return_server = fakes.fake_instance_get(
            vm_state=vm_states.ACTIVE, image_ref=image_ref,
            flavor_id=flavor_id, progress=100)
        self.stubs.Set(db, 'instance_get_by_uuid', new_return_server)
        uuid = FAKE_UUID
        req = fakes.HTTPRequestV3.blank('/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)
        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
    def test_get_server_addresses_from_cache(self):
        """GET /servers/<id>/ips renders addresses from the network cache."""
        pub0 = ('172.19.0.1', '172.19.0.2',)
        pub1 = ('1.2.3.4',)
        pub2 = ('b33f::fdee:ddff:fecc:bbaa',)
        priv0 = ('192.168.0.3', '192.168.0.4',)
        def _ip(ip):
            # Shape a bare address string like a cache 'ips' entry.
            return {'address': ip, 'type': 'fixed'}
        nw_cache = [
            {'address': 'aa:aa:aa:aa:aa:aa',
             'id': 1,
             'network': {'bridge': 'br0',
                         'id': 1,
                         'label': 'public',
                         'subnets': [{'cidr': '172.19.0.0/24',
                                      'ips': [_ip(ip) for ip in pub0]},
                                      {'cidr': '1.2.3.0/16',
                                       'ips': [_ip(ip) for ip in pub1]},
                                      {'cidr': 'b33f::/64',
                                       'ips': [_ip(ip) for ip in pub2]}]}},
            {'address': 'bb:bb:bb:bb:bb:bb',
             'id': 2,
             'network': {'bridge': 'br1',
                         'id': 2,
                         'label': 'private',
                         'subnets': [{'cidr': '192.168.0.0/24',
                                      'ips': [_ip(ip) for ip in priv0]}]}}]
        return_server = fakes.fake_instance_get(nw_cache=nw_cache)
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)
        req = fakes.HTTPRequestV3.blank('/servers/%s/ips' % FAKE_UUID)
        res_dict = self.ips_controller.index(req, FAKE_UUID)
        # Addresses are grouped by network label; each entry carries the
        # MAC of the VIF the subnet belonged to in the cache above.
        expected = {
            'addresses': {
                'private': [
                    {'version': 4, 'addr': '192.168.0.3',
                     'type': 'fixed', 'mac_addr': 'bb:bb:bb:bb:bb:bb'},
                    {'version': 4, 'addr': '192.168.0.4',
                     'type': 'fixed', 'mac_addr': 'bb:bb:bb:bb:bb:bb'},
                ],
                'public': [
                    {'version': 4, 'addr': '172.19.0.1',
                     'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 4, 'addr': '172.19.0.2',
                     'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 4, 'addr': '1.2.3.4',
                     'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa',
                     'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'},
                ],
            },
        }
        self.assertThat(res_dict, matchers.DictMatches(expected))
    def test_get_server_addresses_nonexistent_network(self):
        """Asking for ips on an unknown network label yields 404."""
        url = '/v3/servers/%s/ips/network_0' % FAKE_UUID
        req = fakes.HTTPRequestV3.blank(url)
        self.assertRaises(webob.exc.HTTPNotFound, self.ips_controller.show,
                          req, FAKE_UUID, 'network_0')
    def test_get_server_addresses_nonexistent_server(self):
        """Asking for ips on an unknown server yields 404."""
        def fake_instance_get(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        server_id = str(uuid.uuid4())
        req = fakes.HTTPRequestV3.blank('/servers/%s/ips' % server_id)
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.ips_controller.index, req, server_id)
    def test_get_server_list_empty(self):
        """index() returns an empty server list when the DB has none."""
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       return_servers_empty)
        req = fakes.HTTPRequestV3.blank('/servers')
        res_dict = self.controller.index(req)
        num_servers = len(res_dict['servers'])
        self.assertEqual(0, num_servers)
    def test_get_server_list_with_reservation_id(self):
        """index() with ?reservation_id still lists the stubbed servers."""
        req = fakes.HTTPRequestV3.blank('/servers?reservation_id=foo')
        res_dict = self.controller.index(req)
        i = 0
        for s in res_dict['servers']:
            self.assertEqual(s.get('name'), 'server%d' % (i + 1))
            i += 1
    def test_get_server_list_with_reservation_id_empty(self):
        # NOTE(review): despite the "_empty" name this test asserts a
        # non-empty, named server list and is identical to the "_details"
        # test below — looks like a copy-paste; confirm intended behavior.
        req = fakes.HTTPRequestV3.blank('/servers/detail?'
                                        'reservation_id=foo')
        res_dict = self.controller.detail(req)
        i = 0
        for s in res_dict['servers']:
            self.assertEqual(s.get('name'), 'server%d' % (i + 1))
            i += 1
    def test_get_server_list_with_reservation_id_details(self):
        """detail() with ?reservation_id still lists the stubbed servers."""
        req = fakes.HTTPRequestV3.blank('/servers/detail?'
                                        'reservation_id=foo')
        res_dict = self.controller.detail(req)
        i = 0
        for s in res_dict['servers']:
            self.assertEqual(s.get('name'), 'server%d' % (i + 1))
            i += 1
    def test_get_server_list(self):
        """index() lists all five stubbed servers with self/bookmark links
        but no image detail (that is detail()-only).
        """
        req = fakes.HTTPRequestV3.blank('/servers')
        res_dict = self.controller.index(req)
        self.assertEqual(len(res_dict['servers']), 5)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['name'], 'server%d' % (i + 1))
            self.assertIsNone(s.get('image', None))
            expected_links = [
                {
                    "rel": "self",
                    "href": "http://localhost/v3/servers/%s" % s['id'],
                },
                {
                    "rel": "bookmark",
                    "href": "http://localhost/servers/%s" % s['id'],
                },
            ]
            self.assertEqual(s['links'], expected_links)
    def test_get_servers_with_limit(self):
        """?limit=3 truncates the list and adds a 'next' link carrying the
        same limit plus a marker pointing at the last returned server.
        """
        req = fakes.HTTPRequestV3.blank('/servers?limit=3')
        res_dict = self.controller.index(req)
        servers = res_dict['servers']
        self.assertEqual([s['id'] for s in servers],
                [fakes.get_fake_uuid(i) for i in xrange(len(servers))])
        servers_links = res_dict['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')
        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual('/v3/servers', href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        expected_params = {'limit': ['3'],
                           'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected_params))
    def test_get_servers_with_limit_bad_value(self):
        """A non-integer ?limit is a 400."""
        req = fakes.HTTPRequestV3.blank('/servers?limit=aaa')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)
    def test_get_server_details_empty(self):
        """detail() returns an empty server list when the DB has none."""
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       return_servers_empty)
        req = fakes.HTTPRequestV3.blank('/servers/detail')
        res_dict = self.controller.detail(req)
        num_servers = len(res_dict['servers'])
        self.assertEqual(0, num_servers)
    def test_get_server_details_with_limit(self):
        """detail() paginates like index(): 'next' link keeps limit+marker."""
        req = fakes.HTTPRequestV3.blank('/servers/detail?limit=3')
        res = self.controller.detail(req)
        servers = res['servers']
        self.assertEqual([s['id'] for s in servers],
                [fakes.get_fake_uuid(i) for i in xrange(len(servers))])
        servers_links = res['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')
        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual('/v3/servers/detail', href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        expected = {'limit': ['3'], 'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected))
    def test_get_server_details_with_limit_bad_value(self):
        """A non-integer ?limit on detail() is a 400."""
        req = fakes.HTTPRequestV3.blank('/servers/detail?limit=aaa')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.detail, req)
    def test_get_server_details_with_limit_and_other_params(self):
        """Unrelated query params are preserved verbatim in the 'next' link."""
        req = fakes.HTTPRequestV3.blank('/servers/detail'
                                        '?limit=3&blah=2:t')
        res = self.controller.detail(req)
        servers = res['servers']
        self.assertEqual([s['id'] for s in servers],
                [fakes.get_fake_uuid(i) for i in xrange(len(servers))])
        servers_links = res['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')
        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual('/v3/servers/detail', href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        expected = {'limit': ['3'], 'blah': ['2:t'],
                    'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected))
    def test_get_servers_with_too_big_limit(self):
        """A limit larger than the result set yields no pagination links."""
        req = fakes.HTTPRequestV3.blank('/servers?limit=30')
        res_dict = self.controller.index(req)
        self.assertNotIn('servers_links', res_dict)
    def test_get_servers_with_bad_limit(self):
        """A non-integer ?limit is a 400."""
        req = fakes.HTTPRequestV3.blank('/servers?limit=asdf')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)
    def test_get_servers_with_marker(self):
        """?marker=<uuid of server3> resumes the listing at server4."""
        url = '/v3/servers?marker=%s' % fakes.get_fake_uuid(2)
        req = fakes.HTTPRequestV3.blank(url)
        servers = self.controller.index(req)['servers']
        self.assertEqual([s['name'] for s in servers], ["server4", "server5"])
    def test_get_servers_with_limit_and_marker(self):
        """limit and marker combine: two servers starting after server2."""
        url = '/v3/servers?limit=2&marker=%s' % fakes.get_fake_uuid(1)
        req = fakes.HTTPRequestV3.blank(url)
        servers = self.controller.index(req)['servers']
        self.assertEqual([s['name'] for s in servers], ['server3', 'server4'])
    def test_get_servers_with_bad_marker(self):
        """A marker that is not a known instance uuid is a 400."""
        req = fakes.HTTPRequestV3.blank('/servers?limit=2&marker=asdf')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)
    def test_get_servers_with_bad_option(self):
        """Unknown query options are ignored rather than rejected."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?unknownoption=whee')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_image(self):
        """?image=<id> is passed through as a search option to get_all()."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('image', search_opts)
            self.assertEqual(search_opts['image'], '12345')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?image=12345')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_tenant_id_filter_converts_to_project_id_for_admin(self):
        """For admins, ?tenant_id becomes the project_id DB filter."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertIsNotNone(filters)
            self.assertEqual(filters['project_id'], 'newfake')
            self.assertFalse(filters.get('tenant_id'))
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers'
                                        '?all_tenants=1&tenant_id=newfake',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_tenant_id_filter_no_admin_context(self):
        """Non-admins cannot filter by tenant_id; their own project wins."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotEqual(filters, None)
            self.assertEqual(filters['project_id'], 'fake')
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?tenant_id=newfake')
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_tenant_id_filter_implies_all_tenants(self):
        """For admins, ?tenant_id alone implies the all_tenants behavior."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotEqual(filters, None)
            # The project_id assertion checks that the project_id
            # filter is set to that specified in the request url and
            # not that of the context, verifying that the all_tenants
            # flag was enabled
            self.assertEqual(filters['project_id'], 'newfake')
            self.assertFalse(filters.get('tenant_id'))
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?tenant_id=newfake',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_all_tenants_param_normal(self):
        """A bare ?all_tenants drops the project_id restriction."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotIn('project_id', filters)
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?all_tenants',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_all_tenants_param_one(self):
        """?all_tenants=1 drops the project_id restriction."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotIn('project_id', filters)
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=1',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_all_tenants_param_zero(self):
        """?all_tenants=0 disables the flag; it must not leak into filters."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotIn('all_tenants', filters)
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=0',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_all_tenants_param_false(self):
        """?all_tenants=false disables the flag as well."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotIn('all_tenants', filters)
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=false',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_all_tenants_param_invalid(self):
        """A non-boolean ?all_tenants value is a 400; the DB is never hit."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None,
                         expected_attrs=None):
            self.assertNotIn('all_tenants', filters)
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=xxx',
                                        use_admin_context=True)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)
    def test_admin_restricted_tenant(self):
        """Without all_tenants even an admin only sees its own project."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertIsNotNone(filters)
            self.assertEqual(filters['project_id'], 'fake')
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_all_tenants_pass_policy(self):
        """all_tenants works for non-admins when policy allows it."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertIsNotNone(filters)
            self.assertNotIn('project_id', filters)
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        # Grant both get_all and get_all_tenants to the request's project.
        rules = {
            "compute:get_all_tenants":
                common_policy.parse_rule("project_id:fake"),
            "compute:get_all":
                common_policy.parse_rule("project_id:fake"),
        }
        policy.set_rules(rules)
        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=1')
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_all_tenants_fail_policy(self):
        """all_tenants is rejected when get_all_tenants policy denies it."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None):
            self.assertIsNotNone(filters)
            return [fakes.stub_instance(100)]
        # get_all_tenants requires a different project than the request's.
        rules = {
            "compute:get_all_tenants":
                common_policy.parse_rule("project_id:non_fake"),
            "compute:get_all":
                common_policy.parse_rule("project_id:fake"),
        }
        policy.set_rules(rules)
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=1')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller.index, req)
    def test_get_servers_allows_flavor(self):
        """?flavor=<id> is passed through as a search option to get_all()."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('flavor', search_opts)
            # flavor is an integer ID
            self.assertEqual(search_opts['flavor'], '12345')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?flavor=12345')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_with_bad_flavor(self):
        """A non-numeric flavor matches nothing; index() returns empty."""
        req = fakes.HTTPRequestV3.blank('/servers?flavor=abcde')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 0)
    def test_get_server_details_with_bad_flavor(self):
        """A non-numeric flavor matches nothing; detail() returns empty."""
        req = fakes.HTTPRequestV3.blank('/servers?flavor=abcde')
        servers = self.controller.detail(req)['servers']
        self.assertThat(servers, testtools.matchers.HasLength(0))
    def test_get_servers_allows_status(self):
        """?status=active is translated into a vm_state search option."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], [vm_states.ACTIVE])
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?status=active')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_task_status(self):
        """?status=reboot maps onto the set of reboot-related task states."""
        server_uuid = str(uuid.uuid4())
        task_state = task_states.REBOOTING
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('task_state', search_opts)
            self.assertEqual([task_states.REBOOT_PENDING,
                              task_states.REBOOT_STARTED,
                              task_states.REBOOTING],
                             search_opts['task_state'])
            db_list = [fakes.stub_instance(100, uuid=server_uuid,
                                           task_state=task_state)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?status=reboot')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_resize_status(self):
        # Test when resize status, it maps list of vm states.
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'],
                             [vm_states.ACTIVE, vm_states.STOPPED])
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?status=resize')
        servers = self.controller.detail(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_invalid_status(self):
        # Test getting servers by invalid status.
        req = fakes.HTTPRequestV3.blank('/servers?status=baloney',
                                        use_admin_context=False)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 0)
    def test_get_servers_deleted_status_as_user(self):
        """Non-admins may not filter by the deleted status: 403."""
        req = fakes.HTTPRequestV3.blank('/servers?status=deleted',
                                        use_admin_context=False)
        self.assertRaises(webob.exc.HTTPForbidden,
                          self.controller.detail, req)
    def test_get_servers_deleted_status_as_admin(self):
        """Admins can filter by deleted; it becomes a vm_state option."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], ['deleted'])
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?status=deleted',
                                        use_admin_context=True)
        servers = self.controller.detail(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_name(self):
        """?name=<regex> is passed through as a search option."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('name', search_opts)
            self.assertEqual(search_opts['name'], 'whee.*')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?name=whee.*')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_changes_since(self):
        """?changes-since is parsed into a tz-aware datetime search opt."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('changes-since', search_opts)
            changes_since = datetime.datetime(2011, 1, 24, 17, 8, 1,
                                              tzinfo=iso8601.iso8601.UTC)
            self.assertEqual(search_opts['changes-since'], changes_since)
            self.assertNotIn('deleted', search_opts)
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        params = 'changes-since=2011-01-24T17:08:01Z'
        req = fakes.HTTPRequestV3.blank('/servers?%s' % params)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_changes_since_bad_value(self):
        """A malformed changes-since timestamp is a 400."""
        params = 'changes-since=asdf'
        req = fakes.HTTPRequestV3.blank('/servers?%s' % params)
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req)
def test_get_servers_admin_filters_as_user(self):
"""Test getting servers by admin-only or unknown options when
context is not admin. Make sure the admin and unknown options
are stripped before they get to compute_api.get_all()
"""
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None, want_objects=False,
expected_attrs=None):
self.assertIsNotNone(search_opts)
# Allowed by user
self.assertIn('name', search_opts)
self.assertIn('ip', search_opts)
# OSAPI converts status to vm_state
self.assertIn('vm_state', search_opts)
# Allowed only by admins with admin API on
self.assertNotIn('unknown_option', search_opts)
db_list = [fakes.stub_instance(100, uuid=server_uuid)]
return instance_obj._make_instance_list(
context, objects.InstanceList(), db_list, FIELDS)
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
req = fakes.HTTPRequest.blank('/servers?%s' % query_str)
res = self.controller.index(req)
servers = res['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_admin_options_as_admin(self):
        """Test getting servers by admin-only or unknown options when
        context is admin. All options should be passed
        """
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            # Allowed by user
            self.assertIn('name', search_opts)
            # OSAPI converts status to vm_state
            self.assertIn('vm_state', search_opts)
            # Allowed only by admins with admin API on
            self.assertIn('ip', search_opts)
            self.assertIn('unknown_option', search_opts)
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
        req = fakes.HTTPRequestV3.blank('/servers?%s' % query_str,
                                        use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_ip(self):
        """Test getting servers by ip."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip', search_opts)
            self.assertEqual(search_opts['ip'], '10\..*')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?ip=10\..*')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_admin_allows_ip6(self):
        """Test getting servers by ip6 with admin_api enabled and
        admin context
        """
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip6', search_opts)
            self.assertEqual(search_opts['ip6'], 'ffff.*')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?ip6=ffff.*',
                                        use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_all_server_details(self):
        """detail() renders full image/flavor refs for every server."""
        expected_flavor = {
            "id": "1",
            "links": [
                {
                    "rel": "bookmark",
                    "href": 'http://localhost/flavors/1',
                },
            ],
        }
        expected_image = {
            "id": "10",
            "links": [
                {
                    "rel": "bookmark",
                    "href": 'http://localhost/images/10',
                },
            ],
        }
        req = fakes.HTTPRequestV3.blank('/servers/detail')
        res_dict = self.controller.detail(req)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['hostId'], '')
            self.assertEqual(s['name'], 'server%d' % (i + 1))
            self.assertEqual(s['image'], expected_image)
            self.assertEqual(s['flavor'], expected_flavor)
            self.assertEqual(s['status'], 'BUILD')
            self.assertEqual(s['metadata']['seq'], str(i + 1))
    def test_get_all_server_details_with_host(self):
        """We want to make sure that if two instances are on the same host,
        then they return the same hostId. If two instances are on different
        hosts, they should return different hostIds. In this test,
        there are 5 instances - 2 on one host and 3 on another.
        """
        def return_servers_with_host(context, *args, **kwargs):
            # Alternate hosts 0/1 across the five stubbed instances.
            return [fakes.stub_instance(i + 1, 'fake', 'fake', host=i % 2,
                                        uuid=fakes.get_fake_uuid(i))
                    for i in xrange(5)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       return_servers_with_host)
        req = fakes.HTTPRequestV3.blank('/servers/detail')
        res_dict = self.controller.detail(req)
        server_list = res_dict['servers']
        host_ids = [server_list[0]['hostId'], server_list[1]['hostId']]
        self.assertTrue(host_ids[0] and host_ids[1])
        self.assertNotEqual(host_ids[0], host_ids[1])
        for i, s in enumerate(server_list):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['hostId'], host_ids[i % 2])
            self.assertEqual(s['name'], 'server%d' % (i + 1))
    def test_get_servers_joins_pci_devices(self):
        """index() must ask the compute API to join the pci_devices attr."""
        self.expected_attrs = None
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            # Capture what the controller requested to be joined.
            self.expected_attrs = expected_attrs
            return []
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers', use_admin_context=True)
        self.assertIn('servers', self.controller.index(req))
        self.assertIn('pci_devices', self.expected_attrs)
class ServersControllerDeleteTest(ControllerTest):
    """Tests for DELETE /servers/<id> on the v3 servers controller."""
    def setUp(self):
        super(ServersControllerDeleteTest, self).setUp()
        # Flipped to True by the instance_destroy stub below so tests can
        # verify the delete path actually reached the DB layer.
        self.server_delete_called = False
        def instance_destroy_mock(*args, **kwargs):
            self.server_delete_called = True
            deleted_at = timeutils.utcnow()
            return fake_instance.fake_db_instance(deleted_at=deleted_at)
        self.stubs.Set(db, 'instance_destroy', instance_destroy_mock)
def _create_delete_request(self, uuid):
fakes.stub_out_instance_quota(self.stubs, 0, 10)
req = fakes.HTTPRequestV3.blank('/servers/%s' % uuid)
req.method = 'DELETE'
return req
def _delete_server_instance(self, uuid=FAKE_UUID):
req = self._create_delete_request(uuid)
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
self.controller.delete(req, uuid)
def test_delete_server_instance(self):
self._delete_server_instance()
self.assertTrue(self.server_delete_called)
def test_delete_server_instance_not_found(self):
self.assertRaises(webob.exc.HTTPNotFound,
self._delete_server_instance,
uuid='non-existent-uuid')
def test_delete_server_instance_while_building(self):
req = self._create_delete_request(FAKE_UUID)
self.controller.delete(req, FAKE_UUID)
self.assertTrue(self.server_delete_called)
def test_delete_locked_server(self):
req = self._create_delete_request(FAKE_UUID)
self.stubs.Set(compute_api.API, 'soft_delete',
fakes.fake_actions_to_locked_server)
self.stubs.Set(compute_api.API, 'delete',
fakes.fake_actions_to_locked_server)
self.assertRaises(webob.exc.HTTPConflict, self.controller.delete,
req, FAKE_UUID)
def test_delete_server_instance_while_resize(self):
req = self._create_delete_request(FAKE_UUID)
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE,
task_state=task_states.RESIZE_PREP))
self.controller.delete(req, FAKE_UUID)
# Delete shoud be allowed in any case, even during resizing,
# because it may get stuck.
self.assertTrue(self.server_delete_called)
def test_delete_server_instance_if_not_launched(self):
self.flags(reclaim_instance_interval=3600)
req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
req.method = 'DELETE'
self.server_delete_called = False
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(launched_at=None))
def instance_destroy_mock(*args, **kwargs):
self.server_delete_called = True
deleted_at = timeutils.utcnow()
return fake_instance.fake_db_instance(deleted_at=deleted_at)
self.stubs.Set(db, 'instance_destroy', instance_destroy_mock)
self.controller.delete(req, FAKE_UUID)
# delete() should be called for instance which has never been active,
# even if reclaim_instance_interval has been set.
self.assertEqual(self.server_delete_called, True)
class ServersControllerRebuildInstanceTest(ControllerTest):
    """Tests for the rebuild action and the start/stop server actions."""

    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    image_href = 'http://localhost/v3/fake/images/%s' % image_uuid

    def setUp(self):
        super(ServersControllerRebuildInstanceTest, self).setUp()
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
        # Baseline valid rebuild body; individual tests mutate it.
        self.body = {
            'rebuild': {
                'name': 'new_name',
                'imageRef': self.image_href,
                'metadata': {
                    'open': 'stack',
                },
            },
        }
        self.req = fakes.HTTPRequest.blank('/fake/servers/a/action')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"

    def test_rebuild_instance_with_blank_metadata_key(self):
        """An empty metadata key fails schema validation."""
        self.body['rebuild']['metadata'][''] = 'world'
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_with_metadata_key_too_long(self):
        """Metadata keys over 255 characters fail schema validation."""
        self.body['rebuild']['metadata'][('a' * 260)] = 'world'
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_with_metadata_value_too_long(self):
        """Metadata values over 255 characters fail schema validation."""
        self.body['rebuild']['metadata']['key1'] = ('a' * 260)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild, self.req,
                          FAKE_UUID, body=self.body)

    def test_rebuild_instance_with_metadata_value_not_string(self):
        """Non-string metadata values fail schema validation."""
        self.body['rebuild']['metadata']['key1'] = 1
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild, self.req,
                          FAKE_UUID, body=self.body)

    def test_rebuild_instance_fails_when_min_ram_too_small(self):
        """Rebuild is rejected when the image's min_ram exceeds the flavor."""
        # make min_ram larger than our instance ram size
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active', properties={'key1': 'value1'},
                        min_ram="4096", min_disk="10")

        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_fails_when_min_disk_too_small(self):
        """Rebuild is rejected when the image's min_disk exceeds the flavor."""
        # make min_disk larger than our instance disk size
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active', properties={'key1': 'value1'},
                        min_ram="128", min_disk="100000")

        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild, self.req,
                          FAKE_UUID, body=self.body)

    def test_rebuild_instance_image_too_large(self):
        """Rebuild is rejected when the image is bigger than the disk."""
        # make image size larger than our instance disk size
        size = str(1000 * (1024 ** 3))

        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active', size=size)

        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_name_all_blank(self):
        """A whitespace-only name fails schema validation."""
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True, status='active')

        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.body['rebuild']['name'] = '     '
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_with_deleted_image(self):
        """Rebuilding from a non-active (DELETED) image is rejected."""
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='DELETED')

        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_onset_file_limit_over_quota(self):
        """OnsetFileLimitExceeded from the compute API maps to 403."""
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True, status='active')

        with contextlib.nested(
            mock.patch.object(fake._FakeImageService, 'show',
                              side_effect=fake_get_image),
            mock.patch.object(self.controller.compute_api, 'rebuild',
                              side_effect=exception.OnsetFileLimitExceeded)
        ) as (
            show_mock, rebuild_mock
        ):
            self.req.body = jsonutils.dumps(self.body)
            self.assertRaises(webob.exc.HTTPForbidden,
                              self.controller._action_rebuild,
                              self.req, FAKE_UUID, body=self.body)

    def test_start(self):
        self.mox.StubOutWithMock(compute_api.API, 'start')
        compute_api.API.start(mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()

        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.controller._start_server(req, FAKE_UUID, body)

    def test_start_policy_failed(self):
        rules = {
            "compute:v3:servers:start":
                common_policy.parse_rule("project_id:non_fake")
        }
        policy.set_rules(rules)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller._start_server,
                                req, FAKE_UUID, body)
        self.assertIn("compute:v3:servers:start", exc.format_message())

    def test_start_not_ready(self):
        self.stubs.Set(compute_api.API, 'start', fake_start_stop_not_ready)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._start_server, req, FAKE_UUID, body)

    def test_start_locked_server(self):
        self.stubs.Set(compute_api.API, 'start',
                       fakes.fake_actions_to_locked_server)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._start_server, req, FAKE_UUID, body)

    def test_start_invalid(self):
        self.stubs.Set(compute_api.API, 'start', fake_start_stop_invalid_state)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._start_server, req, FAKE_UUID, body)

    def test_stop(self):
        self.mox.StubOutWithMock(compute_api.API, 'stop')
        compute_api.API.stop(mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()

        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(stop="")
        self.controller._stop_server(req, FAKE_UUID, body)

    def test_stop_policy_failed(self):
        rules = {
            "compute:v3:servers:stop":
                common_policy.parse_rule("project_id:non_fake")
        }
        policy.set_rules(rules)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(stop='')
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller._stop_server,
                                req, FAKE_UUID, body)
        self.assertIn("compute:v3:servers:stop", exc.format_message())

    def test_stop_not_ready(self):
        self.stubs.Set(compute_api.API, 'stop', fake_start_stop_not_ready)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._stop_server, req, FAKE_UUID, body)

    def test_stop_locked_server(self):
        self.stubs.Set(compute_api.API, 'stop',
                       fakes.fake_actions_to_locked_server)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._stop_server, req, FAKE_UUID, body)

    def test_stop_invalid_state(self):
        self.stubs.Set(compute_api.API, 'stop', fake_start_stop_invalid_state)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        # Fixed: this stop test previously sent a 'start' body, copied from
        # the start tests above.
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._stop_server, req, FAKE_UUID, body)

    def test_start_with_bogus_id(self):
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fake_instance_get_by_uuid_not_found)
        req = fakes.HTTPRequestV3.blank('/servers/test_inst/action')
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._start_server, req, 'test_inst', body)

    def test_stop_with_bogus_id(self):
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fake_instance_get_by_uuid_not_found)
        req = fakes.HTTPRequestV3.blank('/servers/test_inst/action')
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._stop_server, req, 'test_inst', body)
class ServersControllerUpdateTest(ControllerTest):
    """Tests for PUT /servers/{id} (server update) on the v3 controller."""

    def _get_request(self, body=None, options=None):
        # Build a PUT request for FAKE_UUID; `options`, when given, stubs the
        # DB instance lookup with those attributes.
        if options:
            self.stubs.Set(db, 'instance_get',
                           fakes.fake_instance_get(**options))
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        req.body = jsonutils.dumps(body)
        return req

    def test_update_server_all_attributes(self):
        """Update with every mutable attribute (currently just 'name')."""
        body = {'server': {
                  'name': 'server_test',
               }}
        req = self._get_request(body, {'name': 'server_test'})
        res_dict = self.controller.update(req, FAKE_UUID, body=body)

        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')

    def test_update_server_name(self):
        """A simple name update is reflected in the response."""
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body, {'name': 'server_test'})
        res_dict = self.controller.update(req, FAKE_UUID, body=body)

        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')

    def test_update_server_name_too_long(self):
        """Names longer than 255 characters fail schema validation."""
        body = {'server': {'name': 'x' * 256}}
        req = self._get_request(body, {'name': 'server_test'})
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_name_all_blank_spaces(self):
        """A whitespace-only name fails schema validation."""
        self.stubs.Set(db, 'instance_get',
                fakes.fake_instance_get(name='server_test'))
        req = fakes.HTTPRequest.blank('/v3/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        body = {'server': {'name': ' ' * 64}}
        req.body = jsonutils.dumps(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_admin_password_ignored(self):
        """An admin_password field in the update body must be dropped."""
        inst_dict = dict(name='server_test', admin_password='bacon')
        body = dict(server=inst_dict)

        def server_update(context, id, params):
            # Only display_name should reach the DB layer; the password
            # must have been filtered out by the controller.
            filtered_dict = {
                'display_name': 'server_test',
            }
            self.assertEqual(params, filtered_dict)
            filtered_dict['uuid'] = id
            return filtered_dict

        self.stubs.Set(db, 'instance_update', server_update)
        # FIXME (comstud)
        #        self.stubs.Set(db, 'instance_get',
        #                return_server_with_attributes(name='server_test'))

        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = "application/json"
        req.body = jsonutils.dumps(body)
        res_dict = self.controller.update(req, FAKE_UUID, body=body)

        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')

    def test_update_server_not_found(self):
        """An unknown instance on lookup yields HTTPNotFound."""
        def fake_get(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')

        self.stubs.Set(compute_api.API, 'get', fake_get)
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_not_found_on_update(self):
        """Instance vanishing during the update itself yields HTTPNotFound."""
        def fake_update(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')

        self.stubs.Set(db, 'instance_update_and_get_original', fake_update)
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
                          req, FAKE_UUID, body=body)

    def test_update_server_policy_fail(self):
        """A failing policy check raises PolicyNotAuthorized."""
        rule = {'compute:update': common_policy.parse_rule('role:admin')}
        policy.set_rules(rule)
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body, {'name': 'server_test'})
        self.assertRaises(exception.PolicyNotAuthorized,
                self.controller.update, req, FAKE_UUID, body=body)
class ServerStatusTest(test.TestCase):
    """Verify the mapping from (vm_state, task_state) to the API 'status'
    field, plus policy checks on the reboot/resize actions.
    """

    def setUp(self):
        super(ServerStatusTest, self).setUp()
        fakes.stub_out_nw_api(self.stubs)

        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)

    def _get_with_state(self, vm_state, task_state=None):
        # Stub the DB lookup so the instance reports the given states,
        # then fetch it through the controller.
        self.stubs.Set(db, 'instance_get_by_uuid',
                fakes.fake_instance_get(vm_state=vm_state,
                                        task_state=task_state))

        request = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        return self.controller.show(request, FAKE_UUID)

    def test_active(self):
        response = self._get_with_state(vm_states.ACTIVE)
        self.assertEqual(response['server']['status'], 'ACTIVE')

    def test_reboot(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBOOTING)
        self.assertEqual(response['server']['status'], 'REBOOT')

    def test_reboot_hard(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBOOTING_HARD)
        self.assertEqual(response['server']['status'], 'HARD_REBOOT')

    def test_reboot_resize_policy_fail(self):
        """Reboot is denied when policy requires the admin role."""
        def fake_get_server(context, req, id):
            return fakes.stub_instance(id)

        self.stubs.Set(self.controller, '_get_server', fake_get_server)

        rule = {'compute:reboot':
                common_policy.parse_rule('role:admin')}
        policy.set_rules(rule)
        req = fakes.HTTPRequestV3.blank('/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
                self.controller._action_reboot, req, '1234',
                {'reboot': {'type': 'HARD'}})

    def test_rebuild(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBUILDING)
        self.assertEqual(response['server']['status'], 'REBUILD')

    def test_rebuild_error(self):
        response = self._get_with_state(vm_states.ERROR)
        self.assertEqual(response['server']['status'], 'ERROR')

    def test_resize(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.RESIZE_PREP)
        self.assertEqual(response['server']['status'], 'RESIZE')

    def test_confirm_resize_policy_fail(self):
        """confirm_resize is denied when policy requires the admin role."""
        def fake_get_server(context, req, id):
            return fakes.stub_instance(id)

        self.stubs.Set(self.controller, '_get_server', fake_get_server)

        rule = {'compute:confirm_resize':
                common_policy.parse_rule('role:admin')}
        policy.set_rules(rule)
        req = fakes.HTTPRequestV3.blank('/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
                self.controller._action_confirm_resize, req, '1234', {})

    def test_verify_resize(self):
        response = self._get_with_state(vm_states.RESIZED, None)
        self.assertEqual(response['server']['status'], 'VERIFY_RESIZE')

    def test_revert_resize(self):
        response = self._get_with_state(vm_states.RESIZED,
                                        task_states.RESIZE_REVERTING)
        self.assertEqual(response['server']['status'], 'REVERT_RESIZE')

    def test_revert_resize_policy_fail(self):
        """revert_resize is denied when policy requires the admin role."""
        def fake_get_server(context, req, id):
            return fakes.stub_instance(id)

        self.stubs.Set(self.controller, '_get_server', fake_get_server)

        rule = {'compute:revert_resize':
                common_policy.parse_rule('role:admin')}
        policy.set_rules(rule)
        req = fakes.HTTPRequestV3.blank('/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
                self.controller._action_revert_resize, req, '1234', {})

    def test_password_update(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.UPDATING_PASSWORD)
        self.assertEqual(response['server']['status'], 'PASSWORD')

    def test_stopped(self):
        response = self._get_with_state(vm_states.STOPPED)
        self.assertEqual(response['server']['status'], 'SHUTOFF')
class ServersControllerCreateTest(test.TestCase):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
    def setUp(self):
        """Shared implementation for tests below that create instance."""
        super(ServersControllerCreateTest, self).setUp()

        self.flags(verbose=True,
                   enable_instance_password=True)
        # Fake "DB" of created instances, keyed by integer id and by uuid.
        self.instance_cache_num = 0
        self.instance_cache_by_id = {}
        self.instance_cache_by_uuid = {}

        fakes.stub_out_nw_api(self.stubs)

        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)

        def instance_create(context, inst):
            # Build a fake DB instance record and remember it in the caches.
            inst_type = flavors.get_flavor_by_flavor_id(3)
            image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
            def_image_ref = 'http://localhost/images/%s' % image_uuid
            self.instance_cache_num += 1
            instance = fake_instance.fake_db_instance(**{
                'id': self.instance_cache_num,
                'display_name': inst['display_name'] or 'test',
                'uuid': FAKE_UUID,
                'instance_type': dict(inst_type),
                'image_ref': inst.get('image_ref', def_image_ref),
                'user_id': 'fake',
                'project_id': 'fake',
                'reservation_id': inst['reservation_id'],
                "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
                "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
                "config_drive": None,
                "progress": 0,
                "fixed_ips": [],
                "task_state": "",
                "vm_state": "",
                "root_device_name": inst.get('root_device_name', 'vda'),
            })
            self.instance_cache_by_id[instance['id']] = instance
            self.instance_cache_by_uuid[instance['uuid']] = instance
            return instance

        def instance_get(context, instance_id):
            """Stub for compute/api create() pulling in instance after
            scheduling
            """
            return self.instance_cache_by_id[instance_id]

        def instance_update(context, uuid, values):
            instance = self.instance_cache_by_uuid[uuid]
            instance.update(values)
            return instance

        def server_update(context, instance_uuid, params, update_cells=True):
            inst = self.instance_cache_by_uuid[instance_uuid]
            inst.update(params)
            return inst

        def server_update_and_get_original(
                context, instance_uuid, params, update_cells=False,
                columns_to_join=None):
            # Returns (old, new); this stub returns the same dict twice.
            inst = self.instance_cache_by_uuid[instance_uuid]
            inst.update(params)
            return (inst, inst)

        def fake_method(*args, **kwargs):
            pass

        def project_get_networks(context, user_id):
            return dict(id='1', host='localhost')

        def queue_get_for(context, *args):
            return 'network_topic'

        fakes.stub_out_rate_limiting(self.stubs)
        fakes.stub_out_key_pair_funcs(self.stubs)
        fake.stub_out_image_service(self.stubs)
        # Make uuid generation deterministic for assertions on FAKE_UUID.
        self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
        self.stubs.Set(db, 'project_get_networks',
                       project_get_networks)
        self.stubs.Set(db, 'instance_create', instance_create)
        self.stubs.Set(db, 'instance_system_metadata_update',
                       fake_method)
        self.stubs.Set(db, 'instance_get', instance_get)
        self.stubs.Set(db, 'instance_update', instance_update)
        self.stubs.Set(db, 'instance_update_and_get_original',
                       server_update_and_get_original)
        self.stubs.Set(manager.VlanManager, 'allocate_fixed_ip',
                       fake_method)
        # Baseline valid create body; individual tests mutate it.
        self.body = {
            'server': {
                'name': 'server_test',
                'imageRef': self.image_uuid,
                'flavorRef': self.flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                    },
                },
            }
        self.bdm = [{'delete_on_termination': 1,
                     'device_name': 123,
                     'volume_size': 1,
                     'volume_id': '11111111-1111-1111-1111-111111111111'}]

        self.req = fakes.HTTPRequest.blank('/fake/servers')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
def _check_admin_password_len(self, server_dict):
"""utility function - check server_dict for admin_password length."""
self.assertEqual(CONF.password_length,
len(server_dict["adminPass"]))
    def _check_admin_password_missing(self, server_dict):
        """utility function - check server_dict for admin_password absence."""
        self.assertNotIn("adminPass", server_dict)
def _test_create_instance(self, flavor=2):
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
self.body['server']['imageRef'] = image_uuid
self.body['server']['flavorRef'] = flavor
self.req.body = jsonutils.dumps(self.body)
server = self.controller.create(self.req, body=self.body).obj['server']
self._check_admin_password_len(server)
self.assertEqual(FAKE_UUID, server['id'])
    def test_create_instance_private_flavor(self):
        """Creating from a non-public flavor must fail with 400."""
        values = {
            'name': 'fake_name',
            'memory_mb': 512,
            'vcpus': 1,
            'root_gb': 10,
            'ephemeral_gb': 10,
            'flavorid': '1324',
            'swap': 0,
            'rxtx_factor': 0.5,
            'vcpu_weight': 1,
            'disabled': False,
            'is_public': False,  # private flavor is the point of this test
        }
        db.flavor_create(context.get_admin_context(), values)
        self.assertRaises(webob.exc.HTTPBadRequest, self._test_create_instance,
                          flavor=1324)
def test_create_server_bad_image_href(self):
image_href = 1
self.body['server']['min_count'] = 1
self.body['server']['imageRef'] = image_href,
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
# TODO(cyeoh): bp-v3-api-unittests
# This needs to be ported to the os-networks extension tests
# def test_create_server_with_invalid_networks_parameter(self):
# self.ext_mgr.extensions = {'os-networks': 'fake'}
# image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
# flavor_ref = 'http://localhost/123/flavors/3'
# body = {
# 'server': {
# 'name': 'server_test',
# 'imageRef': image_href,
# 'flavorRef': flavor_ref,
# 'networks': {'uuid': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'},
# }
# }
# req = fakes.HTTPRequest.blank('/v2/fake/servers')
# req.method = 'POST'
# req.body = jsonutils.dumps(body)
# req.headers["content-type"] = "application/json"
# self.assertRaises(webob.exc.HTTPBadRequest,
# self.controller.create,
# req,
# body)
    def test_create_server_with_deleted_image(self):
        """Creating from a DELETED image must fail with 400."""
        # Get the fake image service so we can set the status to deleted
        (image_service, image_id) = glance.get_remote_image_service(
            context, '')
        image_service.update(context, self.image_uuid, {'status': 'DELETED'})
        # Restore the image status after the test so other tests see it active.
        self.addCleanup(image_service.update, context, self.image_uuid,
                        {'status': 'active'})

        self.body['server']['flavorRef'] = 2
        self.req.body = jsonutils.dumps(self.body)
        with testtools.ExpectedException(
                webob.exc.HTTPBadRequest,
                'Image 76fa36fc-c930-4bf3-8c8a-ea2a2420deb6 is not active.'):
            self.controller.create(self.req, body=self.body)
    def test_create_server_image_too_large(self):
        """An image larger than the flavor's disk must fail with 400."""
        # Get the fake image service so we can update the image size
        (image_service, image_id) = glance.get_remote_image_service(
                                    context, self.image_uuid)

        image = image_service.show(context, image_id)

        orig_size = image['size']
        new_size = str(1000 * (1024 ** 3))  # ~1000 GiB, beyond any flavor
        image_service.update(context, self.image_uuid, {'size': new_size})
        # Restore the original size so other tests are unaffected.
        self.addCleanup(image_service.update, context, self.image_uuid,
                        {'size': orig_size})

        self.body['server']['flavorRef'] = 2
        self.req.body = jsonutils.dumps(self.body)

        with testtools.ExpectedException(
                webob.exc.HTTPBadRequest,
                "Flavor's disk is too small for requested image."):
            self.controller.create(self.req, body=self.body)
def test_create_instance_image_ref_is_bookmark(self):
image_href = 'http://localhost/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_image_ref_is_invalid(self):
image_uuid = 'this_is_not_a_valid_uuid'
image_href = 'http://localhost/images/%s' % image_uuid
flavor_ref = 'http://localhost/flavors/3'
self.body['server']['imageRef'] = image_href
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
self.req, body=self.body)
    def test_create_instance_no_key_pair(self):
        """Creation still succeeds when the user has no key pair."""
        fakes.stub_out_key_pair_funcs(self.stubs, have_key_pair=False)
        self._test_create_instance()
def _test_create_extra(self, params, no_image=False):
self.body['server']['flavorRef'] = 2
if no_image:
self.body['server'].pop('imageRef', None)
self.body['server'].update(params)
self.req.body = jsonutils.dumps(self.body)
self.req.headers["content-type"] = "application/json"
self.controller.create(self.req, body=self.body).obj['server']
# TODO(cyeoh): bp-v3-api-unittests
# This needs to be ported to the os-keypairs extension tests
# def test_create_instance_with_keypairs_enabled(self):
# self.ext_mgr.extensions = {'os-keypairs': 'fake'}
# key_name = 'green'
#
# params = {'key_name': key_name}
# old_create = compute_api.API.create
#
# # NOTE(sdague): key pair goes back to the database,
# # so we need to stub it out for tests
# def key_pair_get(context, user_id, name):
# return {'public_key': 'FAKE_KEY',
# 'fingerprint': 'FAKE_FINGERPRINT',
# 'name': name}
#
# def create(*args, **kwargs):
# self.assertEqual(kwargs['key_name'], key_name)
# return old_create(*args, **kwargs)
#
# self.stubs.Set(db, 'key_pair_get', key_pair_get)
# self.stubs.Set(compute_api.API, 'create', create)
# self._test_create_extra(params)
#
# TODO(cyeoh): bp-v3-api-unittests
# This needs to be ported to the os-networks extension tests
# def test_create_instance_with_networks_enabled(self):
# self.ext_mgr.extensions = {'os-networks': 'fake'}
# net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
# requested_networks = [{'uuid': net_uuid}]
# params = {'networks': requested_networks}
# old_create = compute_api.API.create
# def create(*args, **kwargs):
# result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None)]
# self.assertEqual(kwargs['requested_networks'], result)
# return old_create(*args, **kwargs)
# self.stubs.Set(compute_api.API, 'create', create)
# self._test_create_extra(params)
def test_create_instance_with_port_with_no_fixed_ips(self):
port_id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port_id}]
params = {'networks': requested_networks}
def fake_create(*args, **kwargs):
raise exception.PortRequiresFixedIP(port_id=port_id)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_raise_user_data_too_large(self, mock_create):
        """Oversized user_data from the compute API surfaces as 400."""
        mock_create.side_effect = exception.InstanceUserDataTooLarge(
            maxsize=1, length=2)

        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
def test_create_instance_with_network_with_no_subnet(self):
network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network}]
params = {'networks': requested_networks}
def fake_create(*args, **kwargs):
raise exception.NetworkRequiresSubnet(network_uuid=network)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_instance_with_non_unique_secgroup_name(self):
network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network}]
params = {'networks': requested_networks,
'security_groups': [{'name': 'dup'}, {'name': 'dup'}]}
def fake_create(*args, **kwargs):
raise exception.NoUniqueMatch("No Unique match found for ...")
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPConflict,
self._test_create_extra, params)
    def test_create_instance_with_networks_disabled_neutronv2(self):
        """With neutron, requested networks are passed through as tuples."""
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        requested_networks = [{'uuid': net_uuid}]
        params = {'networks': requested_networks}
        old_create = compute_api.API.create

        def create(*args, **kwargs):
            # (network_uuid, fixed_ip, port_id) triple expected for neutron.
            result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None,
                       None)]
            self.assertEqual(result, kwargs['requested_networks'].as_tuples())
            return old_create(*args, **kwargs)

        self.stubs.Set(compute_api.API, 'create', create)
        self._test_create_extra(params)
    def test_create_instance_with_networks_disabled(self):
        """Without the networks extension the request's networks are dropped."""
        net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        requested_networks = [{'uuid': net_uuid}]
        params = {'networks': requested_networks}
        old_create = compute_api.API.create

        def create(*args, **kwargs):
            # The controller must not forward any requested networks.
            self.assertIsNone(kwargs['requested_networks'])
            return old_create(*args, **kwargs)

        self.stubs.Set(compute_api.API, 'create', create)
        self._test_create_extra(params)
def test_create_instance_with_pass_disabled(self):
# test with admin passwords disabled See lp bug 921814
self.flags(enable_instance_password=False)
# proper local hrefs must start with 'http://localhost/v3/'
self.flags(enable_instance_password=False)
image_href = 'http://localhost/v2/fake/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self._check_admin_password_missing(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_name_too_long(self):
# proper local hrefs must start with 'http://localhost/v3/'
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['name'] = 'X' * 256
self.body['server']['imageRef'] = image_href
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError, self.controller.create,
self.req, body=self.body)
    def test_create_instance_name_all_blank_spaces(self):
        """A whitespace-only server name fails schema validation."""
        # NOTE(review): comment said v2 but the href below uses v3 — the
        # validation failure is on the name either way.
        image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        image_href = 'http://localhost/v3/images/%s' % image_uuid
        flavor_ref = 'http://localhost/flavors/3'
        body = {
            'server': {
                'name': ' ' * 64,
                'imageRef': image_href,
                'flavorRef': flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
            },
        }

        req = fakes.HTTPRequest.blank('/v3/servers')
        req.method = 'POST'
        req.body = jsonutils.dumps(body)
        req.headers["content-type"] = "application/json"
        self.assertRaises(exception.ValidationError,
                          self.controller.create, req, body=body)
def test_create_instance(self):
# proper local hrefs must start with 'http://localhost/v3/'
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self._check_admin_password_len(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_extension_create_exception(self):
def fake_keypair_server_create(self, server_dict,
create_kwargs):
raise KeyError
self.stubs.Set(keypairs.Keypairs, 'server_create',
fake_keypair_server_create)
# proper local hrefs must start with 'http://localhost/v3/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v3/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
},
}
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPInternalServerError,
self.controller.create, req, body=body)
def test_create_instance_pass_disabled(self):
self.flags(enable_instance_password=False)
# proper local hrefs must start with 'http://localhost/v3/'
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self._check_admin_password_missing(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_too_much_metadata(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata']['vote'] = 'fiddletown'
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_key_too_long(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata'] = {('a' * 260): '12345'}
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_value_too_long(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata'] = {'key1': ('a' * 260)}
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_key_blank(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata'] = {'': 'abcd'}
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_not_dict(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata'] = 'string'
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_key_not_string(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata'] = {1: 'test'}
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_value_not_string(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata'] = {'test': ['a', 'list']}
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_user_data_malformed_bad_request(self):
params = {'user_data': 'u1234'}
self.assertRaises(exception.ValidationError,
self._test_create_extra, params)
def test_create_instance_invalid_key_name(self):
image_href = 'http://localhost/v2/images/2'
self.body['server']['imageRef'] = image_href
self.body['server']['key_name'] = 'nonexistentkey'
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_valid_key_name(self):
self.body['server']['key_name'] = 'key'
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
self.assertEqual(FAKE_UUID, res["server"]["id"])
self._check_admin_password_len(res["server"])
def test_create_instance_invalid_flavor_href(self):
image_href = 'http://localhost/v2/images/2'
flavor_ref = 'http://localhost/v2/flavors/asdf'
self.body['server']['imageRef'] = image_href
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_invalid_flavor_id_int(self):
image_href = 'http://localhost/v2/images/2'
flavor_ref = -1
self.body['server']['imageRef'] = image_href
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_bad_flavor_href(self):
image_href = 'http://localhost/v2/images/2'
flavor_ref = 'http://localhost/v2/flavors/17'
self.body['server']['imageRef'] = image_href
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_bad_href(self):
image_href = 'asdf'
self.body['server']['imageRef'] = image_href
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_local_href(self):
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_admin_password(self):
self.body['server']['flavorRef'] = 3
self.body['server']['adminPass'] = 'testpass'
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self.assertEqual(server['adminPass'],
self.body['server']['adminPass'])
def test_create_instance_admin_password_pass_disabled(self):
self.flags(enable_instance_password=False)
self.body['server']['flavorRef'] = 3
self.body['server']['adminPass'] = 'testpass'
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
self.assertIn('server', res)
self.assertIn('adminPass', self.body['server'])
def test_create_instance_admin_password_empty(self):
self.body['server']['flavorRef'] = 3
self.body['server']['adminPass'] = ''
self.req.body = jsonutils.dumps(self.body)
# The fact that the action doesn't raise is enough validation
self.controller.create(self.req, body=self.body)
def test_create_location(self):
selfhref = 'http://localhost/v2/fake/servers/%s' % FAKE_UUID
self.req.body = jsonutils.dumps(self.body)
robj = self.controller.create(self.req, body=self.body)
self.assertEqual(robj['Location'], selfhref)
def _do_test_create_instance_above_quota(self, resource, allowed, quota,
                                         expected_msg):
    """Stub the quota for ``resource`` and verify that create() is
    rejected with HTTPForbidden carrying ``expected_msg``.
    """
    fakes.stub_out_instance_quota(self.stubs, allowed, quota, resource)
    self.body['server']['flavorRef'] = 3
    self.req.body = jsonutils.dumps(self.body)
    try:
        self.controller.create(self.req, body=self.body).obj['server']
    except webob.exc.HTTPForbidden as exc:
        self.assertEqual(exc.explanation, expected_msg)
    else:
        self.fail('expected quota to be exceeded')
def test_create_instance_above_quota_instances(self):
msg = _('Quota exceeded for instances: Requested 1, but'
' already used 10 of 10 instances')
self._do_test_create_instance_above_quota('instances', 0, 10, msg)
def test_create_instance_above_quota_ram(self):
msg = _('Quota exceeded for ram: Requested 4096, but'
' already used 8192 of 10240 ram')
self._do_test_create_instance_above_quota('ram', 2048, 10 * 1024, msg)
def test_create_instance_above_quota_cores(self):
msg = _('Quota exceeded for cores: Requested 2, but'
' already used 9 of 10 cores')
self._do_test_create_instance_above_quota('cores', 1, 10, msg)
def test_create_instance_with_neutronv2_port_in_use(self):
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
params = {'networks': requested_networks}
def fake_create(*args, **kwargs):
raise exception.PortInUse(port_id=port)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPConflict,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_public_network_non_admin(self, mock_create):
public_network_uuid = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
params = {'networks': [{'uuid': public_network_uuid}]}
self.req.body = jsonutils.dumps(self.body)
mock_create.side_effect = exception.ExternalNetworkAttachForbidden(
network_uuid=public_network_uuid)
self.assertRaises(webob.exc.HTTPForbidden,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create')
def test_create_multiple_instance_with_specified_ip_neutronv2(self,
_api_mock):
_api_mock.side_effect = exception.InvalidFixedIpAndMaxCountRequest(
reason="")
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
address = '10.0.0.1'
requested_networks = [{'uuid': network, 'fixed_ip': address,
'port': port}]
params = {'networks': requested_networks}
self.body['server']['max_count'] = 2
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_multiple_instance_with_neutronv2_port(self):
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
params = {'networks': requested_networks}
self.body['server']['max_count'] = 2
def fake_create(*args, **kwargs):
msg = _("Unable to launch multiple instances with"
" a single configured port ID. Please launch your"
" instance one by one with different ports.")
raise exception.MultiplePortsNotApplicable(reason=msg)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_instance_with_neturonv2_not_found_network(self):
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
requested_networks = [{'uuid': network}]
params = {'networks': requested_networks}
def fake_create(*args, **kwargs):
raise exception.NetworkNotFound(network_id=network)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_instance_with_neutronv2_port_not_found(self):
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
params = {'networks': requested_networks}
def fake_create(*args, **kwargs):
raise exception.PortNotFound(port_id=port)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_network_ambiguous(self, mock_create):
mock_create.side_effect = exception.NetworkAmbiguous()
self.assertRaises(webob.exc.HTTPConflict,
self._test_create_extra, {})
class ServersControllerCreateTestWithMock(test.TestCase):
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    flavor_ref = 'http://localhost/123/flavors/3'

    def setUp(self):
        """Shared implementation for tests below that create instance."""
        super(ServersControllerCreateTestWithMock, self).setUp()
        self.flags(verbose=True,
                   enable_instance_password=True)
        self.instance_cache_num = 0
        self.instance_cache_by_id = {}
        self.instance_cache_by_uuid = {}
        extension_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(
            extension_info=extension_info)
        self.body = {
            'server': {
                'name': 'server_test',
                'imageRef': self.image_uuid,
                'flavorRef': self.flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
            },
        }
        self.req = fakes.HTTPRequest.blank('/fake/servers')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"

    def _test_create_extra(self, params, no_image=False):
        # Merge ``params`` into the server body and POST it through the
        # controller; callers assert on the exception that bubbles up.
        self.body['server']['flavorRef'] = 2
        if no_image:
            self.body['server'].pop('imageRef', None)
        self.body['server'].update(params)
        self.req.body = jsonutils.dumps(self.body)
        self.req.headers["content-type"] = "application/json"
        self.controller.create(self.req, body=self.body).obj['server']

    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_neutronv2_fixed_ip_already_in_use(self,
            create_mock):
        net_uuid = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        fixed_ip = '10.0.2.3'
        params = {'networks': [{'uuid': net_uuid, 'fixed_ip': fixed_ip}]}
        create_mock.side_effect = exception.FixedIpAlreadyInUse(
            address=fixed_ip,
            instance_uuid=net_uuid)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)
        # The compute API must have been invoked exactly once.
        self.assertEqual(1, len(create_mock.call_args_list))
class ServersViewBuilderTest(test.TestCase):
def setUp(self):
super(ServersViewBuilderTest, self).setUp()
CONF.set_override('host', 'localhost', group='glance')
self.flags(use_ipv6=True)
db_inst = fakes.stub_instance(
id=1,
image_ref="5",
uuid="deadbeef-feed-edee-beef-d0ea7beefedd",
display_name="test_server",
include_fake_metadata=False)
privates = ['172.19.0.1']
publics = ['192.168.0.3']
public6s = ['b33f::fdee:ddff:fecc:bbaa']
def nw_info(*args, **kwargs):
return [(None, {'label': 'public',
'ips': [dict(ip=ip) for ip in publics],
'ip6s': [dict(ip=ip) for ip in public6s]}),
(None, {'label': 'private',
'ips': [dict(ip=ip) for ip in privates]})]
def floaters(*args, **kwargs):
return []
fakes.stub_out_nw_api_get_instance_nw_info(self.stubs, nw_info)
fakes.stub_out_nw_api_get_floating_ips_by_fixed_address(self.stubs,
floaters)
self.uuid = db_inst['uuid']
self.view_builder = views.servers.ViewBuilderV3()
self.request = fakes.HTTPRequestV3.blank("")
self.request.context = context.RequestContext('fake', 'fake')
self.instance = fake_instance.fake_instance_obj(
self.request.context,
expected_attrs=instance_obj.INSTANCE_DEFAULT_FIELDS,
**db_inst)
def test_get_flavor_valid_instance_type(self):
    """_get_flavor returns the flavor id plus a bookmark link."""
    expected = {
        "id": "1",
        "links": [{"rel": "bookmark",
                   "href": "http://localhost/flavors/1"}],
    }
    result = self.view_builder._get_flavor(self.request, self.instance)
    self.assertEqual(result, expected)
def test_build_server(self):
self_link = "http://localhost/v3/servers/%s" % self.uuid
bookmark_link = "http://localhost/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"name": "test_server",
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.basic(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_with_project_id(self):
expected_server = {
"server": {
"id": self.uuid,
"name": "test_server",
"links": [
{
"rel": "self",
"href": "http://localhost/v3/servers/%s" %
self.uuid,
},
{
"rel": "bookmark",
"href": "http://localhost/servers/%s" % self.uuid,
},
],
}
}
output = self.view_builder.basic(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail(self):
image_bookmark = "http://localhost/images/5"
flavor_bookmark = "http://localhost/flavors/1"
self_link = "http://localhost/v3/servers/%s" % self.uuid
bookmark_link = "http://localhost/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'}
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_fault(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context, self.uuid)
image_bookmark = "http://localhost/images/5"
flavor_bookmark = "http://localhost/flavors/1"
self_link = "http://localhost/v3/servers/%s" % self.uuid
bookmark_link = "http://localhost/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"name": "test_server",
"status": "ERROR",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'}
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
"fault": {
"code": 404,
"created": "2010-10-10T12:00:00Z",
"message": "HTTPNotFound",
"details": "Stock details for test",
},
}
}
self.request.context = context.RequestContext('fake', 'fake')
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_fault_that_has_been_deleted(self):
    """A deleted server in ERROR state still reports status DELETED and
    keeps its (detail-less) fault in the view.
    """
    self.instance['deleted'] = 1
    self.instance['vm_state'] = vm_states.ERROR
    fault = fake_instance.fake_fault_obj(self.request.context,
                                         self.uuid, code=500,
                                         message="No valid host was found")
    self.instance['fault'] = fault
    expected_fault = {"code": 500,
                      "created": "2010-10-10T12:00:00Z",
                      "message": "No valid host was found"}
    self.request.context = context.RequestContext('fake', 'fake')
    output = self.view_builder.show(self.request, self.instance)
    # Regardless of vm_state deleted servers should be DELETED
    # (fixed comment typo: "sholud" -> "should")
    self.assertEqual("DELETED", output['server']['status'])
    self.assertThat(output['server']['fault'],
                    matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_no_details_not_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error"}
self.request.context = context.RequestContext('fake', 'fake')
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error",
'details': 'Stock details for test'}
self.request.environ['nova.context'].is_admin = True
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_no_details_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error',
details='')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error"}
self.request.environ['nova.context'].is_admin = True
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_but_active(self):
self.instance['vm_state'] = vm_states.ACTIVE
self.instance['progress'] = 100
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context, self.uuid)
output = self.view_builder.show(self.request, self.instance)
self.assertNotIn('fault', output['server'])
def test_build_server_detail_active_status(self):
# set the power state of the instance to running
self.instance['vm_state'] = vm_states.ACTIVE
self.instance['progress'] = 100
image_bookmark = "http://localhost/images/5"
flavor_bookmark = "http://localhost/flavors/1"
self_link = "http://localhost/v3/servers/%s" % self.uuid
bookmark_link = "http://localhost/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 100,
"name": "test_server",
"status": "ACTIVE",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'}
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_metadata(self):
metadata = []
metadata.append(models.InstanceMetadata(key="Open", value="Stack"))
metadata = nova_utils.metadata_to_dict(metadata)
self.instance['metadata'] = metadata
image_bookmark = "http://localhost/images/5"
flavor_bookmark = "http://localhost/flavors/1"
self_link = "http://localhost/v3/servers/%s" % self.uuid
bookmark_link = "http://localhost/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'type': 'fixed', 'mac_addr': 'aa:aa:aa:aa:aa:aa'},
]
},
"metadata": {"Open": "Stack"},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
class ServersAllExtensionsTestCase(test.TestCase):
    """Servers tests using default API router with all extensions enabled.

    The intent here is to catch cases where extensions end up throwing
    an exception because of a malformed request before the core API
    gets a chance to validate the request and return a 400 response.
    (Fix: the docstring previously said 422, but both tests below assert
    a 400 status.)

    For example, AccessIPsController extends servers.Controller::

        | @wsgi.extends
        | def create(self, req, resp_obj, body):
        |     context = req.environ['nova.context']
        |     if authorize(context) and 'server' in resp_obj.obj:
        |         resp_obj.attach(xml=AccessIPTemplate())
        |         server = resp_obj.obj['server']
        |         self._extend_server(req, server)

    we want to ensure that the extension isn't barfing on an invalid
    body.
    """

    def setUp(self):
        super(ServersAllExtensionsTestCase, self).setUp()
        self.app = compute.APIRouterV3()

    def test_create_missing_server(self):
        # Test create with malformed body.
        def fake_create(*args, **kwargs):
            raise test.TestingException("Should not reach the compute API.")

        self.stubs.Set(compute_api.API, 'create', fake_create)
        req = fakes.HTTPRequestV3.blank('/servers')
        req.method = 'POST'
        req.content_type = 'application/json'
        body = {'foo': {'a': 'b'}}
        req.body = jsonutils.dumps(body)
        res = req.get_response(self.app)
        self.assertEqual(400, res.status_int)

    def test_update_missing_server(self):
        # Test update with malformed body.
        def fake_update(*args, **kwargs):
            raise test.TestingException("Should not reach the compute API.")

        self.stubs.Set(compute_api.API, 'update', fake_update)
        req = fakes.HTTPRequestV3.blank('/servers/1')
        req.method = 'PUT'
        req.content_type = 'application/json'
        body = {'foo': {'a': 'b'}}
        req.body = jsonutils.dumps(body)
        res = req.get_response(self.app)
        self.assertEqual(400, res.status_int)
class ServersInvalidRequestTestCase(test.TestCase):
    """Tests of places we throw 400 Bad Request from."""

    def setUp(self):
        super(ServersInvalidRequestTestCase, self).setUp()
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)

    def _invalid_server_create(self, body):
        req = fakes.HTTPRequestV3.blank('/servers')
        req.method = 'POST'
        self.assertRaises(exception.ValidationError,
                          self.controller.create, req, body=body)

    def test_create_server_no_body(self):
        self._invalid_server_create(body=None)

    def test_create_server_missing_server(self):
        body = {'foo': {'a': 'b'}}
        self._invalid_server_create(body=body)

    def test_create_server_malformed_entity(self):
        body = {'server': 'string'}
        self._invalid_server_create(body=body)

    def _unprocessable_server_update(self, body):
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.update, req, FAKE_UUID, body=body)

    # Fix: the three update tests below previously called
    # _invalid_server_create (copy-paste), duplicating the create tests
    # and leaving _unprocessable_server_update dead code, so the update
    # path was never exercised.
    def test_update_server_no_body(self):
        self._unprocessable_server_update(body=None)

    def test_update_server_missing_server(self):
        body = {'foo': {'a': 'b'}}
        self._unprocessable_server_update(body=body)

    def test_create_update_malformed_entity(self):
        body = {'server': 'string'}
        self._unprocessable_server_update(body=body)
class FakeExt(extensions.V3APIExtensionBase):
    """Minimal extension stub used to exercise extension loading below."""
    name = "AccessIPs"
    alias = 'os-access-ips'
    version = 1

    def fake_extension_point(self, *args, **kwargs):
        # Placeholder hook; the tests attach it under various names.
        pass

    def get_controller_extensions(self):
        return []

    def get_resources(self):
        return []
class TestServersExtensionPoint(test.NoDBTestCase):
    def setUp(self):
        super(TestServersExtensionPoint, self).setUp()
        CONF.set_override('extensions_whitelist', ['os-access-ips'],
                          'osapi_v3')
        self.stubs.Set(access_ips, 'AccessIPs', FakeExt)

    def _test_load_extension_point(self, name):
        """Attach a server_<name> hook to FakeExt and verify the matching
        extension manager picks it up by alias.
        """
        hook_attr = 'server_%s' % name
        setattr(FakeExt, hook_attr, FakeExt.fake_extension_point)
        ext_info = plugins.LoadedExtensionInfo()
        controller = servers.ServersController(extension_info=ext_info)
        manager = getattr(controller, '%s_extension_manager' % name)
        self.assertEqual('os-access-ips', list(manager)[0].obj.alias)
        delattr(FakeExt, hook_attr)

    def test_load_update_extension_point(self):
        self._test_load_extension_point('update')

    def test_load_rebuild_extension_point(self):
        self._test_load_extension_point('rebuild')

    def test_load_create_extension_point(self):
        self._test_load_extension_point('create')
class TestServersExtensionSchema(test.NoDBTestCase):
    def setUp(self):
        super(TestServersExtensionSchema, self).setUp()
        CONF.set_override('extensions_whitelist', ['keypairs'], 'osapi_v3')

    def _test_load_extension_schema(self, name):
        """Attach a get_server_<name>_schema hook to FakeExt, build a
        controller, and return the combined schema it produced.
        """
        hook_attr = 'get_server_%s_schema' % name
        setattr(FakeExt, hook_attr, FakeExt.fake_extension_point)
        ext_info = plugins.LoadedExtensionInfo()
        controller = servers.ServersController(extension_info=ext_info)
        self.assertTrue(hasattr(controller, '%s_schema_manager' % name))
        delattr(FakeExt, hook_attr)
        return getattr(controller, 'schema_server_%s' % name)

    def test_load_create_extension_point(self):
        # The expected is the schema combination of base and keypairs
        # because of the above extensions_whitelist.
        expected_schema = copy.deepcopy(servers_schema.base_create)
        expected_schema['properties']['server']['properties'].update(
            keypairs_schema.server_create)
        self.assertEqual(expected_schema,
                         self._test_load_extension_schema('create'))

    def test_load_update_extension_point(self):
        # keypair extension does not contain update_server(), so nothing
        # may be merged into the base update schema.
        expected_schema = copy.deepcopy(servers_schema.base_update)
        self.assertEqual(expected_schema,
                         self._test_load_extension_schema('update'))

    def test_load_rebuild_extension_point(self):
        # keypair extension does not contain rebuild_server(), so nothing
        # may be merged into the base rebuild schema.
        expected_schema = copy.deepcopy(servers_schema.base_rebuild)
        self.assertEqual(expected_schema,
                         self._test_load_extension_schema('rebuild'))
|
apache-2.0
| 2,637,198,558,954,699,000
| 40.595554
| 79
| 0.559654
| false
| 3.954563
| true
| false
| false
|
census-instrumentation/opencensus-python
|
contrib/opencensus-ext-requests/setup.py
|
1
|
1936
|
# Copyright 2019, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import find_packages, setup
from version import __version__
# Package metadata for the opencensus-ext-requests integration.
setup(
    name='opencensus-ext-requests',
    version=__version__, # noqa
    author='OpenCensus Authors',
    author_email='census-developers@googlegroups.com',
    classifiers=[
    'Intended Audience :: Developers',
    'Development Status :: 3 - Alpha',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: Apache Software License',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.4',
    'Programming Language :: Python :: 3.5',
    'Programming Language :: Python :: 3.6',
    'Programming Language :: Python :: 3.7',
    ],
    description='OpenCensus Requests Integration',
    include_package_data=True,
    long_description=open('README.rst').read(),
    # Pin the core library to the compatible pre-1.0 range; wrapt is used
    # to patch requests' session methods at runtime.
    install_requires=[
    'opencensus >= 0.8.dev0, < 1.0.0',
    'wrapt >= 1.0.0, < 2.0.0',
    ],
    extras_require={},
    license='Apache-2.0',
    packages=find_packages(exclude=('tests',)),
    namespace_packages=[],
    url='https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-requests', # noqa: E501
    zip_safe=False,
)
|
apache-2.0
| 4,184,736,613,953,377,000
| 36.960784
| 128
| 0.66374
| false
| 4.008282
| false
| false
| false
|
foursquare/pants
|
contrib/python/src/python/pants/contrib/python/checks/tasks/checkstyle/variable_names.py
|
1
|
4706
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import ast
import keyword
import re
from functools import wraps
import six
from pants.contrib.python.checks.tasks.checkstyle.common import CheckstylePlugin
# Regexes for the PEP8 naming families the checks below recognize.
ALL_LOWER_CASE_RE = re.compile(r'^[a-z][a-z\d]*$')  # e.g. 'foo', 'foo2'
ALL_UPPER_CASE_RE = re.compile(r'^[A-Z][A-Z\d]+$')  # e.g. 'FOO', 'HTTP2'
LOWER_SNAKE_RE = re.compile(r'^([a-z][a-z\d]*)(_[a-z\d]+)*$')  # e.g. 'foo_bar'
UPPER_SNAKE_RE = re.compile(r'^([A-Z][A-Z\d]*)(_[A-Z\d]+)*$')  # e.g. 'FOO_BAR'
UPPER_CAMEL_RE = re.compile(r'^([A-Z][a-z\d]*)+$')  # e.g. 'FooBar', also 'HTTPServer'
# Names that user code must not shadow: Python keywords plus builtins.
RESERVED_NAMES = frozenset(keyword.kwlist)
BUILTIN_NAMES = dir(six.moves.builtins)
def allow_underscores(num):
  """Decorator factory: tolerate up to `num` leading underscores on a name.

  The decorated predicate receives the name with its leading underscores
  stripped; names carrying more than `num` leading underscores are rejected
  outright (the predicate is never consulted).
  """
  def decorator(predicate):
    @wraps(predicate)
    def checker(name):
      too_many = '_' * (num + 1)
      if name.startswith(too_many):
        return False
      return predicate(name.lstrip('_'))
    return checker
  return decorator
@allow_underscores(1)
def is_upper_camel(name):
  """UpperCamel, AllowingHTTPAbbrevations, _WithUpToOneUnderscoreAllowable."""
  # Reject the all-caps case first: 'CONSTANT' matches the camel regex too.
  if ALL_UPPER_CASE_RE.match(name):
    return False
  return UPPER_CAMEL_RE.match(name) is not None
@allow_underscores(2)
def is_lower_snake(name):
  """lower_snake_case, _with, __two_underscores_allowable."""
  return bool(LOWER_SNAKE_RE.match(name))
def is_reserved_name(name):
  """True if `name` is a Python keyword or shadows a builtin (e.g. 'id')."""
  return name in RESERVED_NAMES or name in BUILTIN_NAMES
def is_reserved_with_trailing_underscore(name):
  """Accept reserved names disambiguated by one trailing underscore.

  For example, super_, id_, type_.
  """
  single_trailing = name.endswith('_') and not name.endswith('__')
  if not single_trailing:
    return False
  return is_reserved_name(name[:-1])
def is_builtin_name(name):
  """For example, __foo__ or __bar__."""
  if not (name.startswith('__') and name.endswith('__')):
    return False
  # The dunder wrapping must enclose a plain lowercase identifier.
  return bool(ALL_LOWER_CASE_RE.match(name[2:-2]))
@allow_underscores(2)
def is_constant(name):
  """UPPER_SNAKE_CASE constants, allowing up to two leading underscores."""
  return bool(UPPER_SNAKE_RE.match(name))
class PEP8VariableNames(CheckstylePlugin):
  """Enforces PEP8 recommendations for variable names.

  Specifically:
     UpperCamel class names
     lower_snake / _lower_snake / __lower_snake function names
     lower_snake expression variable names
     CLASS_LEVEL_CONSTANTS = {}
     GLOBAL_LEVEL_CONSTANTS = {}
  """

  # Class-level dunders that are exempt from the UPPER_SNAKE constant rule.
  CLASS_GLOBAL_BUILTINS = frozenset((
    '__slots__',
    '__metaclass__',
  ))

  def iter_class_methods(self, class_node):
    """Yields the ast.FunctionDef nodes defined directly in class_node's body."""
    for node in class_node.body:
      if isinstance(node, ast.FunctionDef):
        yield node

  def iter_class_globals(self, class_node):
    """Yields ast.Name targets of simple assignments at class scope."""
    for node in class_node.body:
      # TODO(wickman) Occasionally you have the pattern where you set methods equal to each other
      # which should be allowable, for example:
      #   class Foo(object):
      #     def bar(self):
      #       pass
      #     alt_bar = bar
      if isinstance(node, ast.Assign):
        for name in node.targets:
          if isinstance(name, ast.Name):
            yield name

  def nits(self):
    """Walks the module AST and yields naming nits (T000/T001/T801/T002)."""
    class_methods = set()
    # Start from every FunctionDef in the file; class traversal below moves
    # object-rooted methods into class_methods and discards methods of
    # classes with non-object bases.
    all_methods = set(function_def for function_def in ast.walk(self.python_file.tree)
                      if isinstance(function_def, ast.FunctionDef))

    for class_def in self.iter_ast_types(ast.ClassDef):
      if not is_upper_camel(class_def.name):
        yield self.error('T000', 'Classes must be UpperCamelCased', class_def)
      for class_global in self.iter_class_globals(class_def):
        if not is_constant(class_global.id) and class_global.id not in self.CLASS_GLOBAL_BUILTINS:
          yield self.error('T001', 'Class globals must be UPPER_SNAKE_CASED', class_global)
      if not class_def.bases or all(isinstance(base, ast.Name) and base.id == 'object'
                                    for base in class_def.bases):
        class_methods.update(self.iter_class_methods(class_def))
      else:
        # If the class is inheriting from anything that is potentially a bad actor, rely
        # upon checking that bad actor out of band.  Fixes PANTS-172.
        for method in self.iter_class_methods(class_def):
          all_methods.discard(method)

    # Free functions (methods were moved into class_methods above).
    for function_def in all_methods - class_methods:
      if is_reserved_name(function_def.name):
        yield self.error('T801', 'Method name overrides a builtin.', function_def)

    # TODO(wickman) Only enforce this for classes that derive from object.  If they
    # don't derive object, it's possible that the superclass naming is out of its
    # control.
    for function_def in all_methods:
      if not any((is_lower_snake(function_def.name),
                  is_builtin_name(function_def.name),
                  is_reserved_with_trailing_underscore(function_def.name))):
        yield self.error('T002', 'Method names must be lower_snake_cased', function_def)
|
apache-2.0
| -250,211,113,857,295,550
| 33.602941
| 98
| 0.671058
| false
| 3.375897
| false
| false
| false
|
avigad/boole
|
old/expr_examples.py
|
1
|
2607
|
################################################################################
#
# expr_examples.py
#
################################################################################
from boole.core.model import *
################################################################################
#
# Examples
#
################################################################################
if __name__ == '__main__':
    # NOTE: Python 2 script (print statements). Exercises the boole expression
    # DSL imported from boole.core.model and prints type-checking results.
    print "Built-in language:"
    print
    built_in_language.show()
    print

    # Declare assorted constants in the global language.
    i, j, k = Int('i j k')
    x = Const('x', Real)
    # NOTE(review): 'y, z' contains a comma while every other declaration uses
    # space-separated names ('i j k') — confirm Real() tolerates the comma.
    y, z = Real('y, z')
    p, q, r = Bool('p q r')
    A = BasicType('A')
    B = BasicType('B')
    f = (A >> B)('f')
    g = Const('g', A * A >> B)
    a1, a2 = A('a1 a2')
    print 'Global language:'
    print
    global_language.show()
    print

    def check(e):
        # Print e and either its type or the TypeError raised by e.etype().
        print 'Expression:', e
        try:
            etype = e.etype()
        except TypeError as err:
            print 'Type error:', err
        else:
            print 'Type:', etype
        print

    # Well-typed and deliberately ill-typed expressions (e.g. x + p,
    # f(a1, a2)) — the ill-typed ones demonstrate the error path of check().
    check(j)
    check(i + j)
    check(x)
    check(x + i)
    check(i + rr(4.2))
    check(f(a1))
    check(f(a1, a2))
    check(g(a1))
    check(g(a1, a2))
    check(ii(42))
    check(rr(42))
    adder = Abs([i, j], i + j)
    check(adder)
    check(adder(i, ii(3)))
    check(plus)
    check(x ** ii(2) + ii(3) * x + ii(7))
    check(j ** ii(2) + ii(3) * j + ii(7))
    check(Sum(x ** ii(2), ii(3) * x, ii(7)))
    check(Sum(j ** ii(2), ii(3) * j, ii(7)))
    check((x * rr(3.0) >= ii(17)) & (p | q))
    check(x + p)
    check(p & q)
    check(And(p,q))
    check(And(p, q, r))
    check(~And(p, ~q, (r | p)))
    check(Forall(x, x == x))
    check(Forall([x, y], x == y))
    check(Exists([x, y], (rr(0) < x) & (x + y < rr(3))))

    # Fresh language L with its own constants; rebinds f, p, q, x.
    L = Language()
    set_default_language(L)
    m, n, p, q = Int('m n p q')
    prime = Const('Prime', Int >> Bool)
    even = Const('Even', Int >> Bool)
    f = (Int >> Int)('f')
    People = EnumType('People', ['Alice', 'Bob', 'Cathy'])
    Alice, Bob, Cathy = People.make_constants()
    x = People('x')
    print 'Language L:'
    print
    L.show()
    print
    check (Forall([f, m, n], f(m) == f(n)))

    def precond(n):
        # n is even and greater than 2 — Goldbach's hypothesis side.
        return (ii(2) < n) & even(n)

    def goldbach(n):
        return Exists([p,q], (precond(n)) >>
                     (prime(p) & prime(q) & (p + q == n)))

    Goldbach = Forall(n, goldbach(n))
    check(Goldbach)
    # Exhaustiveness over the People enumeration: first is complete, the
    # second omits Cathy.
    check(Forall(x, (x == Alice) | (x == Bob) | (x == Cathy)))
    check(Forall(x, (x == Alice) | (x == Bob)))
|
apache-2.0
| -8,128,380,361,066,723,000
| 23.364486
| 80
| 0.392789
| false
| 3.045561
| false
| false
| false
|
warner/petmail
|
src/petmail/eventsource.py
|
1
|
7209
|
from twisted.python import log, failure
from twisted.internet import reactor, defer, protocol
from twisted.application import service
from twisted.protocols import basic
from twisted.web.client import Agent, ResponseDone
from twisted.web.http_headers import Headers
from .eventual import eventually
class EventSourceParser(basic.LineOnlyReceiver):
    """Parses a text/event-stream body into (field, data) callbacks.

    Lines of the form 'name: data' accumulate into the current field; a
    blank line terminates the field and fires handler(name, data) with the
    accumulated lines joined by newlines.
    """
    delimiter = "\n"  # event-stream frames are newline-delimited

    def __init__(self, handler):
        self.current_field = None   # field name being accumulated, or None
        self.current_lines = []     # data lines of the current field
        self.handler = handler      # called as handler(field_name, data)
        self.done_deferred = defer.Deferred()  # fires on connection close

    def connectionLost(self, why):
        # A clean end-of-response is reported as success (None); anything
        # else is passed through as the failure.
        if why.check(ResponseDone):
            why = None
        self.done_deferred.callback(why)

    def dataReceived(self, data):
        # exceptions here aren't being logged properly, and tests will hang
        # rather than halt. I suspect twisted.web._newclient's
        # HTTP11ClientProtocol.dataReceived(), which catches everything and
        # responds with self._giveUp() but doesn't log.err.
        try:
            basic.LineOnlyReceiver.dataReceived(self, data)
        except:
            log.err()
            raise

    def lineReceived(self, line):
        if not line:
            # blank line ends the field
            self.fieldReceived(self.current_field,
                               "\n".join(self.current_lines))
            self.current_field = None
            self.current_lines[:] = []
            return
        if self.current_field is None:
            # First line of a field: 'name: first-chunk-of-data'.
            self.current_field, data = line.split(": ", 1)
            self.current_lines.append(data)
        else:
            # Continuation line: appended verbatim to the current field.
            self.current_lines.append(line)

    def fieldReceived(self, name, data):
        """Delivers one completed field to the user-supplied handler."""
        self.handler(name, data)
class EventSourceError(Exception):
    """Raised when the event-source request fails (e.g. a non-200 response)."""
# es = EventSource(url, handler)
# d = es.start()
# es.cancel()
class EventSource: # TODO: service.Service
    """Single-use GET of an event-stream URL, feeding fields to `handler`.

    Usage (see module comment above):
        es = EventSource(url, handler); d = es.start(); es.cancel()
    """
    def __init__(self, url, handler, when_connected=None):
        self.url = url
        self.handler = handler
        self.when_connected = when_connected  # optional 0-arg callback on 200
        self.started = False    # start() may only be called once
        self.cancelled = False  # set by cancel(); checked in _connected
        self.proto = EventSourceParser(self.handler)

    def start(self):
        """Issues the GET; returns a Deferred that fires when parsing ends."""
        assert not self.started, "single-use"
        self.started = True
        a = Agent(reactor)
        d = a.request("GET", self.url,
                      Headers({"accept": ["text/event-stream"]}))
        d.addCallback(self._connected)
        return d

    def _connected(self, resp):
        # Non-200 responses become an EventSourceError on the start() chain.
        if resp.code != 200:
            raise EventSourceError("%d: %s" % (resp.code, resp.phrase))
        if self.when_connected:
            self.when_connected()
        #if resp.headers.getRawHeaders("content-type") == ["text/event-stream"]:
        resp.deliverBody(self.proto)
        # cancel() may have raced us before the body started arriving.
        if self.cancelled:
            self.kill_connection()
        return self.proto.done_deferred

    def cancel(self):
        """Requests shutdown; safe to call before the connection exists."""
        self.cancelled = True
        if not self.proto.transport:
            # _connected hasn't been called yet, but that self.cancelled
            # should take care of it when the connection is established
            def kill(data):
                # this should kill it as soon as any data is delivered
                raise ValueError("dead")
            self.proto.dataReceived = kill # just in case
            return
        self.kill_connection()

    def kill_connection(self):
        """Forcibly closes the HTTP connection and resolves done_deferred."""
        if (hasattr(self.proto.transport, "_producer")
            and self.proto.transport._producer):
            # This is gross and fragile. We need a clean way to stop the
            # client connection. p.transport is a
            # twisted.web._newclient.TransportProxyProducer , and its
            # ._producer is the tcp.Port.
            self.proto.transport._producer.loseConnection()
        else:
            log.err("get_events: unable to stop connection")
            # oh well
            #err = EventSourceError("unable to cancel")
        try:
            self.proto.done_deferred.callback(None)
        except defer.AlreadyCalledError:
            pass
class Connector:
    """Just enough of twisted's IConnector for ReconnectingClientFactory.

    The factory only ever invokes connect() and stopConnecting(); both are
    forwarded to the owning ReconnectingEventSource.
    """

    def __init__(self, res):
        self.res = res

    def connect(self):
        # Retry path: the factory wants a new connection attempt.
        self.res._maybeStart()

    def stopConnecting(self):
        # Shutdown path: the factory wants any pending attempt aborted.
        self.res._stop_eventsource()
class ReconnectingEventSource(service.MultiService,
                              protocol.ReconnectingClientFactory):
    """Keeps an EventSource alive with exponential-backoff reconnection.

    A connection is maintained exactly while both self.running (IService)
    and self.active (activate/deactivate) are True.
    """
    def __init__(self, baseurl, connection_starting, handler):
        service.MultiService.__init__(self)
        # we don't use any of the basic Factory/ClientFactory methods of
        # this, just the ReconnectingClientFactory.retry, stopTrying, and
        # resetDelay methods.
        self.baseurl = baseurl
        # connection_starting() is called before each attempt and must
        # return the URL to connect to.
        self.connection_starting = connection_starting
        self.handler = handler
        # IService provides self.running, toggled by {start,stop}Service.
        # self.active is toggled by {,de}activate. If both .running and
        # .active are True, then we want to have an outstanding EventSource
        # and will start one if necessary. If either is False, then we don't
        # want one to be outstanding, and will initiate shutdown.
        self.active = False
        self.connector = Connector(self)
        self.es = None # set we have an outstanding EventSource
        self.when_stopped = [] # list of Deferreds

    def isStopped(self):
        return not self.es

    def startService(self):
        service.MultiService.startService(self) # sets self.running
        self._maybeStart()

    def stopService(self):
        # clears self.running
        d = defer.maybeDeferred(service.MultiService.stopService, self)
        d.addCallback(self._maybeStop)
        return d

    def activate(self):
        assert not self.active
        self.active = True
        self._maybeStart()

    def deactivate(self):
        assert self.active # XXX
        self.active = False
        return self._maybeStop()

    def _maybeStart(self):
        # Only connect while both halves of the on/off state agree.
        if not (self.active and self.running):
            return
        self.continueTrying = True
        url = self.connection_starting()
        # resetDelay is passed as when_connected so a successful connection
        # resets the reconnection backoff.
        self.es = EventSource(url, self.handler, self.resetDelay)
        d = self.es.start()
        d.addBoth(self._stopped)

    def _stopped(self, res):
        self.es = None
        # we might have stopped because of a connection error, or because of
        # an intentional shutdown.
        if self.active and self.running:
            # we still want to be connected, so schedule a reconnection
            if isinstance(res, failure.Failure):
                log.err(res)
            self.retry() # will eventually call _maybeStart
            return
        # intentional shutdown
        self.stopTrying()
        for d in self.when_stopped:
            eventually(d.callback, None)
        self.when_stopped = []

    def _stop_eventsource(self):
        if self.es:
            eventually(self.es.cancel)

    def _maybeStop(self, _=None):
        self.stopTrying() # cancels timer, calls _stop_eventsource()
        if not self.es:
            return defer.succeed(None)
        # Wait for the outstanding EventSource to finish shutting down.
        d = defer.Deferred()
        self.when_stopped.append(d)
        return d
|
mit
| 1,099,797,958,269,554,300
| 34.165854
| 80
| 0.609793
| false
| 4.303881
| false
| false
| false
|
anilpai/leetcode
|
Tests/test_matrix.py
|
1
|
1516
|
from unittest import TestCase
from Matrix.MatrixInSpiral import Solution as a
from Matrix.MatrixRotate90deg import Solution as b
from Matrix.RotateMatrix import Solution as c
class TestSolution(TestCase):
    """Exercises the three matrix utilities on the same 4x4 input."""

    def test_spiralmatrix(self):
        # Clockwise spiral read starting at the top-left corner.
        r = a()
        matrix = [
            [1, 2, 3, 4],
            [5, 6, 7, 8],
            [9, 10, 11, 12],
            [13, 14, 15, 16]
        ]
        self.assertListEqual(r.SpiralMatrix(matrix), [1, 2, 3, 4, 8, 12, 16, 15, 14, 13, 9, 5, 6, 7, 11, 10],'Spiral Matrix')

    def test_matrixRotate(self):
        # NOTE(review): the second assertion expects the ORIGINAL matrix back
        # after Rotate90Clock then Rotate90CounterClock — this implies the
        # methods mutate `matrix` in place; confirm against the implementation.
        r = b()
        matrix = [
            [1, 2, 3, 4],
            [5, 6, 7, 8],
            [9, 10, 11, 12],
            [13, 14, 15, 16]
        ]
        self.assertListEqual(r.Rotate90Clock(matrix), [[13, 9, 5, 1], [14, 10, 6, 2], [15, 11, 7, 3], [16, 12, 8, 4]], 'Rotate 90 clockwise')
        self.assertListEqual(r.Rotate90CounterClock(matrix), [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 16]], 'Rotate 90 anti-clockwise')

    def test_matrixMove(self):
        # Single-step ring rotation (elements shift one position around each
        # ring); same in-place round-trip pattern as above.
        r = c()
        matrix = [
            [1, 2, 3, 4],
            [5, 6, 7, 8],
            [9, 10, 11, 12],
            [13, 14, 15, 16]
        ]
        self.assertListEqual(r.rotateMatrixClockwise(matrix), [[5, 1, 2, 3], [9, 10, 6, 4], [13, 11, 7, 8], [14, 15, 16, 12]], 'Rotate one step clockwise')
        self.assertListEqual(r.rotateMatrixCounterClockwise(matrix), [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 16]], 'Rotate one step anti-clockwise')
|
mit
| -5,891,117,889,782,187,000
| 38.921053
| 167
| 0.509235
| false
| 2.966732
| true
| false
| false
|
boblefrag/lolyx
|
lolyx/urls.py
|
1
|
1605
|
# -*- coding: utf-8 -*- pylint: disable-msg=R0801
#
# Copyright (c) 2013 Rodolphe Quiédeville <rodolphe@quiedeville.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.contrib import admin
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from django.core.urlresolvers import reverse_lazy
# Uncomment the next two lines to enable the admin:
# Populate the admin registry from each installed app's admin module.
admin.autodiscover()

# Django 1.x-style URLconf (patterns() with string view paths was removed in
# Django 1.10; this file targets the older API imported above).
urlpatterns = patterns(
    '',
    url(r'^admin/', include(admin.site.urls)),
    url(r'^accounts/', include('registration.urls')),
    url(r'^cv/', include('lolyx.resume.urls')),
    # NOTE(review): 'offres/' includes the same lolyx.resume.urls as 'cv/' —
    # confirm this is intentional and not a copy-paste of the resume include.
    url(r'^offres/', include('lolyx.resume.urls')),
    url(r'^accounts/profile/$', 'lolyx.llx.views.profile'),
    url(r'^$', 'lolyx.llx.views.home', name='home'),
    # Legacy search URL redirected to the named 'resume' route.
    url(r'^search/cv/$', RedirectView.as_view(url=reverse_lazy('resume'))),
    # TODO: Use reverse_lazy and not hard the path
    url(r'^search/cv/date.php$', RedirectView.as_view(url='/cv/date/')),
)
|
gpl-3.0
| 8,413,431,553,604,031,000
| 42.351351
| 75
| 0.700748
| false
| 3.525275
| false
| false
| false
|
tensorflow/models
|
research/object_detection/builders/preprocessor_builder.py
|
1
|
18154
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Builder for preprocessing steps."""
import tensorflow.compat.v1 as tf
from object_detection.core import preprocessor
from object_detection.protos import preprocessor_pb2
def _get_step_config_from_proto(preprocessor_step_config, step_name):
"""Returns the value of a field named step_name from proto.
Args:
preprocessor_step_config: A preprocessor_pb2.PreprocessingStep object.
step_name: Name of the field to get value from.
Returns:
result_dict: a sub proto message from preprocessor_step_config which will be
later converted to a dictionary.
Raises:
ValueError: If field does not exist in proto.
"""
for field, value in preprocessor_step_config.ListFields():
if field.name == step_name:
return value
raise ValueError('Could not get field %s from proto!' % step_name)
def _get_dict_from_proto(config):
"""Helper function to put all proto fields into a dictionary.
For many preprocessing steps, there's an trivial 1-1 mapping from proto fields
to function arguments. This function automatically populates a dictionary with
the arguments from the proto.
Protos that CANNOT be trivially populated include:
* nested messages.
* steps that check if an optional field is set (ie. where None != 0).
* protos that don't map 1-1 to arguments (ie. list should be reshaped).
* fields requiring additional validation (ie. repeated field has n elements).
Args:
config: A protobuf object that does not violate the conditions above.
Returns:
result_dict: |config| converted into a python dictionary.
"""
result_dict = {}
for field, value in config.ListFields():
result_dict[field.name] = value
return result_dict
# A map from a PreprocessingStep proto config field name to the preprocessing
# function that should be used. The PreprocessingStep proto should be parsable
# with _get_dict_from_proto.
PREPROCESSING_FUNCTION_MAP = {
    'normalize_image':
        preprocessor.normalize_image,
    'random_pixel_value_scale':
        preprocessor.random_pixel_value_scale,
    'random_image_scale':
        preprocessor.random_image_scale,
    'random_rgb_to_gray':
        preprocessor.random_rgb_to_gray,
    'random_adjust_brightness':
        preprocessor.random_adjust_brightness,
    'random_adjust_contrast':
        preprocessor.random_adjust_contrast,
    'random_adjust_hue':
        preprocessor.random_adjust_hue,
    'random_adjust_saturation':
        preprocessor.random_adjust_saturation,
    'random_distort_color':
        preprocessor.random_distort_color,
    'random_crop_to_aspect_ratio':
        preprocessor.random_crop_to_aspect_ratio,
    'random_black_patches':
        preprocessor.random_black_patches,
    'random_jpeg_quality':
        preprocessor.random_jpeg_quality,
    'random_downscale_to_target_pixels':
        preprocessor.random_downscale_to_target_pixels,
    'random_patch_gaussian':
        preprocessor.random_patch_gaussian,
    'rgb_to_gray':
        preprocessor.rgb_to_gray,
    'scale_boxes_to_pixel_coordinates': (
        preprocessor.scale_boxes_to_pixel_coordinates),
    'subtract_channel_mean':
        preprocessor.subtract_channel_mean,
    'convert_class_logits_to_softmax':
        preprocessor.convert_class_logits_to_softmax,
    'adjust_gamma':
        preprocessor.adjust_gamma,
}


# A map to convert from preprocessor_pb2.ResizeImage.Method enum to
# tf.image.ResizeMethod.
RESIZE_METHOD_MAP = {
    preprocessor_pb2.ResizeImage.AREA: tf.image.ResizeMethod.AREA,
    preprocessor_pb2.ResizeImage.BICUBIC: tf.image.ResizeMethod.BICUBIC,
    preprocessor_pb2.ResizeImage.BILINEAR: tf.image.ResizeMethod.BILINEAR,
    preprocessor_pb2.ResizeImage.NEAREST_NEIGHBOR: (
        tf.image.ResizeMethod.NEAREST_NEIGHBOR),
}
def get_random_jitter_kwargs(proto):
  """Builds the kwargs dict for preprocessor.random_jitter_boxes.

  Args:
    proto: a preprocessor_pb2.RandomJitterBoxes message.

  Returns:
    Dict with 'ratio' (float) and 'jitter_mode' (lowercase enum name string).
  """
  mode_name = preprocessor_pb2.RandomJitterBoxes.JitterMode.Name(
      proto.jitter_mode)
  return {
      'ratio': proto.ratio,
      'jitter_mode': mode_name.lower(),
  }
def build(preprocessor_step_config):
  """Builds preprocessing step based on the configuration.

  Args:
    preprocessor_step_config: PreprocessingStep configuration proto.

  Returns:
    function, argmap: A callable function and an argument map to call function
      with.

  Raises:
    ValueError: On invalid configuration.
  """
  step_type = preprocessor_step_config.WhichOneof('preprocessing_step')

  # Fast path: steps whose proto fields map 1-1 onto function kwargs.
  if step_type in PREPROCESSING_FUNCTION_MAP:
    preprocessing_function = PREPROCESSING_FUNCTION_MAP[step_type]
    step_config = _get_step_config_from_proto(preprocessor_step_config,
                                              step_type)
    function_args = _get_dict_from_proto(step_config)
    return (preprocessing_function, function_args)

  # Everything below needs custom handling (optional-field semantics,
  # repeated fields reshaped into tuples/lists, extra validation).
  # Throughout, `x or None` converts an unset/zero proto field into None so
  # the preprocessor function falls back to its own default.
  if step_type == 'random_horizontal_flip':
    config = preprocessor_step_config.random_horizontal_flip
    return (preprocessor.random_horizontal_flip,
            {
                'keypoint_flip_permutation': tuple(
                    config.keypoint_flip_permutation) or None,
                'probability': config.probability or None,
            })

  if step_type == 'random_vertical_flip':
    config = preprocessor_step_config.random_vertical_flip
    return (preprocessor.random_vertical_flip,
            {
                'keypoint_flip_permutation': tuple(
                    config.keypoint_flip_permutation) or None,
                'probability': config.probability or None,
            })

  if step_type == 'random_rotation90':
    config = preprocessor_step_config.random_rotation90
    return (preprocessor.random_rotation90,
            {
                'keypoint_rot_permutation': tuple(
                    config.keypoint_rot_permutation) or None,
                'probability': config.probability or None,
            })

  if step_type == 'random_crop_image':
    config = preprocessor_step_config.random_crop_image
    return (preprocessor.random_crop_image,
            {
                'min_object_covered': config.min_object_covered,
                'aspect_ratio_range': (config.min_aspect_ratio,
                                       config.max_aspect_ratio),
                'area_range': (config.min_area, config.max_area),
                'overlap_thresh': config.overlap_thresh,
                'clip_boxes': config.clip_boxes,
                'random_coef': config.random_coef,
            })

  if step_type == 'random_pad_image':
    config = preprocessor_step_config.random_pad_image
    # min/max image sizes must be set in height/width pairs or not at all.
    min_image_size = None
    if (config.HasField('min_image_height') !=
        config.HasField('min_image_width')):
      raise ValueError('min_image_height and min_image_width should be either '
                       'both set or both unset.')
    if config.HasField('min_image_height'):
      min_image_size = (config.min_image_height, config.min_image_width)

    max_image_size = None
    if (config.HasField('max_image_height') !=
        config.HasField('max_image_width')):
      raise ValueError('max_image_height and max_image_width should be either '
                       'both set or both unset.')
    if config.HasField('max_image_height'):
      max_image_size = (config.max_image_height, config.max_image_width)

    pad_color = config.pad_color or None
    if pad_color:
      # Wrong length only warns (does not raise) before the cast.
      if len(pad_color) != 3:
        tf.logging.warn('pad_color should have 3 elements (RGB) if set!')
      pad_color = tf.cast([x for x in config.pad_color], dtype=tf.float32)
    return (preprocessor.random_pad_image,
            {
                'min_image_size': min_image_size,
                'max_image_size': max_image_size,
                'pad_color': pad_color,
            })

  if step_type == 'random_absolute_pad_image':
    config = preprocessor_step_config.random_absolute_pad_image
    max_height_padding = config.max_height_padding or 1
    max_width_padding = config.max_width_padding or 1
    pad_color = config.pad_color or None
    if pad_color:
      if len(pad_color) != 3:
        tf.logging.warn('pad_color should have 3 elements (RGB) if set!')
      pad_color = tf.cast([x for x in config.pad_color], dtype=tf.float32)
    return (preprocessor.random_absolute_pad_image,
            {
                'max_height_padding': max_height_padding,
                'max_width_padding': max_width_padding,
                'pad_color': pad_color,
            })

  if step_type == 'random_crop_pad_image':
    config = preprocessor_step_config.random_crop_pad_image
    min_padded_size_ratio = config.min_padded_size_ratio
    if min_padded_size_ratio and len(min_padded_size_ratio) != 2:
      raise ValueError('min_padded_size_ratio should have 2 elements if set!')
    max_padded_size_ratio = config.max_padded_size_ratio
    if max_padded_size_ratio and len(max_padded_size_ratio) != 2:
      raise ValueError('max_padded_size_ratio should have 2 elements if set!')
    pad_color = config.pad_color or None
    if pad_color:
      if len(pad_color) != 3:
        tf.logging.warn('pad_color should have 3 elements (RGB) if set!')
      pad_color = tf.cast([x for x in config.pad_color], dtype=tf.float32)
    kwargs = {
        'min_object_covered': config.min_object_covered,
        'aspect_ratio_range': (config.min_aspect_ratio,
                               config.max_aspect_ratio),
        'area_range': (config.min_area, config.max_area),
        'overlap_thresh': config.overlap_thresh,
        'clip_boxes': config.clip_boxes,
        'random_coef': config.random_coef,
        'pad_color': pad_color,
    }
    # Size ratios are only forwarded when set, so the preprocessor's
    # defaults apply otherwise.
    if min_padded_size_ratio:
      kwargs['min_padded_size_ratio'] = tuple(min_padded_size_ratio)
    if max_padded_size_ratio:
      kwargs['max_padded_size_ratio'] = tuple(max_padded_size_ratio)
    return (preprocessor.random_crop_pad_image, kwargs)

  if step_type == 'random_resize_method':
    config = preprocessor_step_config.random_resize_method
    return (preprocessor.random_resize_method,
            {
                'target_size': [config.target_height, config.target_width],
            })

  if step_type == 'resize_image':
    config = preprocessor_step_config.resize_image
    method = RESIZE_METHOD_MAP[config.method]
    return (preprocessor.resize_image,
            {
                'new_height': config.new_height,
                'new_width': config.new_width,
                'method': method
            })

  if step_type == 'random_self_concat_image':
    config = preprocessor_step_config.random_self_concat_image
    return (preprocessor.random_self_concat_image, {
        'concat_vertical_probability': config.concat_vertical_probability,
        'concat_horizontal_probability': config.concat_horizontal_probability
    })

  if step_type == 'ssd_random_crop':
    config = preprocessor_step_config.ssd_random_crop
    # With repeated `operations`, each kwarg becomes a per-operation list;
    # with none, the preprocessor's defaults are used.
    if config.operations:
      min_object_covered = [op.min_object_covered for op in config.operations]
      aspect_ratio_range = [(op.min_aspect_ratio, op.max_aspect_ratio)
                            for op in config.operations]
      area_range = [(op.min_area, op.max_area) for op in config.operations]
      overlap_thresh = [op.overlap_thresh for op in config.operations]
      clip_boxes = [op.clip_boxes for op in config.operations]
      random_coef = [op.random_coef for op in config.operations]
      return (preprocessor.ssd_random_crop,
              {
                  'min_object_covered': min_object_covered,
                  'aspect_ratio_range': aspect_ratio_range,
                  'area_range': area_range,
                  'overlap_thresh': overlap_thresh,
                  'clip_boxes': clip_boxes,
                  'random_coef': random_coef,
              })
    return (preprocessor.ssd_random_crop, {})

  if step_type == 'autoaugment_image':
    config = preprocessor_step_config.autoaugment_image
    return (preprocessor.autoaugment_image, {
        'policy_name': config.policy_name,
    })

  if step_type == 'drop_label_probabilistically':
    config = preprocessor_step_config.drop_label_probabilistically
    return (preprocessor.drop_label_probabilistically, {
        'dropped_label': config.label,
        'drop_probability': config.drop_probability,
    })

  if step_type == 'remap_labels':
    config = preprocessor_step_config.remap_labels
    return (preprocessor.remap_labels, {
        'original_labels': config.original_labels,
        'new_label': config.new_label
    })

  if step_type == 'ssd_random_crop_pad':
    config = preprocessor_step_config.ssd_random_crop_pad
    if config.operations:
      min_object_covered = [op.min_object_covered for op in config.operations]
      aspect_ratio_range = [(op.min_aspect_ratio, op.max_aspect_ratio)
                            for op in config.operations]
      area_range = [(op.min_area, op.max_area) for op in config.operations]
      overlap_thresh = [op.overlap_thresh for op in config.operations]
      clip_boxes = [op.clip_boxes for op in config.operations]
      random_coef = [op.random_coef for op in config.operations]
      min_padded_size_ratio = [tuple(op.min_padded_size_ratio)
                               for op in config.operations]
      max_padded_size_ratio = [tuple(op.max_padded_size_ratio)
                               for op in config.operations]
      pad_color = [(op.pad_color_r, op.pad_color_g, op.pad_color_b)
                   for op in config.operations]
      return (preprocessor.ssd_random_crop_pad,
              {
                  'min_object_covered': min_object_covered,
                  'aspect_ratio_range': aspect_ratio_range,
                  'area_range': area_range,
                  'overlap_thresh': overlap_thresh,
                  'clip_boxes': clip_boxes,
                  'random_coef': random_coef,
                  'min_padded_size_ratio': min_padded_size_ratio,
                  'max_padded_size_ratio': max_padded_size_ratio,
                  'pad_color': pad_color,
              })
    return (preprocessor.ssd_random_crop_pad, {})

  if step_type == 'ssd_random_crop_fixed_aspect_ratio':
    config = preprocessor_step_config.ssd_random_crop_fixed_aspect_ratio
    if config.operations:
      min_object_covered = [op.min_object_covered for op in config.operations]
      area_range = [(op.min_area, op.max_area) for op in config.operations]
      overlap_thresh = [op.overlap_thresh for op in config.operations]
      clip_boxes = [op.clip_boxes for op in config.operations]
      random_coef = [op.random_coef for op in config.operations]
      return (preprocessor.ssd_random_crop_fixed_aspect_ratio,
              {
                  'min_object_covered': min_object_covered,
                  'aspect_ratio': config.aspect_ratio,
                  'area_range': area_range,
                  'overlap_thresh': overlap_thresh,
                  'clip_boxes': clip_boxes,
                  'random_coef': random_coef,
              })
    return (preprocessor.ssd_random_crop_fixed_aspect_ratio, {})

  if step_type == 'ssd_random_crop_pad_fixed_aspect_ratio':
    config = preprocessor_step_config.ssd_random_crop_pad_fixed_aspect_ratio
    kwargs = {}
    aspect_ratio = config.aspect_ratio
    if aspect_ratio:
      kwargs['aspect_ratio'] = aspect_ratio
    min_padded_size_ratio = config.min_padded_size_ratio
    if min_padded_size_ratio:
      if len(min_padded_size_ratio) != 2:
        raise ValueError('min_padded_size_ratio should have 2 elements if set!')
      kwargs['min_padded_size_ratio'] = tuple(min_padded_size_ratio)
    max_padded_size_ratio = config.max_padded_size_ratio
    if max_padded_size_ratio:
      if len(max_padded_size_ratio) != 2:
        raise ValueError('max_padded_size_ratio should have 2 elements if set!')
      kwargs['max_padded_size_ratio'] = tuple(max_padded_size_ratio)
    if config.operations:
      kwargs['min_object_covered'] = [op.min_object_covered
                                      for op in config.operations]
      kwargs['aspect_ratio_range'] = [(op.min_aspect_ratio, op.max_aspect_ratio)
                                      for op in config.operations]
      kwargs['area_range'] = [(op.min_area, op.max_area)
                              for op in config.operations]
      kwargs['overlap_thresh'] = [op.overlap_thresh for op in config.operations]
      kwargs['clip_boxes'] = [op.clip_boxes for op in config.operations]
      kwargs['random_coef'] = [op.random_coef for op in config.operations]
    return (preprocessor.ssd_random_crop_pad_fixed_aspect_ratio, kwargs)

  if step_type == 'random_square_crop_by_scale':
    config = preprocessor_step_config.random_square_crop_by_scale
    return preprocessor.random_square_crop_by_scale, {
        'scale_min': config.scale_min,
        'scale_max': config.scale_max,
        'max_border': config.max_border,
        'num_scales': config.num_scales
    }

  if step_type == 'random_scale_crop_and_pad_to_square':
    config = preprocessor_step_config.random_scale_crop_and_pad_to_square
    return preprocessor.random_scale_crop_and_pad_to_square, {
        'scale_min': config.scale_min,
        'scale_max': config.scale_max,
        'output_size': config.output_size,
    }

  if step_type == 'random_jitter_boxes':
    config = preprocessor_step_config.random_jitter_boxes
    kwargs = get_random_jitter_kwargs(config)
    return preprocessor.random_jitter_boxes, kwargs

  raise ValueError('Unknown preprocessing step.')
|
apache-2.0
| 2,135,707,827,955,755,500
| 39.979684
| 80
| 0.640961
| false
| 3.832383
| true
| false
| false
|
tobegit3hub/deep_recommend_system
|
http_service/restful_server/settings.py
|
1
|
3216
|
"""
Django settings for restful_server project.
Generated by 'django-admin startproject' using Django 1.9.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '5jeg*%=e7r7*=z*f-5+uz(l3wbe3&1_#306wc6iry!u4shd7)-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'restful_server.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'restful_server.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME':
'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
|
apache-2.0
| 7,750,119,693,354,985,000
| 26.02521
| 83
| 0.684701
| false
| 3.526316
| false
| false
| false
|
AstroTech/workshop-python
|
network/src/imap-gmail.py
|
1
|
1379
|
import getpass
import imaplib
import email
from pprint import pprint
from quopri import decodestring
from datetime import datetime
# Credentials are taken interactively: the OS login name as the account and a
# hidden password prompt (never hard-code Gmail credentials in source).
USERNAME = getpass.getuser()
PASSWORD = getpass.getpass()
HOST = 'imap.gmail.com'
PORT = 993  # standard IMAPS (IMAP over TLS) port
# Open a TLS-wrapped IMAP session, authenticate, and select the inbox so the
# SEARCH/FETCH calls below operate on it.
imap = imaplib.IMAP4_SSL(HOST, PORT)
imap.login(USERNAME, PASSWORD)
imap.select('INBOX')
def get_str(text):
    """Decode a quoted-printable encoded header value to a text string."""
    raw = decodestring(text)
    return raw.decode()
def get_date(text):
    """Parse an RFC 2822 date header into a datetime.

    Fixed: the original parsed the global ``headers['Date']`` instead of the
    ``text`` argument, so it depended on module state (NameError before the
    fetch loop ran) and ignored what the caller passed in.

    Returns the original string unchanged when it does not match the
    expected ``'%a, %d %b %Y %H:%M:%S %z'`` format.
    """
    try:
        return datetime.strptime(text, '%a, %d %b %Y %H:%M:%S %z')
    except ValueError:
        return text
def get_body(msg):
    """Return the first text payload of *msg* decoded as UTF-8.

    Returns None for content types that carry no text part.
    """
    maintype = msg.get_content_maintype()
    if maintype == 'text':
        return msg.get_payload(decode=True).decode('utf-8')
    if maintype == 'multipart':
        for part in msg.get_payload():
            if part.get_content_maintype() == 'text':
                return part.get_payload(decode=True).decode('utf-8')
# Enumerate every message UID in the selected mailbox.
status, data = imap.search(None, 'ALL')
# status: OK
# data: [b'1 2 3 4 ...']
for num in data[0].split():
    # RFC822 fetch returns the full raw message source for this UID.
    status, data = imap.fetch(num, '(RFC822)')
    mail = email.message_from_string(data[0][1].decode())
    # NOTE(review): _headers is a private attribute of email.message.Message;
    # the public API would be dict(mail.items()) — confirm before upgrading.
    headers = dict(mail._headers)
    # Rebind `mail` to a plain dict of decoded fields for pretty-printing.
    mail = {
        'to': get_str(headers['To']),
        'sender': get_str(headers['From']),
        'subject': get_str(headers['Subject']),
        'date': get_date(headers['Date']),
        'body': get_body(mail)
    }
    pprint(mail)
# Close the mailbox and end the session cleanly.
imap.close()
imap.logout()
|
mit
| 7,039,509,036,235,123,000
| 22.372881
| 77
| 0.618564
| false
| 3.275534
| false
| false
| false
|
uhuramedia/Havel
|
HavelCMS/admin.py
|
1
|
7900
|
import datetime
from django.conf import settings
from django.contrib import admin
from django.contrib.admin.options import FORMFIELD_FOR_DBFIELD_DEFAULTS
from django.core import urlresolvers
from django.db import models
from django.utils.importlib import import_module
from django.utils.translation import ugettext_lazy as _
from feincms.admin.tree_editor import TreeEditor as _feincms_tree_editor
from mptt.admin import MPTTModelAdmin
from mptt.forms import MPTTAdminForm, TreeNodeChoiceField
from HavelCMS.models import ResourceProperty, Page, Weblink, Resource, \
ResourceTranslation, ResourceCollection, ResourceCollectionItem, File
from contrib.attachments.admin import FileLinkInline, LinkInline
def get_class_from_string(str):
path = str
i = path.rfind('.')
module, attr = path[:i], path[i + 1:]
try:
mod = import_module(module)
return getattr(mod, attr)
except ImportError, e:
raise ImproperlyConfigured('Error importing module %s: "%s"' % (module, e))
class ResourcePropertyInline(admin.TabularInline):
    # Tabular inline editor for ResourceProperty rows attached to a resource;
    # extra=0 renders no blank "add" forms by default.
    model = ResourceProperty
    extra = 0
class FeinCMSModelAdmin(_feincms_tree_editor):
    """
    A ModelAdmin to add changelist tree view and editing capabilities.
    Requires FeinCMS to be installed.
    """
    form = MPTTAdminForm
    def _actions_column(self, obj):
        # Prepend "+Page" / "+Weblink" shortcut links to the per-row actions;
        # each link pre-fills the MPTT parent field with this row's pk.
        actions = super(FeinCMSModelAdmin, self)._actions_column(obj)
        actions.insert(0,
            u'<a href="%s?%s=%s" title="%s">%s</a>' % (
                urlresolvers.reverse('admin:HavelCMS_page_add'),
                self.model._mptt_meta.parent_attr,
                obj.pk,
                _('+Page'),
                _('+Page')))
        actions.insert(0,
            u'<a href="%s?%s=%s" title="%s">%s</a>' % (
                urlresolvers.reverse('admin:HavelCMS_weblink_add'),
                self.model._mptt_meta.parent_attr,
                obj.pk,
                _('+Weblink'),
                _('+Weblink')))
        return actions
    def delete_selected_tree(self, modeladmin, request, queryset):
        """
        Deletes multiple instances and makes sure the MPTT fields get recalculated properly.
        (Because merely doing a bulk delete doesn't trigger the post_delete hooks.)
        """
        n = 0
        for obj in queryset:
            obj.delete()
            n += 1
        self.message_user(request, _("Successfully deleted %s items.") % n)
    def get_actions(self, request):
        # Replace the default bulk delete with the tree-aware variant above.
        actions = super(FeinCMSModelAdmin, self).get_actions(request)
        if 'delete_selected' in actions:
            actions['delete_selected'] = (self.delete_selected_tree, 'delete_selected', _("Delete selected %(verbose_name_plural)s"))
        return actions
def page_or_else(resource, code):
    """Return *resource*'s translation for language *code*, or "-" if none."""
    translated = resource.get_translated_version(code)
    return "-" if translated is None else translated
class ResourceAdmin(FeinCMSModelAdmin):
    # Read-only tree overview of all resources; rows are edited via the
    # concrete Page/Weblink admins (see title_link and has_add_permission).
    list_display = ('__unicode__',
                    'title_link',
                    'is_published',
                    'in_menu',
                    'translation_pool',
                    'language',
                    'author')
    list_filter = ('is_published', 'in_menu', 'author', 'language')
    search_fields = ('title',)
    inlines = (ResourcePropertyInline,)
    actions = ('make_published', 'make_unpublished', 'link')
    prepopulated_fields = {'slug': ('title',)}
    ordering = ['tree_id', 'lft']
    def __init__(self, *args, **kwargs):
        super(ResourceAdmin, self).__init__(*args, **kwargs)
        # Disable the default change-link column; editing goes through the
        # explicit title_link column instead.
        self.list_display_links = (None,)
    def has_add_permission(self, request, obj=None):
        # Resources are only created as Pages or Weblinks, never directly.
        return False
    def title_link(self, obj):
        # HTML link to the concrete (Page/Weblink) edit form for this row.
        return u'<a href="%s">%s</a>' % (obj.get_edit_link(),
                                         obj.content_type)
    title_link.allow_tags = True
    title_link.short_description = _("Edit")
    def make_do(self, request, queryset, label, *args, **make):
        # Shared bulk-update helper for the publish/unpublish admin actions.
        rows_updated = queryset.update(**make)
        if rows_updated == 1:
            message_bit = _("1 resource was")
        else:
            message_bit = _("%s resources were" % rows_updated)
        self.message_user(request, _("%(num)s successfully %(action)s." % {'num': message_bit, 'action': label}))
    def make_published(self, request, queryset):
        return self.make_do(request, queryset, _("marked as published"),
                            is_published=True, published=datetime.datetime.now())
    make_published.short_description = _("Mark selected resources as published")
    def make_unpublished(self, request, queryset):
        return self.make_do(request, queryset, _("marked as unpublished"),
                            is_published=False, published=None)
    make_unpublished.short_description = _("Mark selected resources as unpublished")
    def link(self, request, queryset):
        # Put all selected resources into one freshly created translation pool.
        rt = ResourceTranslation.objects.create()
        for obj in queryset:
            obj.translation_pool = rt
            obj.save()
    link.short_description = _("Link these resources as translation")
# Read-only overview of all resources (creation happens via Page/Weblink).
admin.site.register(Resource, ResourceAdmin)
class PageAdmin(FeinCMSModelAdmin):
    # Tree-aware admin for CMS pages with settings-driven widget/inline hooks.
    list_display = ('mptt_title',
                    'is_published',
                    'in_menu',
                    'slug',
                    'language',
                    'author')
    ordering = ('tree_id', 'lft')
    list_filter = ('is_published', 'in_menu', 'author', 'language')
    inlines = (ResourcePropertyInline, LinkInline, FileLinkInline)
    prepopulated_fields = {'slug': ('title',)}
    fieldsets = (
        (None, {
            'fields': ('parent', ('title', 'slug'), 'language', 'text', 'template')
        }),
        ('Settings', {
            'fields': ('in_menu', 'is_published', 'show_title')
        }),
        ('Timing', {
            'classes': ('collapse',),
            'fields': ('published', 'unpublished')
        }),
        ('Other', {
            'classes': ('collapse',),
            'fields': ('menu_title', 'meta_summary', 'noindex')
        }),
    )
    def __init__(self, *args, **kwargs):
        super(PageAdmin, self).__init__(*args, **kwargs)
        # Optional project setting RESOURCES_PAGE_TEXTWIDGET: dotted path of a
        # widget class to use for the body text field.
        setting = "RESOURCES_%s_TEXTWIDGET" % self.model._meta.model_name.upper()
        if hasattr(settings, setting):
            self.formfield_overrides = {
                models.TextField: {'widget': get_class_from_string(getattr(settings, setting)) }
            }
        # Merge our overrides on top of Django's defaults.
        overrides = FORMFIELD_FOR_DBFIELD_DEFAULTS.copy()
        overrides.update(self.formfield_overrides)
        self.formfield_overrides = overrides
        # Optional RESOURCES_PAGE_INLINES: extra inline classes by dotted path.
        setting = "RESOURCES_%s_INLINES" % self.model._meta.model_name.upper()
        if hasattr(settings, setting):
            self.inlines = list(self.inlines)
            for i in getattr(settings, setting):
                self.inlines.append(get_class_from_string(i))
    def save_model(self, request, obj, form, change):
        # Default the author to the saving user on first save.
        if getattr(obj, 'author', None) is None:
            obj.author = request.user
        obj.save()
# Pages are the primary directly-editable resource type.
admin.site.register(Page, PageAdmin)
class WeblinkAdmin(ResourceAdmin):
    # Same overview as ResourceAdmin but weblinks may be created directly.
    def __init__(self, *args, **kwargs):
        super(WeblinkAdmin, self).__init__(*args, **kwargs)
        # Optional RESOURCES_WEBLINK_INLINES setting: extra inline classes
        # referenced by dotted path (see get_class_from_string).
        setting = "RESOURCES_%s_INLINES" % self.model._meta.model_name.upper()
        if hasattr(settings, setting):
            self.inlines = list(self.inlines)
            for i in getattr(settings, setting):
                self.inlines.append(get_class_from_string(i))
    def has_add_permission(self, request, obj=None):
        # Unlike the abstract resource list, weblinks can be added here.
        return True
# Weblinks reuse the resource admin behaviour but allow direct creation.
admin.site.register(Weblink, WeblinkAdmin)
class ResourceCollectionItemInline(admin.TabularInline):
    # Inline rows for the items belonging to a ResourceCollection.
    model = ResourceCollectionItem
class ResourceCollectionAdmin(admin.ModelAdmin):
    # Collections are edited together with their member items.
    inlines = (ResourceCollectionItemInline,)
# File gets the default ModelAdmin; collections use the inline editor above.
admin.site.register(ResourceCollection, ResourceCollectionAdmin)
admin.site.register(File)
|
bsd-3-clause
| -1,609,924,798,894,494,700
| 34.426009
| 133
| 0.599114
| false
| 4.059609
| false
| false
| false
|
lecovi/reveal.js
|
archivos/encapsulamiento_property.py
|
1
|
1041
|
class Encapsulamiento:
    """Demonstrates Python name-based encapsulation: one public, one
    "semi-private" (single underscore) and one name-mangled private
    attribute, plus a property exposing the private one.

    The original doctest showed the private attribute's value for the public
    attribute; the expected outputs below match what the code actually
    assigns (note: the attribute values themselves are runtime strings and
    are left in Spanish).

    >>> x = Encapsulamiento()
    >>> x.atributo_publico
    'este atributo es público.'
    >>> x._atributo_semi_privado
    "este atributo es 'casi' privado."
    >>> x.atributo_privado
    'este atributo es privado.'
    """
    def __init__(self):
        # Name-mangled to _Encapsulamiento__atributo_privado.
        self.__atributo_privado = "este atributo es privado."
        self._atributo_semi_privado = "este atributo es 'casi' privado."
        self.atributo_publico = "este atributo es público."
    def publico(self):
        return "Este es un método Público"
    def _semi_privado(self):
        return "Este es un método Semi Privado"
    def __privado(self):
        return "Este es un método Privado"
    @property
    def atributo_privado(self):
        # Read access to the mangled attribute via a property.
        return self.__atributo_privado
    @atributo_privado.setter
    def atributo_privado(self, valor):
        self.__atributo_privado = valor
|
mit
| -8,376,594,411,097,522,000
| 30.333333
| 73
| 0.619923
| false
| 2.988439
| false
| false
| false
|
pashinin-com/pashinin.com
|
src/ege/migrations/0006_auto_20170217_1608.py
|
1
|
1323
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-17 13:08
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Creates the ege.Task model and links exams to their tasks via M2M.
    # (verbose_name/help_text strings are user-facing Russian text and must
    # stay byte-identical.)
    dependencies = [
        ('edu', '0007_auto_20170217_1434'),
        ('ege', '0005_auto_20170129_0117'),
    ]
    operations = [
        migrations.CreateModel(
            name='Task',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.IntegerField(choices=[(0, '1 единственная задача'), (1, '1 задача из N на выбор')], default=0, verbose_name='Нужно решить')),
                ('order', models.IntegerField(verbose_name='Номер задачи в экзамене, например от 1 до 27')),
                ('tags', models.ManyToManyField(help_text='Все тэги, которые подходят для этой задачи в этом экзамене', related_name='ege_tasks', to='edu.Category', verbose_name='Tags')),
            ],
        ),
        migrations.AddField(
            model_name='ege',
            name='tasks',
            field=models.ManyToManyField(blank=True, related_name='exams', to='ege.Task'),
        ),
    ]
|
gpl-3.0
| 364,146,395,202,600,960
| 38.966667
| 187
| 0.593828
| false
| 2.93154
| false
| false
| false
|
tweakyllama/Arduino-Projects
|
I2C/src/raspberry.py
|
1
|
1938
|
import time
import smbus
class I2C(object):
@staticmethod
def getPiVersion():
"Gets the version number of the Pi board"
try:
with open('/proc/cpuinfo', 'r') as infile:
for line in infile:
# Match a line of the form "Revision : 0002" while ignoring extra
# info in front of the revsion (like 1000 when the Pi was over-volted).
match = re.match('Revision\s+:\s+.*(\w{4})$', line)
if match and match.group(1) in ['0000', '0002', '0003']:
# Return revision 1 if revision ends with 0000, 0002 or 0003.
return 1
elif match:
# Assume revision 2 if revision ends with any other 4 chars.
return 2
return 0
except:
return 0
@staticmethod
def getI2CBusNumber():
return 1 if I2C.getPiVersion() > 1 else 0
def __init__(self, address, busnum = -1, debug = False):
self.address = address
# By default, the correct I2C bus is auto-detected using /proc/cpuinfo
# Alternatively, you can hard-code the bus version below:
# self.bus = smbus.SMBus(0); # Force I2C0 (early 256MB Pi's)
# self.bus = smbus.SMBus(1); # Force I2C1 (512MB Pi's)
self.bus = smbus.SMBus(busnum if busnum >= 0 else Adafruit_I2C.getPiI2CBusNumber())
self.debug = debug
def reverseByteOrder(self, data):
"Reverses the byte order of an int (16-bit) or a long (32-bit)"
# Courtesy Vishal Sapre
byteCount = len(hex(data)[2:].replace('L','')[::2])
val = 0
for i in range(byteCount):
val = (val << 8) | (data & 0xff)
data >>= 8
return val
def errMsg(self):
print "Error accessing 0x%02X: Check your I2C address" % self.address
return -1
def write8(self, reg, value):
"Writes an 8-bit value to specified register/address"
try:
self.bus.write_byte_data(self.address, reg, value)
except IOError, err:
return self.errMsg()
|
gpl-2.0
| 7,011,148,986,396,381,000
| 32.413793
| 87
| 0.615067
| false
| 3.388112
| false
| false
| false
|
joshbuddy/crew
|
pitcrew/tasks/ensure/aws/route53/has_records.py
|
1
|
1339
|
import json
import asyncio
from pitcrew import task
@task.arg("zone_id", desc="The zone id to operate on", type=str)
@task.arg("records", desc="A list of records to ensure are set", type=list)
class HasRecords(task.BaseTask):
"""Ensure route53 has the set of records"""
async def verify(self):
json_out = await self.sh(
f"aws route53 list-resource-record-sets --hosted-zone-id {self.params.esc_zone_id}"
)
out = json.loads(json_out)
existing_record_sets = out["ResourceRecordSets"]
for record in self.params.records:
assert record in existing_record_sets, "cannot find record"
async def run(self):
changes = map(
lambda c: {"Action": "UPSERT", "ResourceRecordSet": c}, self.params.records
)
change_batch = {"Changes": list(changes)}
change_id = json.loads(
await self.sh(
f"aws route53 change-resource-record-sets --hosted-zone-id {self.params.esc_zone_id} --change-batch {self.esc(json.dumps(change_batch))}"
)
)["ChangeInfo"]["Id"]
while (
json.loads(
await self.sh(f"aws route53 get-change --id {self.esc(change_id)}")
)["ChangeInfo"]["Status"]
== "PENDING"
):
await asyncio.sleep(5)
|
mit
| 3,340,604,327,670,191,000
| 36.194444
| 153
| 0.589246
| false
| 3.729805
| false
| false
| false
|
niavok/perroquet
|
perroquetlib/repository/exercise_repository_exercise.py
|
1
|
15308
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2011 Frédéric Bertolus.
# Copyright (C) 2009-2011 Matthieu Bizien.
#
# This file is part of Perroquet.
#
# Perroquet is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Perroquet is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Perroquet. If not, see <http://www.gnu.org/licenses/>.
import errno
import gettext
import logging
import os
import tarfile
import tempfile
import thread
import urllib2
from threading import Lock
from xml.dom.minidom import getDOMImplementation, parse
from perroquetlib.debug import defaultLoggingHandler, defaultLoggingLevel
# Shorthand alias so UI strings can be marked for translation as _("...").
_ = gettext.gettext
class ExerciseRepositoryExercise:
    """One exercise entry of an exercise repository.

    Holds the exercise metadata (author, packager, language, ...), manages
    download/installation of the exercise archive in a background thread,
    and serializes/parses the exercise description to/from exercise.xml.
    Python 2 module (old-style except syntax, `thread` module).
    """
    def __init__(self):
        self.id = "no-id"
        self.name = "No name"
        self.description = ""
        # Guards the install thread: held from start_install() until the
        # install thread finishes (see wait_install_end()).
        self.mutexInstalling = Lock()
        self.downloadPercent = 0
        self.state = "none"
        self.wordsCount = 0
        self.translationList = []
        self.version = None
        self.logger = logging.Logger("ExerciseRepositoryExercise")
        self.logger.setLevel(defaultLoggingLevel)
        self.logger.addHandler(defaultLoggingHandler)
        self.licence = _("Not specified")
        self.author = _("Not specified")
        self.authorWebsite = _("Not specified")
        self.authorContact = _("Not specified")
        self.packager = _("Not specified")
        self.packagerWebsite = _("Not specified")
        self.packagerContact = _("Not specified")
        self.language = _("Not specified")
        self.mediaType = _("Not specified")
        self.filePath = _("Not specified")
        self.system = False
    def set_system(self, system):
        """Define if the exo is a system exo or only a local one
        A system exo store common data in a system directory and only the
        progress in the local directory
        """
        self.system = system
    def is_installed(self):
        # Installed == the extracted template file exists locally.
        return os.path.isfile(self.get_template_path())
    def is_used(self):
        # Used == the learner has started an instance of the exercise.
        return os.path.isfile(self.get_instance_path())
    def is_done(self):
        return os.path.isfile(self.get_done_path())
    def start_install(self):
        # Acquire the mutex here; it is released at the end of
        # install_thread so wait_install_end() can block on it.
        self.mutexInstalling.acquire()
        self.canceled = False
        self.downloadPercent = 0
        self.play_thread_id = thread.start_new_thread(self.install_thread, ())
    def cancel_install(self):
        # Cooperative cancellation: the download loop polls this flag.
        self.canceled = True
    def wait_install_end(self):
        self.mutexInstalling.acquire()
        self.mutexInstalling.release()
    def download(self):
        """Download the exercise archive to a temp file; returns its path."""
        f = urllib2.urlopen(self.get_file_path())
        # NOTE(review): this rebinds the module-level gettext alias `_` —
        # harmless here but worth renaming in a behaviour-changing pass.
        _, tempPath = tempfile.mkstemp("", "perroquet-");
        wf = open(tempPath, 'w+b')
        size = f.info().get('Content-Length')
        if size is None:
            size = 0
        else:
            size = int(size)
        count = 0
        sizeToRead = 50000
        while not self.canceled:
            data = f.read(sizeToRead)
            wf.write(data)
            if len(data) != sizeToRead:
                break;
            count += sizeToRead
            # NOTE(review): when the server sends no Content-Length, size
            # stays 0 and this division raises ZeroDivisionError — confirm.
            self.downloadPercent = (round((float(count) / float(size)) * 100))
        self.downloading = False
        return tempPath
    def get_download_percent(self):
        return self.downloadPercent
    def get_state(self):
        # Lazily derive the state from the on-disk markers; possible values:
        #available
        #downloading
        #installing
        #installed
        #corrupted
        #canceled
        #removing
        #used
        #done
        if self.state == "none":
            if self.is_done():
                self.state = "done"
            elif self.is_used():
                self.state = "used"
            elif self.is_installed():
                self.state = "installed"
            else:
                self.state = "available"
        return self.state
    def set_state(self, state):
        # Notify the registered observer of every state transition.
        oldState = self.state
        self.state = state
        self.notifyStateChange(oldState, self.callbackData)
    def set_state_change_callback(self, callback, callbackData):
        self.notifyStateChange = callback
        self.callbackData = callbackData
    def install_thread(self):
        # Runs in a background thread started by start_install(); downloads
        # the archive, extracts it to the local path and updates the state.
        self.set_state("downloading")
        tmpPath = self.download()
        if self.canceled:
            self.logger.info("remove temp file")
            self.set_state("canceled")
            os.remove(tmpPath)
        else:
            self.set_state("installing")
            tar = tarfile.open(tmpPath)
            outPath = self.get_local_path()
            try:
                os.makedirs(outPath)
            except OSError, (ErrorNumber, ErrorMessage): # Python <=2.5
                if ErrorNumber == errno.EEXIST:
                    pass
                else: raise
            tar.extractall(outPath)
            tar.close()
            os.remove(tmpPath)
            if self.is_installed():
                self.set_state("installed")
            else:
                self.set_state("corrupted")
        self.mutexInstalling.release()
    def get_template_path(self):
        return os.path.join(self.get_local_path(), "template.perroquet")
    def get_instance_path(self):
        return os.path.join(self.get_personnal_local_path(), "instance.perroquet")
    def get_done_path(self):
        return os.path.join(self.get_personnal_local_path(), "done.perroquet")
    def set_name(self, name):
        self.name = name
    def get_name(self):
        return self.name
    def set_id(self, id):
        self.id = id
    def get_id(self):
        return self.id
    def set_description(self, description):
        self.description = description
    def get_description(self):
        return self.description
    def set_licence(self, licence):
        self.licence = licence
    def get_licence(self):
        return self.licence
    def set_language(self, language):
        self.language = language
    def get_language(self):
        return self.language
    def set_media_type(self, mediaType):
        self.mediaType = mediaType
    def get_media_type(self):
        return self.mediaType
    def set_version(self, version):
        self.version = version
    def get_version(self):
        return self.version
    def set_author(self, author):
        self.author = author
    def get_author(self):
        return self.author
    def set_words_count(self, wordsCount):
        self.wordsCount = wordsCount
    def get_words_count(self):
        return self.wordsCount
    def set_author_website(self, authorWebsite):
        self.authorWebsite = authorWebsite
    def get_author_website(self):
        return self.authorWebsite
    def set_author_contact(self, authorContact):
        self.authorContact = authorContact
    def get_author_contact(self):
        return self.authorContact
    def set_packager(self, packager):
        self.packager = packager
    def get_packager(self):
        return self.packager
    def set_packager_website(self, packagerWebsite):
        self.packagerWebsite = packagerWebsite
    def get_packager_website(self):
        return self.packagerWebsite
    def set_packager_contact(self, packagerContact):
        self.packagerContact = packagerContact
    def get_packager_contact(self):
        return self.packagerContact
    def set_file_path(self, filePath):
        self.filePath = filePath
    def get_file_path(self):
        return self.filePath
    def set_translations_list(self, translationList):
        self.translationList = translationList
    def get_translations_list(self):
        return self.translationList
    def set_parent(self, parent):
        self.parent = parent
    def get_local_path(self):
        # Directory name embeds the version so upgrades install side by side.
        versioned_id = None
        if self.version is not None:
            versioned_id = self.id + "_" + self.version
        else:
            versioned_id = self.id
        return os.path.join(self.parent.get_local_path(), versioned_id)
    def get_personnal_local_path(self):
        versioned_id = None
        if self.version is not None:
            versioned_id = self.id + "_" + self.version
        else:
            versioned_id = self.id
        return os.path.join(self.parent.get_personal_local_path(), versioned_id)
    def parse_description(self, xml_exercise):
        """Populate this object from a parsed exercise.xml DOM element."""
        self.set_name(self._get_text(xml_exercise.getElementsByTagName("name")[0].childNodes))
        self.set_id(self._get_text(xml_exercise.getElementsByTagName("id")[0].childNodes))
        self.set_description(self._get_text(xml_exercise.getElementsByTagName("description")[0].childNodes))
        self.set_licence(self._get_text(xml_exercise.getElementsByTagName("licence")[0].childNodes))
        self.set_language(self._get_text(xml_exercise.getElementsByTagName("language")[0].childNodes))
        self.set_media_type(self._get_text(xml_exercise.getElementsByTagName("media_type")[0].childNodes))
        self.set_version(self._get_text(xml_exercise.getElementsByTagName("exercise_version")[0].childNodes))
        self.set_author(self._get_text(xml_exercise.getElementsByTagName("author")[0].childNodes))
        self.set_author_website(self._get_text(xml_exercise.getElementsByTagName("author_website")[0].childNodes))
        self.set_author_contact(self._get_text(xml_exercise.getElementsByTagName("author_contact")[0].childNodes))
        self.set_packager(self._get_text(xml_exercise.getElementsByTagName("packager")[0].childNodes))
        self.set_packager_website(self._get_text(xml_exercise.getElementsByTagName("packager_website")[0].childNodes))
        self.set_packager_contact(self._get_text(xml_exercise.getElementsByTagName("packager_contact")[0].childNodes))
        # The remaining elements are optional in older exercise files.
        if len(xml_exercise.getElementsByTagName("words_count")) > 0:
            self.set_words_count(self._get_text(xml_exercise.getElementsByTagName("words_count")[0].childNodes))
        if len(xml_exercise.getElementsByTagName("file")) > 0:
            self.set_file_path(self._get_text(xml_exercise.getElementsByTagName("file")[0].childNodes))
        if len(xml_exercise.getElementsByTagName("translations")) > 0:
            xml_translations = xml_exercise.getElementsByTagName("translations")[0]
            translationList = []
            for xml_translation in xml_translations.getElementsByTagName("translation"):
                translationList.append(self._get_text(xml_translation.childNodes))
            self.set_translations_list(translationList)
    def generate_description(self):
        self._generate_description()
    def _generate_description(self):
        """Write this object's metadata to exercise.xml in the local path."""
        if not os.path.isdir(self.get_local_path()):
            try:
                os.makedirs(self.get_local_path())
            except OSError, (ErrorNumber, ErrorMessage): # Python <=2.5
                # NOTE(review): 666 is not errno.EEXIST (17); as written,
                # a pre-existing directory re-raises instead of passing —
                # compare with install_thread which uses errno.EEXIST.
                if ErrorNumber == 666: #EEXIST ???
                    pass
                else: raise
        impl = getDOMImplementation()
        newdoc = impl.createDocument(None, "perroquet_exercise", None)
        root_element = newdoc.documentElement
        # Name
        xml_name = newdoc.createElement("name")
        xml_name.appendChild(newdoc.createTextNode(self.get_name()))
        root_element.appendChild(xml_name)
        # Id
        xml_id = newdoc.createElement("id")
        xml_id.appendChild(newdoc.createTextNode(self.get_id()))
        root_element.appendChild(xml_id)
        # Description
        xml_description = newdoc.createElement("description")
        xml_description.appendChild(newdoc.createTextNode(self.get_description()))
        root_element.appendChild(xml_description)
        # Words count
        xml_version = newdoc.createElement("words_count")
        xml_version.appendChild(newdoc.createTextNode(str(self.get_words_count())))
        root_element.appendChild(xml_version)
        # Version
        xml_version = newdoc.createElement("exercise_version")
        xml_version.appendChild(newdoc.createTextNode(self.get_version()))
        root_element.appendChild(xml_version)
        # Licence
        xml_node = newdoc.createElement("licence")
        xml_node.appendChild(newdoc.createTextNode(self.get_licence()))
        root_element.appendChild(xml_node)
        # Language
        xml_node = newdoc.createElement("language")
        xml_node.appendChild(newdoc.createTextNode(self.get_language()))
        root_element.appendChild(xml_node)
        # Media type
        xml_node = newdoc.createElement("media_type")
        xml_node.appendChild(newdoc.createTextNode(self.get_media_type()))
        root_element.appendChild(xml_node)
        # author
        xml_node = newdoc.createElement("author")
        xml_node.appendChild(newdoc.createTextNode(self.get_author()))
        root_element.appendChild(xml_node)
        # author website
        xml_node = newdoc.createElement("author_website")
        xml_node.appendChild(newdoc.createTextNode(self.get_author_website()))
        root_element.appendChild(xml_node)
        # author contact
        xml_node = newdoc.createElement("author_contact")
        xml_node.appendChild(newdoc.createTextNode(self.get_author_contact()))
        root_element.appendChild(xml_node)
        # packager
        xml_node = newdoc.createElement("packager")
        xml_node.appendChild(newdoc.createTextNode(self.get_packager()))
        root_element.appendChild(xml_node)
        # packager website
        xml_node = newdoc.createElement("packager_website")
        xml_node.appendChild(newdoc.createTextNode(self.get_packager_website()))
        root_element.appendChild(xml_node)
        # packager contact
        xml_node = newdoc.createElement("packager_contact")
        xml_node.appendChild(newdoc.createTextNode(self.get_packager_contact()))
        root_element.appendChild(xml_node)
        # template path
        xml_node = newdoc.createElement("template")
        xml_node.appendChild(newdoc.createTextNode(self.get_template_path()))
        root_element.appendChild(xml_node)
        # translation
        #TODO
        xml_string = newdoc.toprettyxml()
        xml_string = xml_string.encode('utf8')
        repoDescriptionPath = os.path.join(self.get_local_path(), "exercise.xml")
        f = open(repoDescriptionPath, 'w')
        f.write(xml_string)
        f.close()
    def init_from_path(self, exercisePath):
        """Load metadata from exercise.xml, or fall back to the dir name."""
        exerciseDescriptionPath = os.path.join(exercisePath, "exercise.xml")
        if os.path.isfile(exerciseDescriptionPath):
            f = open(exerciseDescriptionPath, 'r')
            dom = parse(f)
            self.parse_description(dom)
        else:
            self.id = os.path.basename(exercisePath)
            self.name = self.id
            self.description = gettext.gettext("Imported exercise")
    def _get_text(self, nodelist):
        # Concatenate the text children of a DOM node and strip whitespace.
        rc = ""
        for node in nodelist:
            if node.nodeType == node.TEXT_NODE:
                rc = rc + node.data
        rc = rc.strip()
        return rc
|
gpl-3.0
| 3,889,338,079,324,293,000
| 32.346405
| 118
| 0.633281
| false
| 4.052423
| false
| false
| false
|
slub/vk2-georeference
|
georeference/utils/process/mapfile.py
|
1
|
4870
|
# -*- coding: utf-8 -*-
'''
Copyright (c) 2015 Jacob Mendt
Created on 04.08.15
@author: mendt
'''
import os
import uuid
from mapscript import MS_IMAGEMODE_RGB, MS_OFF, MS_PIXELS, MS_LAYER_RASTER, layerObj, mapObj, MS_ON, outputFormatObj
from georeference.utils.exceptions import MapfileBindingInitalizationException
# Default JPEG output format for the temporary WMS. Currently unused: the
# OUTPUTFORMAT branch in __initMapfileParameter__ is commented out below.
OutputFormat_JPEG = {"NAME":"jpeg","MIMETYPE":"image/jpeg","DRIVER":"AGG/JPEG","EXTENSION":"jpg",
                     "IMAGEMODE":MS_IMAGEMODE_RGB,"TRANSPARENT":MS_OFF}
# Default WMS metadata; wms_onlineresource later gets "map=<path>" appended.
Metadata = {"wms_srs":"EPSG:4326","wms_onlineresource":"http://localhost/cgi-bin/mapserv?",
            "wms_enable_request":"*","wms_titel":"Temporary Messtischblatt WMS"}
def createMapfile(layername, datapath, georefTargetSRS, mapfileTemplate, mapfileDir, mapfileParams):
    """Create a temporary mapfile for one raster layer and return the WMS
    online resource URL of the saved service.

    :type layername: str
    :type datapath: str
    :type georefTargetSRS: int
    :type mapfileTemplate: str
    :type mapfileDir: str
    :type mapfileParams: dict  (was mislabeled "str"; it is splatted as **kwargs)
    """
    # The former `try: ... except: raise` wrapper only re-raised every
    # exception, so it was removed; errors propagate unchanged to the caller.
    mapfile = MapfileBinding(mapfileTemplate, mapfileDir, **mapfileParams)
    mapfile.addLayerToMapfile(datapath, layername, georefTargetSRS)
    wms = mapfile.saveMapfile()
    return wms
class MapfileBinding:
def __init__(self, src_mapfilePath, dest_mapfileFolder, **kwargs):
# init wms service name
self.servicename= "wms_%s.map"%uuid.uuid4()
# init the mapfile based on a template file
self.mapfilepath = os.path.join(dest_mapfileFolder, self.servicename)
self.__initMapfile__(src_mapfilePath, self.mapfilepath)
if len(kwargs) > 0:
self.__initMapfileParameter__(kwargs)
else:
raise MapfileBindingInitalizationException("Missing mapfile information!")
def __initMapfile__(self, src_mapfilePath, dest_mapfilePath):
mapfile = mapObj(src_mapfilePath)
self.saveMapfile(mapfile)
self.mapfile = mapObj(self.mapfilepath)
def __initMapfileParameter__(self, kwargs):
"""
Set the option parameter for the map element
"""
#generic mapfile options
self.mapfile.units = MS_PIXELS
self.mapfile.status = MS_ON
#if "OUTPUTFORMAT" in kwargs:
# self.__addOutputFormat__(kwargs["OUTPUTFORMAT"])
if "METADATA" in kwargs:
self.__addMetadata__(kwargs["METADATA"])
def __addMetadata__(self, dictMD):
self.wms_url = dictMD["wms_onlineresource"]+"map=%s"%self.mapfilepath
for key in dictMD:
if key is "wms_onlineresource":
self.mapfile.web.metadata.set(key,self.wms_url)
else:
self.mapfile.web.metadata.set(key,dictMD[key])
def __addOutputFormat__(self, dictOutFormat):
"""
Function adds a outputformat object to the mapfile.
@param dictOutFormat: Represents a dictionary with the outputformat arguments. It should
contains the keys:
@param NAME:
@param MIMETYPE:
@param DRIVER:
@param EXTENSION:
@param IMAGEMODE:
@param TRANSPARENT:
"""
# creates a OutputFormatObject and adds the parameter to it
if "DRIVER" in dictOutFormat:
outFormatObj = outputFormatObj(dictOutFormat["DRIVER"])
else:
raise MapfileBindingInitalizationException("Missing Driver for OutputFormat Element")
if "NAME" in dictOutFormat:
outFormatObj.name = dictOutFormat["NAME"]
if "MIMETYPE" in dictOutFormat:
outFormatObj.mimetype = dictOutFormat["MIMETYPE"]
if "EXTENSION" in dictOutFormat:
outFormatObj.extension = dictOutFormat["EXTENSION"]
if "IMAGEMODE" in dictOutFormat:
outFormatObj.imagemode = dictOutFormat["IMAGEMODE"]
if "TRANSPARENT" in dictOutFormat:
outFormatObj.transparent = dictOutFormat["TRANSPARENT"]
# adds the OutputFormatObject to the mapfile
self.mapfile.appendOutputFormat(outFormatObj)
def saveMapfile(self, mapfile=None):
    """Persist a map object to ``self.mapfilepath``.

    @param mapfile: optional ``mapObj``.  When given (and of the correct
        type) it is saved and ``None`` is returned.  Otherwise the
        instance's own mapfile is saved and its WMS online-resource URL
        is returned.
    """
    # ``is not None`` instead of ``!= None``: identity is the idiomatic
    # (and __ne__-proof) way to test for "no argument supplied".
    if mapfile is not None and isinstance(mapfile, mapObj):
        mapfile.save(self.mapfilepath)
        return None
    else:
        self.mapfile.save(self.mapfilepath)
        return self.mapfile.getMetaData("wms_onlineresource")
def addLayerToMapfile(self, dataPath, layerName, georefTargetSRS):
    """Append a raster layer for the given data source to the mapfile.

    :type dataPath: str
    :type layerName: str
    :type georefTargetSRS: int
    """
    raster_layer = layerObj()
    raster_layer.name = layerName
    raster_layer.data = dataPath
    raster_layer.type = MS_LAYER_RASTER
    raster_layer.units = MS_PIXELS
    # New layers start switched off; callers enable them explicitly.
    raster_layer.status = MS_OFF
    raster_layer.setProjection("init=epsg:%s" % georefTargetSRS)
    self.mapfile.insertLayer(raster_layer)
|
gpl-3.0
| 5,549,427,881,581,592,000
| 35.074074
| 116
| 0.641684
| false
| 3.858954
| false
| false
| false
|
holgerd77/django-public-project
|
public_project/south_migrations/0023_auto__del_field_siteconfig_navi_link_color.py
|
1
|
21861
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: drop ``SiteConfig.navi_link_color``.

    ``forwards`` removes the column; ``backwards`` restores it with its
    previous default.  The ``models`` dict below is an auto-generated,
    frozen snapshot of the ORM at migration time -- do not edit by hand.
    """

    def forwards(self, orm):
        # Deleting field 'SiteConfig.navi_link_color'
        db.delete_column(u'public_project_siteconfig', 'navi_link_color')

    def backwards(self, orm):
        # Adding field 'SiteConfig.navi_link_color'
        db.add_column(u'public_project_siteconfig', 'navi_link_color',
                      self.gf('django.db.models.fields.CharField')(default='#FFFFFF', max_length=7),
                      keep_default=False)

    # Frozen ORM snapshot (auto-generated by South's --auto).
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'public_project.activitylog': {
            'Meta': {'ordering': "['-date']", 'object_name': 'ActivityLog'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'info': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '2'})
        },
        u'public_project.comment': {
            'Meta': {'ordering': "['-date_added']", 'object_name': 'Comment'},
            'activation_hash': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
            'comment': ('django.db.models.fields.TextField', [], {}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '250'}),
            'feedback_allowed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'published_by': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '250'})
        },
        u'public_project.commentrelation': {
            'Meta': {'object_name': 'CommentRelation'},
            'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.Comment']"}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'page': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
        },
        u'public_project.document': {
            'Meta': {'ordering': "['-date_added']", 'object_name': 'Document'},
            'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'date': ('django.db.models.fields.DateField', [], {}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'document': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            'events': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_documents'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Event']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'participants': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_documents'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Participant']"}),
            'pdf_images_generated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'project_parts': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_documents'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.ProjectPart']"}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '250'})
        },
        u'public_project.event': {
            'Meta': {'ordering': "['-date']", 'object_name': 'Event'},
            'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'date': ('django.db.models.fields.DateField', [], {}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'event_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'important': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'participants': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_events'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Participant']"}),
            'project_parts': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_events'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.ProjectPart']"}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '250'})
        },
        u'public_project.image': {
            'Meta': {'ordering': "['title']", 'object_name': 'Image'},
            'attribution': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
            'attribution_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '250'})
        },
        u'public_project.membership': {
            'Meta': {'object_name': 'Membership'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'from_participant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'from_memberships'", 'to': u"orm['public_project.Participant']"}),
            'function': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'to_participant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'to_memberships'", 'to': u"orm['public_project.Participant']"})
        },
        u'public_project.page': {
            'Meta': {'ordering': "['number']", 'object_name': 'Page'},
            'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'document': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.Document']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'number': ('django.db.models.fields.IntegerField', [], {})
        },
        u'public_project.participant': {
            'Meta': {'ordering': "['order', 'name']", 'object_name': 'Participant'},
            'belongs_to': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['public_project.Participant']", 'through': u"orm['public_project.Membership']", 'symmetrical': 'False'}),
            'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
            'order': ('django.db.models.fields.IntegerField', [], {'default': '500', 'null': 'True', 'blank': 'True'})
        },
        u'public_project.projectgoal': {
            'Meta': {'ordering': "['order']", 'object_name': 'ProjectGoal'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
            'order': ('django.db.models.fields.IntegerField', [], {'default': '100', 'null': 'True', 'blank': 'True'}),
            'performance_figure': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
            'project_goal_group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.ProjectGoalGroup']"})
        },
        u'public_project.projectgoalgroup': {
            'Meta': {'object_name': 'ProjectGoalGroup'},
            'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.Event']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'project_part': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.ProjectPart']", 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '250'})
        },
        u'public_project.projectpart': {
            'Meta': {'ordering': "['order', 'name']", 'object_name': 'ProjectPart'},
            'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'main_project_parts': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['public_project.ProjectPart']", 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
            'order': ('django.db.models.fields.IntegerField', [], {'default': '500', 'null': 'True', 'blank': 'True'})
        },
        u'public_project.question': {
            'Meta': {'ordering': "['title']", 'object_name': 'Question'},
            'answer': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'answered': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'documents': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_documents'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Document']"}),
            'events': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_questions'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Event']"}),
            'explanations': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'participants': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_questions'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Participant']"}),
            'project_parts': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_questions'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.ProjectPart']"}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '250'})
        },
        u'public_project.researchrequest': {
            'Meta': {'ordering': "['-date_added']", 'object_name': 'ResearchRequest'},
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nr': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
            'open': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '250'})
        },
        u'public_project.researchrequestrelation': {
            'Meta': {'object_name': 'ResearchRequestRelation'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'page': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'research_request': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.ResearchRequest']"})
        },
        u'public_project.searchtag': {
            'Meta': {'ordering': "['order']", 'object_name': 'SearchTag'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'order': ('django.db.models.fields.IntegerField', [], {'default': '100', 'null': 'True', 'blank': 'True'})
        },
        u'public_project.searchtagcacheentry': {
            'Meta': {'ordering': "['-num_results']", 'object_name': 'SearchTagCacheEntry'},
            'document': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.Document']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'num_results': ('django.db.models.fields.IntegerField', [], {}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['public_project.SearchTag']"})
        },
        u'public_project.sitecategory': {
            'Meta': {'object_name': 'SiteCategory'},
            'category': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'documents': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_site_categories'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['public_project.Document']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'intro_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        u'public_project.siteconfig': {
            'Meta': {'object_name': 'SiteConfig'},
            'about_text': ('django.db.models.fields.TextField', [], {'default': "u'About text'"}),
            'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'contact_text': ('django.db.models.fields.TextField', [], {'default': "u'This text will be shown on the contact page.'"}),
            'footer': ('django.db.models.fields.TextField', [], {'default': "u'This text will be shown in the footer of the site.'"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'intro_text': ('django.db.models.fields.TextField', [], {'default': "u'This is a project watch website.'"}),
            'short_title': ('django.db.models.fields.CharField', [], {'default': "u'ProjectWatch'", 'max_length': '250'}),
            'title': ('django.db.models.fields.CharField', [], {'default': "u'ProjectWatch'", 'max_length': '250'}),
            'title_color': ('django.db.models.fields.CharField', [], {'default': "'#990000'", 'max_length': '7'})
        },
        u'public_project.userprofile': {
            'Meta': {'object_name': 'UserProfile'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'receive_new_comment_emails': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
        },
        u'public_project.websource': {
            'Meta': {'ordering': "['order']", 'object_name': 'WebSource'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'order': ('django.db.models.fields.IntegerField', [], {'default': '100', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
        }
    }

    complete_apps = ['public_project']
|
bsd-3-clause
| -2,835,805,165,672,189,400
| 81.18797
| 227
| 0.558803
| false
| 3.71849
| true
| false
| false
|
neuro-ml/reskit
|
reskit/core.py
|
1
|
21723
|
""" Core classes. """
from sklearn.externals.joblib import Parallel, delayed
from sklearn.model_selection import cross_val_score, cross_val_predict
from sklearn.model_selection import GridSearchCV
from sklearn.metrics.scorer import check_scoring
from sklearn.pipeline import Pipeline
from sklearn.base import TransformerMixin, BaseEstimator
from collections import OrderedDict
from itertools import product
from pandas import DataFrame
from pickle import dump, load
from numpy import mean, std, hstack, vstack, zeros, array
from time import time
import os
class Pipeliner(object):
"""
An object which allows you to test different data preprocessing
pipelines and prediction models at once.
You will need to specify a name of each preprocessing and prediction
step and possible objects performing each step. Then Pipeliner will
combine these steps to different pipelines, excluding forbidden
combinations; perform experiments according to these steps and present
results in convenient csv table. For example, for each pipeline's
classifier, Pipeliner will grid search on cross-validation to find the best
classifier's parameters and report metric mean and std for each tested
pipeline. Pipeliner also allows you to cache interim calculations to
avoid unnecessary recalculations.
Parameters
----------
steps : list of tuples
List of (step_name, transformers) tuples, where transformers is a
list of tuples (step_transformer_name, transformer). ``Pipeliner``
will create ``plan_table`` from this ``steps``, combining all
possible combinations of transformers, switching transformers on
each step.
eval_cv : int, cross-validation generator or an iterable, optional
Determines the evaluation cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross validation,
- integer, to specify the number of folds in a ``(Stratified)KFold``,
- An object to be used as cross-validation generator.
- A list or iterable yielding train, test splits.
For integer/None inputs, if the estimator is a classifier and ``y``
is either binary or multiclass, ``StratifiedKFold`` is used. In all
other cases, ``KFold`` is used.
Refer scikit-learn ``User Guide`` for the various cross-validation strategies that
can be used here.
grid_cv : int, cross-validation generator or an iterable, optional
Determines the grid search cross-validation splitting strategy.
Possible inputs for cv are the same as for ``eval_cv``.
param_grid : dict of dictionaries
Dictionary with classifiers names (string) as keys. The keys are
possible classifiers names in ``steps``. Each key corresponds to
grid search parameters.
banned_combos : list of tuples
List of (transformer_name_1, transformer_name_2) tuples. Each row
with both transformers will be removed from ``plan_table``.
Attributes
----------
plan_table : pandas DataFrame
Plan of pipelines evaluation. Created from ``steps``.
named_steps: dict of dictionaries
Dictionary with steps names as keys. Each key corresponds to
dictionary with transformers names from ``steps`` as keys.
You can get any transformer object from this dictionary.
Examples
--------
>>> from sklearn.datasets import make_classification
>>> from sklearn.preprocessing import StandardScaler
>>> from sklearn.preprocessing import MinMaxScaler
>>> from sklearn.model_selection import StratifiedKFold
>>> from sklearn.linear_model import LogisticRegression
>>> from sklearn.svm import SVC
>>> from reskit.core import Pipeliner
>>> X, y = make_classification()
>>> scalers = [('minmax', MinMaxScaler()), ('standard', StandardScaler())]
>>> classifiers = [('LR', LogisticRegression()), ('SVC', SVC())]
>>> steps = [('Scaler', scalers), ('Classifier', classifiers)]
>>> grid_cv = StratifiedKFold(n_splits=5, shuffle=True, random_state=0)
>>> eval_cv = StratifiedKFold(n_splits=5, shuffle=True, random_state=1)
>>> param_grid = {'LR' : {'penalty' : ['l1', 'l2']},
>>> 'SVC' : {'kernel' : ['linear', 'poly', 'rbf', 'sigmoid']}}
>>> pipe = Pipeliner(steps, eval_cv=eval_cv, grid_cv=grid_cv, param_grid=param_grid)
>>> pipe.get_results(X=X, y=y, scoring=['roc_auc'])
"""
def __init__(self, steps, grid_cv, eval_cv, param_grid=dict(),
banned_combos=list()):
steps = OrderedDict(steps)
columns = list(steps)
for column in columns:
steps[column] = OrderedDict(steps[column])
def accept_from_banned_combos(row_keys, banned_combo):
if set(banned_combo) - set(row_keys) == set():
return False
else:
return True
column_keys = [list(steps[column]) for column in columns]
plan_rows = list()
for row_keys in product(*column_keys):
accept = list()
for bnnd_cmb in banned_combos:
accept += [accept_from_banned_combos(row_keys, bnnd_cmb)]
if all(accept):
row_of_plan = OrderedDict()
for column, row_key in zip(columns, row_keys):
row_of_plan[column] = row_key
plan_rows.append(row_of_plan)
self.plan_table = DataFrame().from_dict(plan_rows)[columns]
self.named_steps = steps
self.eval_cv = eval_cv
self.grid_cv = grid_cv
self.param_grid = param_grid
self._cached_X = OrderedDict()
self.best_params = dict()
self.scores = dict()
def get_results(self, X, y=None, caching_steps=list(), scoring='accuracy',
logs_file='results.log', collect_n=None):
"""
Gives results dataframe by defined pipelines.
Parameters
----------
X : array-like
The data to fit. Can be, for example a list, or an array at least 2d, or
dictionary.
y : array-like, optional, default: None
The target variable to try to predict in the case of supervised learning.
caching_steps : list of strings
Steps which won’t be recalculated for each new pipeline.
If in previous pipeline exists the same steps, ``Pipeliner``
will start from this step.
scoring : string, callable or None, default=None
A string (see model evaluation documentation) or a scorer
callable object / function with signature
``scorer(estimator, X, y)``. If None, the score method of
the estimator is used.
logs_file : string
File name where logs will be saved.
collect_n : int
If not None scores will be calculated in following way. Each
score will be corresponds to average score on cross-validation
scores. The only thing that is changing for each score is
random_state, it shifts.
Returns
-------
results : DataFrame
Dataframe with all results about pipelines.
"""
if isinstance(scoring, str):
scoring = [scoring]
columns = list(self.plan_table.columns)
without_caching = [step for step in columns
if step not in caching_steps]
for metric in scoring:
grid_steps = ['grid_{}_mean'.format(metric),
'grid_{}_std'.format(metric),
'grid_{}_best_params'.format(metric)]
eval_steps = ['eval_{}_mean'.format(metric),
'eval_{}_std'.format(metric),
'eval_{}_scores'.format(metric)]
columns += grid_steps + eval_steps
results = DataFrame(columns=columns)
columns = list(self.plan_table.columns)
results[columns] = self.plan_table
with open(logs_file, 'w+') as logs:
N = len(self.plan_table.index)
for idx in self.plan_table.index:
print('Line: {}/{}'.format(idx + 1, N))
logs.write('Line: {}/{}\n'.format(idx + 1, N))
logs.write('{}\n'.format(str(self.plan_table.loc[idx])))
row = self.plan_table.loc[idx]
caching_keys = list(row[caching_steps].values)
time_point = time()
X_featured, y = self.transform_with_caching(X, y, caching_keys)
spent_time = round(time() - time_point, 3)
logs.write('Got Features: {} sec\n'.format(spent_time))
for metric in scoring:
logs.write('Scoring: {}\n'.format(metric))
ml_keys = list(row[without_caching].values)
time_point = time()
grid_res = self.get_grid_search_results(X_featured, y,
ml_keys,
metric)
spent_time = round(time() - time_point, 3)
logs.write('Grid Search: {} sec\n'.format(spent_time))
logs.write('Grid Search Results: {}\n'.format(grid_res))
for key, value in grid_res.items():
results.loc[idx][key] = value
time_point = time()
scores = self.get_scores(X_featured, y,
ml_keys,
metric,
collect_n)
spent_time = round(time() - time_point, 3)
logs.write('Got Scores: {} sec\n'.format(spent_time))
mean_key = 'eval_{}_mean'.format(metric)
scores_mean = mean(scores)
results.loc[idx][mean_key] = scores_mean
logs.write('Scores mean: {}\n'.format(scores_mean))
std_key = 'eval_{}_std'.format(metric)
scores_std = std(scores)
results.loc[idx][std_key] = scores_std
logs.write('Scores std: {}\n'.format(scores_std))
scores_key = 'eval_{}_scores'.format(metric)
results.loc[idx][scores_key] = str(scores)
logs.write('Scores: {}\n\n'.format(str(scores)))
return results
def transform_with_caching(self, X, y, row_keys):
"""
Transforms ``X`` with caching.
Parameters
----------
X : array-like
The data to fit. Can be, for example a list, or an array at least 2d, or
dictionary.
y : array-like, optional, default: None
The target variable to try to predict in the case of supervised learning.
row_keys : list of strings
List of transformers names. ``Pipeliner`` takes
transformers from ``named_steps`` using keys from
``row_keys`` and creates pipeline to transform.
Returns
-------
transformed_data : (X, y) tuple, where X and y array-like
Data transformed corresponding to pipeline, created from
``row_keys``, to (X, y) tuple.
"""
columns = list(self.plan_table.columns[:len(row_keys)])
def remove_unmatched_caching_X(row_keys):
cached_keys = list(self._cached_X)
unmatched_caching_keys = cached_keys.copy()
for row_key, cached_key in zip(row_keys, cached_keys):
if not row_key == cached_key:
break
unmatched_caching_keys.remove(row_key)
for unmatched_caching_key in unmatched_caching_keys:
del self._cached_X[unmatched_caching_key]
def transform_X_from_last_cached(row_keys, columns):
prev_key = list(self._cached_X)[-1]
for row_key, column in zip(row_keys, columns):
transformer = self.named_steps[column][row_key]
X = self._cached_X[prev_key]
self._cached_X[row_key] = transformer.fit_transform(X)
prev_key = row_key
if 'init' not in self._cached_X:
self._cached_X['init'] = X
transform_X_from_last_cached(row_keys, columns)
else:
row_keys = ['init'] + row_keys
columns = ['init'] + columns
remove_unmatched_caching_X(row_keys)
cached_keys = list(self._cached_X)
cached_keys_length = len(cached_keys)
for i in range(cached_keys_length):
del row_keys[0]
del columns[0]
transform_X_from_last_cached(row_keys, columns)
last_cached_key = list(self._cached_X)[-1]
return self._cached_X[last_cached_key], y
def get_grid_search_results(self, X, y, row_keys, scoring):
"""
Make grid search for pipeline, created from ``row_keys`` for
defined ``scoring``.
Parameters
----------
X : array-like
The data to fit. Can be, for example a list, or an array at least 2d, or
dictionary.
y : array-like, optional, default: None
The target variable to try to predict in the case of supervised learning.
row_keys : list of strings
List of transformers names. ``Pipeliner`` takes transformers
from ``named_steps`` using keys from ``row_keys`` and creates
pipeline to transform.
scoring : string, callable or None, default=None
A string (see model evaluation documentation) or a scorer
callable object / function with signature
``scorer(estimator, X, y)``. If None, the score method of the
estimator is used.
Returns
-------
results : dict
Dictionary with keys: ‘grid_{}_mean’, ‘grid_{}_std’ and
‘grid_{}_best_params’. In the middle of keys will be
corresponding scoring.
"""
classifier_key = row_keys[-1]
if classifier_key in self.param_grid:
columns = list(self.plan_table.columns)[-len(row_keys):]
steps = list()
for row_key, column in zip(row_keys, columns):
steps.append((row_key, self.named_steps[column][row_key]))
param_grid = dict()
for key, value in self.param_grid[classifier_key].items():
param_grid['{}__{}'.format(classifier_key, key)] = value
self.asdf = param_grid
self.asdfasdf = self.param_grid[classifier_key]
grid_clf = GridSearchCV(estimator=Pipeline(steps),
param_grid=param_grid,
scoring=scoring,
n_jobs=-1,
cv=self.grid_cv)
grid_clf.fit(X, y)
best_params = dict()
classifier_key_len = len(classifier_key)
for key, value in grid_clf.best_params_.items():
key = key[classifier_key_len + 2:]
best_params[key] = value
param_key = ''.join(row_keys) + str(scoring)
self.best_params[param_key] = best_params
results = dict()
for i, params in enumerate(grid_clf.cv_results_['params']):
if params == grid_clf.best_params_:
k = 'grid_{}_mean'.format(scoring)
results[k] = grid_clf.cv_results_['mean_test_score'][i]
k = 'grid_{}_std'.format(scoring)
results[k] = grid_clf.cv_results_['std_test_score'][i]
k = 'grid_{}_best_params'.format(scoring)
results[k] = str(best_params)
return results
else:
param_key = ''.join(row_keys) + str(scoring)
self.best_params[param_key] = dict()
results = dict()
results['grid_{}_mean'.format(scoring)] = 'NaN'
results['grid_{}_std'.format(scoring)] = 'NaN'
results['grid_{}_best_params'.format(scoring)] = 'NaN'
return results
def get_scores(self, X, y, row_keys, scoring, collect_n=None):
    """
    Gives scores for prediction on cross-validation.

    Parameters
    ----------
    X : array-like
        The data to fit. Can be, for example a list, or an array at least 2d, or
        dictionary.
    y : array-like, optional, default: None
        The target variable to try to predict in the case of supervised learning.
    row_keys : list of strings
        List of transformers names. ``Pipeliner`` takes transformers
        from ``named_steps`` using keys from ``row_keys`` and creates
        pipeline to transform.
    scoring : string, callable or None, default=None
        A string (see model evaluation documentation) or a scorer
        callable object / function with signature
        ``scorer(estimator, X, y)``. If None, the score method of the
        estimator is used.
    collect_n : int, optional, default: None
        Number of repeated cross-validation predictions to collect.
        (Used as ``range(collect_n)`` below; the value must be an
        integer, not a list of strings.)  When given, each repetition
        uses an incremented ``eval_cv.random_state``.

    Returns
    -------
    scores : array-like
        Scores calculated on cross-validation.
    """
    # Columns of the plan table that correspond to the requested steps.
    columns = list(self.plan_table.columns)[-len(row_keys):]
    # Key under which grid_search() stored the best hyper-parameters.
    param_key = ''.join(row_keys) + str(scoring)
    steps = list()
    for row_key, column in zip(row_keys, columns):
        steps.append((row_key, self.named_steps[column][row_key]))
    # Apply the previously found best params to the final estimator.
    steps[-1][1].set_params(**self.best_params[param_key])
    if not collect_n:
        scores = cross_val_score(Pipeline(steps), X, y,
                                 scoring=scoring,
                                 cv=self.eval_cv,
                                 n_jobs=-1)
    else:
        # Repeat CV prediction collect_n times with different CV seeds;
        # the original random_state is restored afterwards, so the order
        # of these statements matters.
        init_random_state = self.eval_cv.random_state
        scores = list()
        for i in range(collect_n):
            fold_prediction = cross_val_predict(Pipeline(steps), X, y,
                                                cv=self.eval_cv,
                                                n_jobs=-1)
            # NOTE(review): reaches into the scorer's private
            # ``_score_func`` attribute -- fragile across sklearn
            # versions; verify when upgrading.
            metric = check_scoring(steps[-1][1],
                                   scoring=scoring).__dict__['_score_func']
            scores.append(metric(y, fold_prediction))
            self.eval_cv.random_state += 1
        self.eval_cv.random_state = init_random_state
    return scores
class MatrixTransformer(TransformerMixin, BaseEstimator):
    """
    Wraps a plain function as a scikit-learn transformer applied item-wise.

    Each element ``X[i]`` of the input is passed through ``func`` and the
    results are stacked into a single numpy array.

    Parameters
    ----------
    func : callable
        A function that transforms one matrix ``X[i]`` of the input data.
    params : dict
        Keyword arguments forwarded to ``func`` on every call.
    """

    def __init__(
            self,
            func,
            **params):
        self.func = func
        self.params = params

    def fit(self, X, y=None, **fit_params):
        """
        No-op fit (stateless transformer); returns self.

        Parameters
        ----------
        X : array-like
            The data to fit. Expected to be indexable (e.g. a 3D array).
        y : array-like, optional, default: None
            The target variable to try to predict in the case of supervised learning.
        """
        return self

    def transform(self, X, y=None):
        """
        Apply ``func`` to every ``X[i]`` and return the stacked result.

        Parameters
        ----------
        X : array-like
            The data to transform; each ``X[i]`` is fed to ``func``.
        y : array-like, optional, default: None
            The target variable to try to predict in the case of supervised learning.
        """
        # The original built the list with a manual append loop over a
        # shallow X.copy(); the copy offered no protection (elements are
        # shared) and the loop is clearer as a comprehension.
        return array([self.func(X[i], **self.params) for i in range(len(X))])
class DataTransformer(TransformerMixin, BaseEstimator):
    """
    Adapter that turns an ordinary function into a scikit-learn transformer.

    Parameters
    ----------
    func : callable
        A function that transforms the whole input data object.
    params : dict
        Keyword arguments forwarded to ``func``.
    """

    def __init__(
            self,
            func,
            **params):
        self.func = func
        self.params = params

    def fit(self, X, y=None, **fit_params):
        """
        No-op fit (this transformer keeps no fitted state); returns self.

        Parameters
        ----------
        X : array-like
            The data to fit. Can be, for example a list, or an array at least 2d, or
            dictionary.
        y : array-like, optional, default: None
            The target variable to try to predict in the case of supervised learning.
        """
        return self

    def transform(self, X, y=None):
        """
        Run ``func`` on a shallow copy of ``X`` and return its result.

        Parameters
        ----------
        X : array-like
            The data to transform. Can be, for example a list, or an array at
            least 2d, or dictionary.
        y : array-like, optional, default: None
            The target variable to try to predict in the case of supervised learning.
        """
        # Shallow-copy so func cannot rebind the caller's top-level container.
        data = X.copy()
        return self.func(data, **self.params)
# Public API of this module.
__all__ = ['MatrixTransformer',
           'DataTransformer',
           'Pipeliner']
|
bsd-3-clause
| 620,806,418,496,660,200
| 36.42931
| 90
| 0.554655
| false
| 4.429504
| false
| false
| false
|
lcary/nbd
|
nbd/export.py
|
1
|
3791
|
from abc import (ABCMeta, abstractmethod, abstractproperty)
from os import path as ospath
import logging
import nbformat
from nbconvert import (PythonExporter, RSTExporter)
from nbd.fileops import (get_file_id, write_file)
# Identifiers for the supported notebook export formats.
EXPORT_FORMAT_PYTHON = 'python'
EXPORT_FORMAT_RST = 'rst'

# Module-level logger.  NOTE(review): this grabs the root logger;
# logging.getLogger(__name__) is the more conventional choice -- confirm
# the intent before changing.
logger = logging.getLogger()
class ExporterWrapper(object, metaclass=ABCMeta):
    """
    Abstract base for nbconvert exporter wrappers.

    Concrete subclasses must provide the ``file_extension`` property and
    the ``export`` method, and set ``self.exporter`` to an nbconvert
    exporter instance before ``_export_content`` is used.

    BUG FIX: the original declared ``__metaclass__ = ABCMeta``, which is
    the Python 2 spelling and has no effect on Python 3 -- the abstract
    members were silently not enforced.  Declaring the metaclass in the
    class header restores the intended ABC behaviour.
    """

    NOT_IMPL_MSG = 'Exporter wrapper not implemented.'

    @abstractproperty
    def file_extension(self):
        """File extension (without the dot) used for exported files."""
        raise NotImplementedError(self.NOT_IMPL_MSG)

    @abstractmethod
    def export(self, basename, notebook_node, filepath):
        """Export ``notebook_node`` named ``basename`` to the given location."""
        raise NotImplementedError(self.NOT_IMPL_MSG)

    def _export_content(self, notebook_node, filepath):
        """
        Exports notebook data in a given format to a file in the output dir.
        Returns notebook content and resources.
        """
        (content, resources) = self.exporter.from_notebook_node(notebook_node)
        write_file(filepath, content, write_mode='w')
        return (content, resources)

    def _get_filepath(self, output_dir, basename):
        """Join ``output_dir`` with ``basename`` plus this wrapper's extension."""
        filename = "{}.{}".format(basename, self.file_extension)
        return ospath.join(output_dir, filename)
class PythonExporterWrapper(ExporterWrapper):
    """Exports notebooks as ``.py`` scripts via nbconvert's PythonExporter."""

    def __init__(self):
        self.exporter = PythonExporter()

    @property
    def file_extension(self):
        """Extension used for Python exports."""
        return 'py'

    def export(self, basename, notebook_node, output_dir):
        """
        Exports notebook data in python format.
        """
        target = self._get_filepath(output_dir, basename)
        self._export_content(notebook_node, target)
class RSTExporterWrapper(ExporterWrapper):
    """Exports notebooks as ``.rst`` documents plus any embedded resources."""

    def __init__(self):
        self.exporter = RSTExporter()

    @property
    def file_extension(self):
        """Extension used for reStructuredText exports."""
        return 'rst'

    def export(self, basename, notebook_node, output_dir):
        """
        Exports notebook data in rst format, then writes out any extra
        resources (e.g. images) produced by the conversion.
        """
        filepath = self._get_filepath(output_dir, basename)
        (content, resources) = self._export_content(notebook_node, filepath)
        self._export_resources(basename, output_dir, resources)

    def _export_resources(self, basename, output_dir, resources):
        """
        Exports any additional resources (e.g. PNG files in notebook).
        Best-effort: absent or malformed resources are logged and skipped.
        """
        # BUG FIX: the original caught only AttributeError, but a plain
        # dict without an 'outputs' key raises KeyError and a None
        # ``resources`` raises TypeError -- both crashed the export
        # instead of being skipped as intended.
        try:
            for (filename, b64data) in resources['outputs'].items():
                filepath = self._get_resource_filepath(output_dir, basename, filename)
                write_file(filepath, b64data, write_mode='wb')
        except (AttributeError, KeyError, TypeError):
            logger.debug('Unable to find resources in notebook when exporting RST.')

    @classmethod
    def _get_resource_filepath(cls, output_dir, basename, filename):
        """Build a collision-safe output path for a notebook resource file."""
        filename = get_file_id(basename + "__" + filename)
        return ospath.join(output_dir, filename)
class NotebookExporter(object):
    """
    Process a list of notebooks by creating a directory and exporting
    notebooks to the specified formats (python, rst, and binary files).
    """

    DEFAULT_EXPORT_FORMATS = (EXPORT_FORMAT_PYTHON, EXPORT_FORMAT_RST)

    def __init__(self, nbformat_version, export_formats=None):
        self.nbformat_version = nbformat_version
        self._export_formats = self._get_export_formats(export_formats)
        self.python_exporter = PythonExporterWrapper()
        self.rst_exporter = RSTExporterWrapper()

    def _get_export_formats(self, export_formats):
        """Return the requested formats, defaulting when none are given."""
        if export_formats is None:
            return list(self.DEFAULT_EXPORT_FORMATS)
        return export_formats

    def process_notebook(self, basename, filepath, output_dir):
        """
        Reads a notebook of a given format, then exports data.
        """
        node = nbformat.read(filepath, as_version=self.nbformat_version)
        wanted = self._export_formats
        if EXPORT_FORMAT_PYTHON in wanted:
            self.python_exporter.export(basename, node, output_dir)
        if EXPORT_FORMAT_RST in wanted:
            self.rst_exporter.export(basename, node, output_dir)
|
mit
| 7,295,198,112,324,375,000
| 30.330579
| 78
| 0.710367
| false
| 3.837045
| false
| false
| false
|
GeosoftInc/gxpy
|
geosoft/gxapi/GXSTR.py
|
1
|
48660
|
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref
### endblock ClassImports
### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header
### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXSTR(gxapi_cy.WrapSTR):
"""
GXSTR class.
This library is not a class. Use the `GXSTR <geosoft.gxapi.GXSTR>` library functions
to work with and manipulate string variables. Since the
GX Programming Language does not provide string literal
tokens, you must use these functions for any string operations
you want to perform.
"""
def __init__(self, handle=0):
super(GXSTR, self).__init__(GXContext._get_tls_geo(), handle)
@classmethod
def null(cls):
"""
A null (undefined) instance of `GXSTR <geosoft.gxapi.GXSTR>`
:returns: A null `GXSTR <geosoft.gxapi.GXSTR>`
:rtype: GXSTR
"""
return GXSTR()
def is_null(self):
"""
Check if this is a null (undefined) instance
:returns: True if this is a null (undefined) instance, False otherwise.
:rtype: bool
"""
return self._internal_handle() == 0
# Data Input
@classmethod
def scan_i(cls, str_val):
"""
Convert a string to a GX int.
:param str_val: String to convert to an integer
:type str_val: str
:returns: Resulting Integer, `iDUMMY <geosoft.gxapi.iDUMMY>` is bad integer
:rtype: int
.. versionadded:: 6.0.1
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = gxapi_cy.WrapSTR._scan_i(GXContext._get_tls_geo(), str_val.encode())
return ret_val
@classmethod
def scan_date(cls, str_val, type):
"""
Convert a date string to a GX real.
:param str_val: Date string
:param type: :ref:`DATE_FORMAT`
:type str_val: str
:type type: int
:returns: Resulting Real, `rDUMMY <geosoft.gxapi.rDUMMY>` if conversion fails.
:rtype: float
.. versionadded:: 6.0.1
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** OLD usage, use ScanForm_STR instead.
"""
ret_val = gxapi_cy.WrapSTR._scan_date(GXContext._get_tls_geo(), str_val.encode(), type)
return ret_val
@classmethod
def scan_form(cls, str_val, type):
"""
Convert a formated string to a real.
:param str_val: Date string
:param type: :ref:`GS_FORMATS`
:type str_val: str
:type type: int
:returns: Resulting Real, `rDUMMY <geosoft.gxapi.rDUMMY>` if conversion fails.
:rtype: float
.. versionadded:: 6.0.1
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = gxapi_cy.WrapSTR._scan_form(GXContext._get_tls_geo(), str_val.encode(), type)
return ret_val
@classmethod
def scan_r(cls, str_val):
"""
Convert a string to a GX real.
:param str_val: String to convert to a real
:type str_val: str
:returns: Resulting Real, `rDUMMY <geosoft.gxapi.rDUMMY>` if bad string.
:rtype: float
.. versionadded:: 6.0.1
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = gxapi_cy.WrapSTR._scan_r(GXContext._get_tls_geo(), str_val.encode())
return ret_val
@classmethod
def scan_time(cls, str_val, type):
"""
Convert a time string to a GX real.
:param str_val: Date string
:param type: :ref:`TIME_FORMAT`
:type str_val: str
:type type: int
:returns: Resulting Real, `rDUMMY <geosoft.gxapi.rDUMMY>` if conversion fails.
:rtype: float
.. versionadded:: 6.0.1
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** OLD usage, use ScanForm_STR instead.
"""
ret_val = gxapi_cy.WrapSTR._scan_time(GXContext._get_tls_geo(), str_val.encode(), type)
return ret_val
# File Name
@classmethod
def file_combine_parts(cls, drive, dir, file, ext, qual, file_name):
"""
Combine file parts to build a file name.
:param drive: Drive
:param dir: Directory
:param file: Name
:param ext: Extension
:param qual: Qualifiers
:param file_name: Destination string, can be same as input
:type drive: str
:type dir: str
:type file: str
:type ext: str
:type qual: str
:type file_name: str_ref
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
file_name.value = gxapi_cy.WrapSTR._file_combine_parts(GXContext._get_tls_geo(), drive.encode(), dir.encode(), file.encode(), ext.encode(), qual.encode(), file_name.value.encode())
@classmethod
def file_ext(cls, ifile, ext, ofile, opt):
"""
Add a file extension onto a file name string.
:param ifile: File name to extend
:param ext: Extension if "", extenstion and '.' are stripped.
:param ofile: Extended file name (can be same as input)
:param opt: :ref:`FILE_EXT`
:type ifile: str
:type ext: str
:type ofile: str_ref
:type opt: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ofile.value = gxapi_cy.WrapSTR._file_ext(GXContext._get_tls_geo(), ifile.encode(), ext.encode(), ofile.value.encode(), opt)
@classmethod
def file_name_part(cls, file, file_part, part):
"""
Get part of a file name.
:param file: File name
:param file_part: Destination string, can be same as input
:param part: :ref:`STR_FILE_PART`
:type file: str
:type file_part: str_ref
:type part: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
file_part.value = gxapi_cy.WrapSTR._file_name_part(GXContext._get_tls_geo(), file.encode(), file_part.value.encode(), part)
@classmethod
def get_m_file(cls, in_str, out_str, index):
"""
Get the indexed filepath from a multiple filepath string
:param in_str: Input multifile string
:param out_str: Output filepath string
:param index: Index of file
:type in_str: str
:type out_str: str_ref
:type index: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** The multifile string must use '|' as a delimiter.
Do not pass a string after calling `tokenize <geosoft.gxapi.GXSTR.tokenize>`.
"""
out_str.value = gxapi_cy.WrapSTR._get_m_file(GXContext._get_tls_geo(), in_str.encode(), out_str.value.encode(), index)
@classmethod
def remove_qualifiers(cls, ifile, ofile):
"""
Remove file qualifiers from a file name
:param ifile: Input file name
:param ofile: Output file name (can be same as input)
:type ifile: str
:type ofile: str_ref
.. versionadded:: 7.0.1
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ofile.value = gxapi_cy.WrapSTR._remove_qualifiers(GXContext._get_tls_geo(), ifile.encode(), ofile.value.encode())
# Formating
@classmethod
def format_crc(cls, pul_crc, buff, width):
"""
Convert a GX CRC value to a string.
:param pul_crc: CRC value to format
:param buff: Resulting string
:param width: Width of the field
:type pul_crc: int
:type buff: str_ref
:type width: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
buff.value = gxapi_cy.WrapSTR._format_crc(GXContext._get_tls_geo(), pul_crc, buff.value.encode(), width)
@classmethod
def format_date(cls, real, buff, width, type):
"""
Convert a GX real to a date string.
:param real: Date value in decimal years to format
:param buff: Resulting string
:param width: Width of the field
:param type: :ref:`DATE_FORMAT`
:type real: float
:type buff: str_ref
:type width: int
:type type: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
buff.value = gxapi_cy.WrapSTR._format_date(GXContext._get_tls_geo(), real, buff.value.encode(), width, type)
@classmethod
def format_i(cls, value, buff, width):
"""
Convert a GX int to a string.
:param value: Value to format
:param buff: Resulting string
:param width: Width of the field
:type value: int
:type buff: str_ref
:type width: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
buff.value = gxapi_cy.WrapSTR._format_i(GXContext._get_tls_geo(), value, buff.value.encode(), width)
@classmethod
def format_r(cls, real, buff, width, sig):
"""
Convert a GX real to a string with significant digits.
:param real: Value to format
:param buff: Resulting string
:param width: Width of the field
:param sig: Significant digits
:type real: float
:type buff: str_ref
:type width: int
:type sig: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
buff.value = gxapi_cy.WrapSTR._format_r(GXContext._get_tls_geo(), real, buff.value.encode(), width, sig)
@classmethod
def format_r2(cls, real, buff, width, sig):
"""
Convert a GX real to a string with given decimals.
:param real: Value to format
:param buff: Resulting string
:param width: Width of the field
:param sig: Decimals
:type real: float
:type buff: str_ref
:type width: int
:type sig: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
buff.value = gxapi_cy.WrapSTR._format_r2(GXContext._get_tls_geo(), real, buff.value.encode(), width, sig)
@classmethod
def format_double(cls, real, buff, type, width, dec):
"""
Convert a GX real to a string.
:param real: Value to format
:param buff: Resulting string
:param type: :ref:`GS_FORMATS`
:param width: Width of the field
:param dec: Significant digits/decimals
:type real: float
:type buff: str_ref
:type type: int
:type width: int
:type dec: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
buff.value = gxapi_cy.WrapSTR._format_double(GXContext._get_tls_geo(), real, buff.value.encode(), type, width, dec)
@classmethod
def format_time(cls, real, buff, width, deci, type):
"""
Convert a GX real to a time string.
:param real: Time value in decimal hours to format
:param buff: Resulting string
:param width: Width of the field
:param deci: Decimals to format with
:param type: :ref:`TIME_FORMAT`
:type real: float
:type buff: str_ref
:type width: int
:type deci: int
:type type: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
buff.value = gxapi_cy.WrapSTR._format_time(GXContext._get_tls_geo(), real, buff.value.encode(), width, deci, type)
# General
@classmethod
def escape(cls, str_val, opt):
"""
Convert/replace escape sequences in strings.
:param str_val: String to modify
:param opt: :ref:`STR_ESCAPE`
:type str_val: str_ref
:type opt: int
.. versionadded:: 5.0.6
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** Escape characters:
\\a bell
\\b backspace
\\f formfeed
\\n new line
\\r carriage return
\\t tab
\\v vertical tab
\\" quote character
\\x take 'x' literally
\\ backslash
\\ooo octal up to 3 characters
\\xhh hex up to 2 characters
A common use of this function is to convert double-quote characters in
a user unput string to \\" so the string can be placed in a tokenized
string.
"""
str_val.value = gxapi_cy.WrapSTR._escape(GXContext._get_tls_geo(), str_val.value.encode(), opt)
@classmethod
def char_(cls, str_val):
"""
Returns the ASCII value of a character.
:param str_val: String to return ascii value of first character
:type str_val: str
:returns: ASCII value of first character in string.
:rtype: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = gxapi_cy.WrapSTR._char_(GXContext._get_tls_geo(), str_val.encode())
return ret_val
@classmethod
def char_n(cls, str_val, c, max):
"""
Returns the ASCII value of the n'th character.
:param str_val: String
:param c: Character to get
:param max: Maximum string length (unused)
:type str_val: str
:type c: int
:type max: int
:returns: ASCII value of n'th character in string.
The first character is 0.
:rtype: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = gxapi_cy.WrapSTR._char_n(GXContext._get_tls_geo(), str_val.encode(), c, max)
return ret_val
@classmethod
def justify(cls, in_str, out_str, width, just):
"""
Justify a string
:param in_str: String to justify
:param out_str: Result string, can be same as input
:param width: Justification width
:param just: :ref:`STR_JUSTIFY`
:type in_str: str
:type out_str: str_ref
:type width: int
:type just: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** If the string is too big to fit in the number of display characters,
the output string will be "**" justified as specified.
"""
out_str.value = gxapi_cy.WrapSTR._justify(GXContext._get_tls_geo(), in_str.encode(), out_str.value.encode(), width, just)
@classmethod
def replacei_match_string(cls, istr, old, new_str):
"""
Replaces all occurances of match string by replacement string with case insensitive.
:param istr: Destination String
:param old: Match string to replace
:param new_str: Replacement string
:type istr: str_ref
:type old: str
:type new_str: str
.. versionadded:: 7.0.1
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** If the replacement string is "" (NULL character)
then the string to replace is removed from the
input string, and the string is shortened.
"""
istr.value = gxapi_cy.WrapSTR._replacei_match_string(GXContext._get_tls_geo(), istr.value.encode(), old.encode(), new_str.encode())
@classmethod
def replace_match_string(cls, istr, old, new_str):
"""
Replaces all occurances of match string by replacement string with case sensitive.
:param istr: Destination String
:param old: Match string to replace
:param new_str: Replacement string
:type istr: str_ref
:type old: str
:type new_str: str
.. versionadded:: 7.0.1
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** If the replacement string is "" (NULL character)
then the string to replace is removed from the
input string, and the string is shortened.
"""
istr.value = gxapi_cy.WrapSTR._replace_match_string(GXContext._get_tls_geo(), istr.value.encode(), old.encode(), new_str.encode())
@classmethod
def set_char_n(cls, str_val, c, ascii):
"""
Set the n'th character of a string using an ASCII value
:param str_val: String
:param c: Character to set
:param ascii: ASCII value
:type str_val: str_ref
:type c: int
:type ascii: int
.. versionadded:: 5.1.4
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
str_val.value = gxapi_cy.WrapSTR._set_char_n(GXContext._get_tls_geo(), str_val.value.encode(), c, ascii)
@classmethod
def split_string(cls, origstr, ch, split):
"""
Splits a string in two on a character.
:param origstr: Original string
:param ch: Split character (first character of string)
:param split: Split string past split character.
:type origstr: str_ref
:type ch: str
:type split: str_ref
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** The original string is modified by terminating it
at the character split.
The part of the string past the character split is
copied to the split string.
Split characters in quoted strings are ignored.
This function is mainly intended to separate comments
from control file strings.
"""
origstr.value, split.value = gxapi_cy.WrapSTR._split_string(GXContext._get_tls_geo(), origstr.value.encode(), ch.encode(), split.value.encode())
@classmethod
def strcat(cls, dest, orig):
"""
This method contatinates a string.
:param dest: Destination String
:param orig: String to add
:type dest: str_ref
:type orig: str
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
dest.value = gxapi_cy.WrapSTR._strcat(GXContext._get_tls_geo(), dest.value.encode(), orig.encode())
@classmethod
def strcmp(cls, first, second, case_sensitive):
"""
This method compares two strings and returns these values
:param first: String A
:param second: String B
:param case_sensitive: :ref:`STR_CASE`
:type first: str
:type second: str
:type case_sensitive: int
:returns: A < B -1
A == B 0
A > B 1
:rtype: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = gxapi_cy.WrapSTR._strcmp(GXContext._get_tls_geo(), first.encode(), second.encode(), case_sensitive)
return ret_val
@classmethod
def strcpy(cls, dest, orig):
"""
This method copies a string into another string.
:param dest: Destination string
:param orig: Origin string
:type dest: str_ref
:type orig: str
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
dest.value = gxapi_cy.WrapSTR._strcpy(GXContext._get_tls_geo(), dest.value.encode(), orig.encode())
@classmethod
def stri_mask(cls, mask, test):
"""
Case insensitive comparison of two strings.
:param mask: Mask
:param test: String to test
:type mask: str
:type test: str
:returns: 0 if string does not match mask.
1 if string matches mask.
:rtype: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** Mask characters '*' - matches any one or more up to
next character
'?' - matches one character
Test is case insensitive
"""
ret_val = gxapi_cy.WrapSTR._stri_mask(GXContext._get_tls_geo(), mask.encode(), test.encode())
return ret_val
@classmethod
def strins(cls, dest, ins, orig):
"""
This method inserts a string at a specified position.
:param dest: Destination String
:param ins: Insert Position
:param orig: String to add
:type dest: str_ref
:type ins: int
:type orig: str
.. versionadded:: 5.1.8
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** If the specified position does not fall within the current string
the source string will simply be Concatenated.
"""
dest.value = gxapi_cy.WrapSTR._strins(GXContext._get_tls_geo(), dest.value.encode(), ins, orig.encode())
@classmethod
def strlen(cls, str_val):
"""
Returns the length of a string.
:param str_val: String to find the length of
:type str_val: str
:returns: String length.
:rtype: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = gxapi_cy.WrapSTR._strlen(GXContext._get_tls_geo(), str_val.encode())
return ret_val
@classmethod
def str_mask(cls, mask, test):
"""
Case sensitive comparison of two strings.
:param mask: Mask
:param test: String to test
:type mask: str
:type test: str
:returns: 0 if string does not match mask.
1 if string matches mask.
:rtype: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** Mask characters '*' - matches any one or more up to
next character
'?' - matches one character
Test is case sensitive
"""
ret_val = gxapi_cy.WrapSTR._str_mask(GXContext._get_tls_geo(), mask.encode(), test.encode())
return ret_val
@classmethod
def str_min(cls, str_val):
"""
Remove spaces and tabs and return length
:param str_val: String to find the min length of
:type str_val: str_ref
:returns: String length.
:rtype: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Note:** String may be modified. This function should not be
used to determine if a file name string is defined, because
a valid file name can contain spaces, and once "tested" the
name will be altered. Instead, use `str_min2 <geosoft.gxapi.GXSTR.str_min2>`, or use
`GXSYS.file_exist <geosoft.gxapi.GXSYS.file_exist>` to see if the file actually exists.
"""
ret_val, str_val.value = gxapi_cy.WrapSTR._str_min(GXContext._get_tls_geo(), str_val.value.encode())
return ret_val
@classmethod
def str_min2(cls, str_val):
"""
Length less spaces and tabs, string unchanged.
:param str_val: String to find the min length of
:type str_val: str
:returns: String length.
:rtype: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = gxapi_cy.WrapSTR._str_min2(GXContext._get_tls_geo(), str_val.encode())
return ret_val
@classmethod
def strncmp(cls, first, second, n_char, case_sensitive):
"""
Compares two strings to a given number of characters.
:param first: String A
:param second: String B
:param n_char: Number of characters to compare
:param case_sensitive: :ref:`STR_CASE`
:type first: str
:type second: str
:type n_char: int
:type case_sensitive: int
:returns: A < B -1
A == B 0
A > B 1
:rtype: int
.. versionadded:: 5.0.5
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = gxapi_cy.WrapSTR._strncmp(GXContext._get_tls_geo(), first.encode(), second.encode(), n_char, case_sensitive)
return ret_val
@classmethod
def str_str(cls, str_val, sub, case_sensitive):
"""
Scan a string for the occurrence of a given substring.
:param str_val: String to scan
:param sub: String to look for
:param case_sensitive: :ref:`STR_CASE`
:type str_val: str
:type sub: str
:type case_sensitive: int
:returns: -1 if the substring does not occur in the string
Index of first matching location if found
:rtype: int
.. versionadded:: 5.1.6
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
"""
ret_val = gxapi_cy.WrapSTR._str_str(GXContext._get_tls_geo(), str_val.encode(), sub.encode(), case_sensitive)
return ret_val
    @classmethod
    def substr(cls, dest, orig, start, length):
        """
        Extract part of a string.

        :param dest: Destination string
        :param orig: Origin string
        :param start: Start location
        :param length: Number of characters
        :type dest: str_ref
        :type orig: str
        :type start: int
        :type length: int

        .. versionadded:: 6.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The destination string length will be less than the
        requested length if the substring is not fully enclosed
        in the origin string.
        """
        # Result is returned through the str_ref's .value attribute.
        dest.value = gxapi_cy.WrapSTR._substr(GXContext._get_tls_geo(), dest.value.encode(), orig.encode(), start, length)

    @classmethod
    def to_lower(cls, str_val):
        """
        Convert a string to lower case.

        :param str_val: String (modified in place)
        :type str_val: str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        str_val.value = gxapi_cy.WrapSTR._to_lower(GXContext._get_tls_geo(), str_val.value.encode())

    @classmethod
    def to_upper(cls, str_val):
        """
        Convert a string to upper case.

        :param str_val: String (modified in place)
        :type str_val: str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        str_val.value = gxapi_cy.WrapSTR._to_upper(GXContext._get_tls_geo(), str_val.value.encode())

    @classmethod
    def xyz_line(cls, line, xyz):
        """
        Make a valid XYZ line name from a valid `GXDB <geosoft.gxapi.GXDB>` line name.

        :param line: Line name to convert
        :param xyz: Buffer to hold new line name
        :type line: str
        :type xyz: str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        xyz.value = gxapi_cy.WrapSTR._xyz_line(GXContext._get_tls_geo(), line.encode(), xyz.value.encode())

    @classmethod
    def make_alpha(cls, str_val):
        """
        Turns all non alpha-numeric characters into an _.

        :param str_val: String to trim (modified in place)
        :type str_val: str_ref

        .. versionadded:: 5.1.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** THE STRING IS MODIFIED.
        """
        str_val.value = gxapi_cy.WrapSTR._make_alpha(GXContext._get_tls_geo(), str_val.value.encode())
    @classmethod
    def printf(cls, dest, mask):
        """
        Variable Argument PrintF function

        :param dest: Destination string (modified in place)
        :param mask: Pattern string
        :type dest: str_ref
        :type mask: str

        .. versionadded:: 7.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        dest.value = gxapi_cy.WrapSTR._printf(GXContext._get_tls_geo(), dest.value.encode(), mask.encode())

    @classmethod
    def replace_char(cls, istr, old, new_char):
        """
        Replaces characters in a string.

        :param istr: String to modify
        :param old: Character to replace (first character only)
        :param new_char: Replacement character (first character only)
        :type istr: str_ref
        :type old: str
        :type new_char: str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** If the input replacement character is "", then the
        string will be truncated at the first character to replace.
        """
        istr.value = gxapi_cy.WrapSTR._replace_char(GXContext._get_tls_geo(), istr.value.encode(), old.encode(), new_char.encode())

    @classmethod
    def replace_char2(cls, istr, old, new_char):
        """
        Replaces characters in a string, supports simple removal.

        :param istr: String to modify
        :param old: Character to replace (first character only)
        :param new_char: Replacement character (first character only)
        :type istr: str_ref
        :type old: str
        :type new_char: str

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** If the replacement character is "" (NULL character)
        then the character to replace is removed from the
        input string, and the string is shortened.
        """
        istr.value = gxapi_cy.WrapSTR._replace_char2(GXContext._get_tls_geo(), istr.value.encode(), old.encode(), new_char.encode())

    @classmethod
    def replace_multi_char(cls, istr, old, new_char):
        """
        Replaces multiple characters in a string.

        :param istr: String to modify
        :param old: Characters to replace
        :param new_char: Replacement characters
        :type istr: str_ref
        :type old: str
        :type new_char: str

        .. versionadded:: 5.1.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The number of characters to replace must equal
        the number of replacement characters.
        """
        istr.value = gxapi_cy.WrapSTR._replace_multi_char(GXContext._get_tls_geo(), istr.value.encode(), old.encode(), new_char.encode())

    @classmethod
    def replace_non_ascii(cls, str_val, rpl):
        """
        Replace non-ASCII characters in a string.

        :param str_val: String to modify
        :param rpl: Replacement character
        :type str_val: str_ref
        :type rpl: str

        .. versionadded:: 6.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** All characters > 127 will be replaced by the first character
        of the replacement string.
        """
        str_val.value = gxapi_cy.WrapSTR._replace_non_ascii(GXContext._get_tls_geo(), str_val.value.encode(), rpl.encode())

    @classmethod
    def set_char(cls, str_val, ascii):
        """
        Set a string's first character using an ASCII value of a character.

        :param str_val: String (first character modified in place)
        :param ascii: ASCII value
        :type str_val: str_ref
        :type ascii: int

        .. versionadded:: 5.1.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        str_val.value = gxapi_cy.WrapSTR._set_char(GXContext._get_tls_geo(), str_val.value.encode(), ascii)
    @classmethod
    def trim_quotes(cls, str_val):
        """
        Remove double quotes.

        :param str_val: String to trim (modified in place)
        :type str_val: str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** THE STRING IS MODIFIED.
        This method goes through the string and removes all spaces in a
        string except those enclosed in quotes. It then removes
        any quotes. It is useful for trimming unwanted spaces from
        an input string but allows the user to use quotes as well.
        If a quote follows a backslash, the quote is retained and
        the backslash is deleted. These quotes are NOT treated as
        delimiters.
        """
        str_val.value = gxapi_cy.WrapSTR._trim_quotes(GXContext._get_tls_geo(), str_val.value.encode())

    @classmethod
    def trim_space(cls, str_val, trim):
        """
        Remove leading and/or trailing whitespace.

        :param str_val: String to trim (modified in place)
        :param trim: :ref:`STR_TRIM`
        :type str_val: str_ref
        :type trim: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** THE STRING IS MODIFIED.
        Whitespace characters are defined as space, tab, carriage return,
        new line, vertical tab or formfeed (0x09 to 0x0D, 0x20)
        """
        str_val.value = gxapi_cy.WrapSTR._trim_space(GXContext._get_tls_geo(), str_val.value.encode(), trim)

    @classmethod
    def un_quote(cls, str_val):
        """
        Remove double quotes from string

        :param str_val: String to unquote (modified in place)
        :type str_val: str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** THE STRING IS MODIFIED.
        The pointers will be advanced past a first character
        quote and a last character quote will be set to '\\0'.
        Both first and last characters must be quotes for the
        trimming to take place.
        """
        str_val.value = gxapi_cy.WrapSTR._un_quote(GXContext._get_tls_geo(), str_val.value.encode())

    # Misc

    @classmethod
    def gen_group_name(cls, istr1, istr2, istr3, ostr):
        """
        Generate a group name string
        from type string, database and channel(optional) strings..

        :param istr1: Input type string (static part)
        :param istr2: Input db string
        :param istr3: Input ch string (could be 0 length)
        :param ostr: Output group name string
        :type istr1: str
        :type istr2: str
        :type istr3: str
        :type ostr: str_ref

        .. versionadded:: 5.1.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The output group name string is formed in the way of typestr_dbstr_chstr.
        If the database/channel strings is too long to fit the output string
        (max total length of 1040, including the NULL ending), then
        the typestr will always be kept the full length to be the first part,
        while the dbstr and/or chstr will be shortened to be the
        second and/or third part of the output string.

        .. seealso::

            GenNewGroupName_MVIEW
        """
        ostr.value = gxapi_cy.WrapSTR._gen_group_name(GXContext._get_tls_geo(), istr1.encode(), istr2.encode(), istr3.encode(), ostr.value.encode())
    # Tokenizing

    @classmethod
    def count_tokens(cls, str_val, delims):
        """
        Counts number of tokens.

        :param str_val: String to tokenize
        :param delims: Delimiter characters
        :type str_val: str
        :type delims: str

        :returns: Number of tokens in the string.
        :rtype: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Delimiters are "soft" in that one or more delimiters
        is considered a single delimiter, and preceding and
        trailing delimiters are ignored.

        DO NOT use this function except in GXC code. The corresponding
        `get_token <geosoft.gxapi.GXSTR.get_token>` function will not operate correctly in GX.Net code.
        """
        ret_val = gxapi_cy.WrapSTR._count_tokens(GXContext._get_tls_geo(), str_val.encode(), delims.encode())
        return ret_val

    @classmethod
    def get_token(cls, dest, orig, tok):
        """
        Get a token from a tokenized string.

        :param dest: Destination string
        :param orig: Tokenized string
        :param tok: Token number wanted (0 is the first!)
        :type dest: str_ref
        :type orig: str
        :type tok: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Call `tokens <geosoft.gxapi.GXSTR.tokens>` to prepare the tokenized
        string.
        You MUST NOT get tokens beyond number of tokens returned
        by `tokens <geosoft.gxapi.GXSTR.tokens>` or `tokens2 <geosoft.gxapi.GXSTR.tokens2>`.
        The first token has index 0.

        DO NOT use this function except in GXC code.
        `get_token <geosoft.gxapi.GXSTR.get_token>` function will not operate correctly in GX.Net code.

        .. seealso::

            `tokens <geosoft.gxapi.GXSTR.tokens>`, GetToken_STR
        """
        dest.value = gxapi_cy.WrapSTR._get_token(GXContext._get_tls_geo(), dest.value.encode(), orig.encode(), tok)

    @classmethod
    def tokenize(cls, str_val, soft, hard, esc, quote):
        """
        Tokenize a string based on any characters.

        :param str_val: `GXSTR <geosoft.gxapi.GXSTR>` - String containing token(s)
        :param soft: szSoft - Soft delimiters (spaces/tabs)
        :param hard: szHard - Hard delimiters (commas)
        :param esc: szEsc - Escape delimiters (back-slash)
        :param quote: szQuote- Quote delimiters (quote characters)
        :type str_val: str_ref
        :type soft: str
        :type hard: str
        :type esc: str
        :type quote: str

        :returns: Number of tokens
        :rtype: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This uses a finite state machine to tokenize on these
        rules:

        1. Any one character following an escape delimiter is
        treated as a normal character.

        2. Any characters inside a quote string are treated as
        normal characters.

        3. Any number of Soft delimiters in sequence without a
        hard delimiter are treated as one hard delimited.

        4. Any number of soft delimiters can precede or follow
        a hard delimiter and are ignored.

        EXAMPLE

        Soft = [ ] Hard = [,] Escape = [\\] Quote = ["]

        [this is a , , the "test," of , \\,\\" my delimite fi,]

        Results in:

        [this] [is] [a] [] [the] ["test,"] [of] [\\,\\"] [my] [delimite] [fi] []

        DO NOT use this function except in GXC code. The corresponding
        GetToken_STR function will not operate correctly in GX.Net code.

        .. seealso::

            GetToken_STR
        """
        ret_val, str_val.value = gxapi_cy.WrapSTR._tokenize(GXContext._get_tls_geo(), str_val.value.encode(), soft.encode(), hard.encode(), esc.encode(), quote.encode())
        return ret_val

    @classmethod
    def tokens(cls, str_val, delims):
        """
        Tokenize a string

        :param str_val: String to tokenize (modified in place)
        :param delims: Delimiter characters
        :type str_val: str_ref
        :type delims: str

        :returns: Number of tokens, maximum is 2048
        :rtype: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Delimiters in the string are reduced to a single NULL.
        Delimiters within double quoted strings are ignored.
        Use GetToken_STR to extract tokens.

        DO NOT use this function except in GXC code. The corresponding
        `get_token <geosoft.gxapi.GXSTR.get_token>` function will not operate correctly in GX.Net code.

        .. seealso::

            `tokens2 <geosoft.gxapi.GXSTR.tokens2>`, GetToken_STR
        """
        ret_val, str_val.value = gxapi_cy.WrapSTR._tokens(GXContext._get_tls_geo(), str_val.value.encode(), delims.encode())
        return ret_val

    @classmethod
    def tokens2(cls, str_val, soft, hard, esc, quote):
        """
        General tokenize a string

        :param str_val: String to tokenize (modified in place)
        :param soft: szSoft - Soft delimiters (spaces/tabs)
        :param hard: szHard - Hard delimiters (commas)
        :param esc: szEsc - Escape delimiters (back-slash)
        :param quote: szQuote- Quote delimiters (quote characters)
        :type str_val: str_ref
        :type soft: str
        :type hard: str
        :type esc: str
        :type quote: str

        :returns: Number of Tokens
        :rtype: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This function is for old GX compatibility only.
        See `tokenize <geosoft.gxapi.GXSTR.tokenize>`.

        DO NOT use this function except in GXC code. The corresponding
        `get_token <geosoft.gxapi.GXSTR.get_token>` function will not operate correctly in GX.Net code.
        """
        ret_val, str_val.value = gxapi_cy.WrapSTR._tokens2(GXContext._get_tls_geo(), str_val.value.encode(), soft.encode(), hard.encode(), esc.encode(), quote.encode())
        return ret_val

    @classmethod
    def parse_list(cls, str_val, gvv):
        """
        Parse a tokenized list to get a selection list.

        :param str_val: String to be parsed
        :param gvv: Selection Buffer to fill
        :type str_val: str
        :type gvv: GXVV

        .. versionadded:: 5.0.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Given a list such as "1,3,4,6-9,12", it fills the
        input buffer with 1 if the number is selected,
        0 if not. The items are delimited with spaces
        or commas, and ranges are acceptable, either using
        a "-" or ":", e.g. 3-6 and 3:6 both mean 3,4,5, and 6.
        Only values from 0 to one less than the buffer length
        are used. Out-of-range values are ignored.
        """
        gxapi_cy.WrapSTR._parse_list(GXContext._get_tls_geo(), str_val.encode(), gvv)
### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend
### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer
|
bsd-2-clause
| -3,684,899,027,531,421,700
| 31.354388
| 188
| 0.58097
| false
| 3.722745
| false
| false
| false
|
wbali/rwc-inventory-manager
|
inventory_manager_app/stock/forms.py
|
1
|
1742
|
from flask_wtf import FlaskForm
from wtforms import StringField, DateField, IntegerField, SelectField, DecimalField
from wtforms.validators import DataRequired, Optional
from wtforms.widgets import TextArea
class StockForm(FlaskForm):
    """Create/edit form for a single stock (inventory) item."""

    # Primary-key field; presumably populated only when editing an
    # existing record -- TODO confirm against the view code.
    stock_id = IntegerField()
    item_name = StringField("Item name", validators=[
        DataRequired(message="Please enter the item's name.")
    ])
    arrival_date = DateField("Arrival date", validators=[Optional()])
    price = DecimalField("Price", validators=[
        DataRequired(message="Please enter the item's price.")
    ])
    shipping_date = DateField("Shipping date", validators=[Optional()])
    selling_price = DecimalField("Selling-price", validators=[Optional()])
    quantity = IntegerField("Quantity", validators=[Optional()])
    # Select fields coerce the submitted option value to int (record ids);
    # choices are expected to be assigned by the caller before rendering.
    customer = SelectField(validators=[Optional()], coerce=int)
    vendor = SelectField(validators=[Optional()], coerce=int)
    billing_date = DateField("Billing date", validators=[Optional()])
    notes = StringField("Notes", widget=TextArea())
    barcode = StringField("Barcode", validators=[Optional()])
class VendorForm(FlaskForm):
    """Create/edit form for a vendor record (name and address required)."""

    # Primary-key field; presumably populated only when editing -- verify.
    vendor_id = IntegerField()
    name = StringField("Vendor name", validators=[
        DataRequired(message="Please enter the vendor's name.")
    ])
    address = StringField("Address", validators=[
        DataRequired(message="Please enter the vendor's address.")
    ])
class CustomerForm(FlaskForm):
    """Create/edit form for a customer record (name and address required)."""

    # Primary-key field; presumably populated only when editing -- verify.
    customer_id = IntegerField()
    name = StringField("Customer name", validators=[
        DataRequired(message="Please enter the customer's name.")
    ])
    address = StringField("Address", validators=[
        DataRequired(message="Please enter the customer's address.")
    ])
|
mit
| -1,446,767,546,980,391,400
| 27.57377
| 83
| 0.695178
| false
| 4.695418
| false
| false
| false
|
Arcanemagus/SickRage
|
sickbeard/providers/rarbg.py
|
1
|
6962
|
# coding=utf-8
# Author: Dustyn Gibson <miigotu@gmail.com>
#
# URL: https://sick-rage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import datetime
import time
import sickbeard
from sickbeard import logger, tvcache
from sickbeard.common import cpu_presets
from sickbeard.indexers.indexer_config import INDEXER_TVDB
from sickrage.helper.common import convert_size, try_int
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class RarbgProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
    """Torrent provider backed by the RARBG torrentapi.org JSON API.

    API tokens expire after 15 minutes; `login` caches one and refreshes
    it slightly early (14 minutes). NOTE: this module is Python 2 code
    (`StandardError`, implicit `int < None` comparisons below).
    """

    def __init__(self):
        TorrentProvider.__init__(self, "Rarbg")

        self.public = True
        # User-configurable filters; may remain None until configured.
        self.minseed = None
        self.ranked = None
        self.sorting = None
        self.minleech = None
        # API session token plus its expiry timestamp (see login()).
        self.token = None
        self.token_expires = None

        # Spec: https://torrentapi.org/apidocs_v2.txt
        self.url = "https://rarbg.com"
        self.urls = {"api": "http://torrentapi.org/pubapi_v2.php"}

        self.proper_strings = ["{{PROPER|REPACK}}"]

        self.cache = tvcache.TVCache(self, min_time=10)  # only poll RARBG every 10 minutes max

    def login(self):
        """Fetch (or reuse) an API token. Returns True on success."""
        # Reuse a still-valid cached token instead of hitting the API.
        if self.token and self.token_expires and datetime.datetime.now() < self.token_expires:
            return True

        login_params = {
            "get_token": "get_token",
            "format": "json",
            "app_id": "sickrage2"
        }

        response = self.get_url(self.urls["api"], params=login_params, returns="json")
        if not response:
            logger.log("Unable to connect to provider", logger.WARNING)
            return False

        self.token = response.get("token")
        # Tokens officially last 15 min; refresh after 14 to stay safe.
        self.token_expires = datetime.datetime.now() + datetime.timedelta(minutes=14) if self.token else None
        return self.token is not None

    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-branches, too-many-locals, too-many-statements
        """Query the API for each search string/mode; return result dicts
        sorted by seeders (per mode)."""
        results = []
        if not self.login():
            return results

        search_params = {
            "app_id": "sickrage2",
            "category": "tv",
            "min_seeders": try_int(self.minseed),
            "min_leechers": try_int(self.minleech),
            "limit": 100,
            "format": "json_extended",
            "ranked": try_int(self.ranked),
            "token": self.token,
        }

        if ep_obj is not None:
            ep_indexerid = ep_obj.show.indexerid
            ep_indexer = ep_obj.show.indexer
        else:
            ep_indexerid = None
            ep_indexer = None

        for mode in search_strings:
            items = []
            logger.log("Search Mode: {0}".format(mode), logger.DEBUG)

            # RSS mode lists the newest torrents; search mode filters by
            # string and, when possible, by TVDB id.
            if mode == "RSS":
                search_params["sort"] = "last"
                search_params["mode"] = "list"
                search_params.pop("search_string", None)
                search_params.pop("search_tvdb", None)
            else:
                search_params["sort"] = self.sorting if self.sorting else "seeders"
                search_params["mode"] = "search"

                if ep_indexer == INDEXER_TVDB and ep_indexerid:
                    search_params["search_tvdb"] = ep_indexerid
                else:
                    search_params.pop("search_tvdb", None)

            for search_string in search_strings[mode]:
                if mode != "RSS":
                    search_params["search_string"] = search_string
                    logger.log("Search string: {0}".format
                               (search_string.decode("utf-8")), logger.DEBUG)

                # Throttle API calls according to the configured CPU preset.
                time.sleep(cpu_presets[sickbeard.CPU_PRESET])
                data = self.get_url(self.urls["api"], params=search_params, returns="json")
                if not isinstance(data, dict):
                    logger.log("No data returned from provider", logger.DEBUG)
                    continue

                error = data.get("error")
                error_code = data.get("error_code")
                # Don't log when {"error":"No results found","error_code":20}
                # List of errors: https://github.com/rarbg/torrentapi/issues/1#issuecomment-114763312
                if error:
                    if try_int(error_code) != 20:
                        logger.log(error)
                    continue

                torrent_results = data.get("torrent_results")
                if not torrent_results:
                    logger.log("Data returned from provider does not contain any torrents", logger.DEBUG)
                    continue

                for item in torrent_results:
                    try:
                        title = item.pop("title")
                        download_url = item.pop("download")
                        if not all([title, download_url]):
                            continue

                        seeders = item.pop("seeders")
                        leechers = item.pop("leechers")
                        # NOTE(review): minseed/minleech may still be None here;
                        # this relies on Python 2's int < None ordering -- verify.
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != "RSS":
                                logger.log("Discarding torrent because it doesn't meet the"
                                           " minimum seeders or leechers: {0} (S:{1} L:{2})".format
                                           (title, seeders, leechers), logger.DEBUG)
                            continue

                        torrent_size = item.pop("size", -1)
                        size = convert_size(torrent_size) or -1
                        torrent_hash = self.hash_from_magnet(download_url)

                        if mode != "RSS":
                            logger.log("Found result: {0} with {1} seeders and {2} leechers".format
                                       (title, seeders, leechers), logger.DEBUG)

                        result = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': torrent_hash}
                        items.append(result)
                    except StandardError:
                        # Malformed item from the API: skip it (Python 2 only).
                        continue

            # For each search mode sort all the items by seeders
            items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
            results += items

        return results


provider = RarbgProvider()
|
gpl-3.0
| 7,859,844,668,399,947,000
| 38.333333
| 149
| 0.548118
| false
| 4.234793
| false
| false
| false
|
JarbasAI/jarbas-core
|
mycroft/jarbas-skills/skill_trivia/__init__.py
|
1
|
10505
|
from adapt.intent import IntentBuilder
from mycroft.skills.core import MycroftSkill
import random, math, os, sys
from os.path import dirname
path= dirname(dirname(__file__))
sys.path.append(path)
# import intent layers
from service_intent_layer import IntentParser
__author__ = 'jarbas'
class MathQuestions:
    """Random generator for simple math quiz questions.

    Question templates carry ``str.format`` placeholders that index into
    three parallel number pools: ``{0[i]}`` -> whole numbers 1..10,
    ``{1[i]}`` -> random decimals, ``{2[i]}`` -> non-zero signed integers.
    The paired answer template is evaluated with the same numbers to
    produce the numeric (float) answer.
    """

    def __init__(self):
        # Each entry is a [question_template, answer_template] pair.
        self.questions = []
        self.init_questions()

    def ft(self, text, randints, randdec, randsgnint):
        """Fill a template's placeholders from the three number pools."""
        return text.format(randints, randdec, randsgnint)

    def init_questions(self):
        """Register the built-in question/answer template pairs."""
        # TODO more questions / equation types
        self.questions.append(["Convert {1[0]:0.2f} centimeters into meters.", "{1[0]}*0.01"])
        self.questions.append([
            "What is the length of the line segment with endpoints ({2[1]},{2[2]}) and ({2[3]},{2[4]})?",
            "math.sqrt(({2[3]}-{2[1]})**2 + ({2[4]}-{2[2]})**2)"])
        self.questions.append(["Solve for x in the equation {2[1]}x - {0[2]} = {2[7]}", "({2[7]}+{0[2]})*1./{2[1]}"])

    def ask_question(self):
        """Pick a random template and return a (question, answer) pair."""
        question = random.choice(self.questions)
        answer = question[1]
        question = question[0]
        question, answer = self.answer_question(question, answer)
        return question, answer

    def answer_question(self, question, answer):
        """Instantiate *question*/*answer* templates with fresh random numbers.

        :returns: (filled question string, numeric answer as float)
        """
        randints = []
        randdec = []
        randsgnint = []
        # Build the three number pools (10 entries each) used by templates.
        for a in range(1, 11):
            randints.append(random.randint(1, 10))
            randdec.append(math.sqrt(random.randint(1, 100)) * random.randint(1, 10))
            # BUG FIX: exclude 0 from the signed pool. A zero coefficient
            # crashed the division answer template ("...*1./{2[1]}") with
            # ZeroDivisionError, and "0x - c = d" is not solvable for x.
            sgn = 0
            while sgn == 0:
                sgn = random.randint(-10, 10)
            randsgnint.append(sgn)
        # Substitute the numbers into the question text ...
        question = self.ft(question, randints, randdec, randsgnint)
        # ... and evaluate the answer expression. The templates are the
        # hard-coded internal strings registered above (never user input),
        # so eval() here is contained; do not feed it external data.
        answer = eval(self.ft(answer, randints, randdec, randsgnint))
        return question, answer
class TriviaQuestions:
    """Loads trivia question/answer pairs from text files beside this module.

    Each ``<category>.txt`` file alternates lines: a question line followed
    by its answer line.
    """

    def __init__(self):
        # Maps category name -> list of [question, answer] pairs.
        self.questions = {} #"categorie" : [[question, answer], [question, answer]]
        self.categories = ["general", "geography", "history", "literature", "movies", "music", "science", "sports"]
        self.load_questions()

    def load_questions(self):
        """Read every category file; odd lines are questions, even answers."""
        for cat in self.categories:
            questions = []
            answers = []
            path = os.path.dirname(__file__) + '/' + cat + ".txt"
            with open(path) as f:
                lines = f.readlines()
                i = 1
                for line in lines:
                    # 1-based counter: line 1, 3, 5... are questions.
                    if i % 2 == 0:
                        answers.append(line)
                    else:
                        questions.append(line)
                    i += 1
            # NOTE(review): a file with an odd number of lines leaves the last
            # question without an answer and raises IndexError below -- assumes
            # the bundled data files are well-formed; verify.
            self.questions[cat] = []
            for i in range(len(questions)):
                self.questions[cat].append([questions[i], answers[i]])

    def ask_question(self, categorie="general"):
        """Return a random (question, answer) pair from *categorie*."""
        question = random.choice(self.questions[categorie])
        answer = question[1]
        question = question[0]
        return question, answer
class TriviaSkill(MycroftSkill):
    """Mycroft skill that runs a trivia/math quiz.

    State machine: ``quizz`` means one question is pending an answer;
    ``continuous`` means a full game is running and each user utterance
    triggers the next question via :meth:`converse`.
    """

    def __init__(self):
        super(TriviaSkill, self).__init__(name="TriviaSkill")
        # initialize your variables
        self.quizz = False        # a question is awaiting an answer
        self.continuous = False   # full-game mode (question after question)
        self.math = MathQuestions()
        self.trivia = TriviaQuestions()
        self.answer = None        # answer of the currently pending question
        self.categorie = "all"    # current category ("all" = pick at random)
        self.categories = ["math", "general", "geography", "history", "literature", "movies", "music", "science", "sports"]

    def initialize(self):
        """Hook called by Mycroft once the skill is loaded."""
        self.intent_parser = IntentParser(self.emitter)
        # register intents
        self.build_intents()

    def build_intents(self):
        """Build and register every Adapt intent this skill handles."""
        # build
        trivia_intent = IntentBuilder("TriviaGameIntent") \
            .require("triviastart").build()
        cat_intent = IntentBuilder("TriviaCategorieIntent") \
            .require("Categorie").build()
        geography_intent = IntentBuilder("GeographyQuestionIntent") \
            .require("geography").build()
        history_intent = IntentBuilder("HistoryQuestionIntent") \
            .require("history").build()
        literature_intent = IntentBuilder("LiteratureQuestionIntent") \
            .require("literature").build()
        math_intent = IntentBuilder("MathQuestionIntent") \
            .require("math").build()
        movie_intent = IntentBuilder("MovieQuestionIntent") \
            .require("movie").build()
        music_intent = IntentBuilder("MusicQuestionIntent") \
            .require("music").build()
        science_intent = IntentBuilder("ScienceQuestionIntent") \
            .require("science").build()
        sports_intent = IntentBuilder("SportsQuestionIntent") \
            .require("sports").build()
        general_intent = IntentBuilder("QuestionIntent") \
            .require("question").build()
        stop_intent = IntentBuilder("StopTriviaIntent") \
            .require("stoptrivia").build()
        # register
        self.register_intent(trivia_intent,
                             self.handle_trivia_game_start)
        self.register_intent(geography_intent,
                             self.handle_geography_question)
        self.register_intent(history_intent,
                             self.handle_history_question)
        self.register_intent(literature_intent,
                             self.handle_literature_question)
        self.register_intent(math_intent,
                             self.handle_math_question)
        self.register_intent(movie_intent,
                             self.handle_movies_question)
        self.register_intent(music_intent,
                             self.handle_music_question)
        self.register_intent(science_intent,
                             self.handle_science_question)
        self.register_intent(sports_intent,
                             self.handle_sports_question)
        self.register_intent(general_intent,
                             self.handle_general_question)
        self.register_intent(cat_intent,
                             self.handle_change_cat_intent)
        self.register_intent(stop_intent,
                             self.handle_stop_quizz)

    def random_question(self):
        """Pick the next question from the current category.

        Side effects: sets ``self.quizz`` and stores ``self.answer``.
        """
        if self.categorie == "math":
            self.quizz = True
            question, self.answer = self.math.ask_question()
        elif self.categorie == "all":
            self.quizz = True
            cat = random.choice(self.categories)
            if cat == "math":
                question, self.answer = self.math.ask_question()
            else:
                question, self.answer = self.trivia.ask_question(cat)
        else:
            self.quizz = True
            question, self.answer = self.trivia.ask_question(self.categorie)
        return question

    def handle_trivia_game_start(self, message):
        """Start continuous game mode with a (possibly random) category."""
        if self.categorie == "all":
            self.categorie = random.choice(self.categories)
        self.speak_dialog("trivia", {"cat": self.categorie, "question":self.random_question()})
        self.continuous = True

    def handle_change_cat_intent(self, message):
        """Switch the active category if the requested one is valid."""
        cat = message.data["Categorie"].replace(" ","").replace('"',"")
        if cat in self.categories:
            self.categorie = cat
            self.speak_dialog("categorie", {"cat": self.categorie})
        else:
            self.speak(cat + " is an invalid categorie")

    def handle_math_question(self, message):
        self.quizz = True
        question, self.answer = self.math.ask_question()
        self.speak(question, expect_response=True)

    def handle_sports_question(self, message):
        self.quizz = True
        question, self.answer = self.trivia.ask_question("sports")
        self.speak(question, expect_response=True)

    def handle_movies_question(self, message):
        self.quizz = True
        question, self.answer = self.trivia.ask_question("movies")
        self.speak(question, expect_response=True)

    def handle_music_question(self, message):
        self.quizz = True
        question, self.answer = self.trivia.ask_question("music")
        self.speak(question, expect_response=True)

    def handle_literature_question(self, message):
        self.quizz = True
        question, self.answer = self.trivia.ask_question("literature")
        self.speak(question, expect_response=True)

    def handle_history_question(self, message):
        self.quizz = True
        question, self.answer = self.trivia.ask_question("history")
        self.speak(question, expect_response=True)

    def handle_geography_question(self, message):
        self.quizz = True
        question, self.answer = self.trivia.ask_question("geography")
        self.speak(question, expect_response=True)

    def handle_science_question(self, message):
        self.quizz = True
        question, self.answer = self.trivia.ask_question("science")
        self.speak(question, expect_response=True)

    def handle_general_question(self, message):
        self.quizz = True
        question, self.answer = self.trivia.ask_question()
        self.speak(question, expect_response=True)

    def handle_stop_quizz(self, message):
        self.stop()

    def stop(self):
        """Leave quiz mode and reset all game state."""
        if self.quizz or self.continuous:
            self.speak("Exiting Quizz mode")
            self.quizz = False
            self.continuous = False
            self.answer = None
            self.categorie = "all"

    def converse(self, transcript, lang="en-us"):
        """Intercept user utterances while a question is pending.

        Returns True when the utterance was consumed as a quiz answer.
        """
        # check if some of the intents will be handled
        intent, id = self.intent_parser.determine_intent(transcript[0])
        if id == self.skill_id:
            # intent from this skill will be triggered
            # only stop, change categorie, specific questions intents available
            pass
        elif self.continuous and self.answer is not None:
            # Game mode: reveal the answer and immediately ask the next one.
            self.speak_dialog("trivianext", {"ans" : str(self.answer), "question":self.random_question()}, expect_response=True)
            return True
        elif self.quizz and self.answer is not None:
            # Single-question mode: reveal the answer and end the quiz.
            self.speak("the correct answer is " + str(self.answer), expect_response=True)
            self.quizz = False
            self.answer = None
            return True
        return False
def create_skill():
    # Factory entry point used by the Mycroft skill loader.
    return TriviaSkill()
|
gpl-3.0
| -7,013,586,128,043,303,000
| 38.645283
| 135
| 0.58991
| false
| 4.029536
| false
| false
| false
|
ZeitgeberH/nengo
|
nengo/tests/test_config.py
|
1
|
3332
|
import pytest
import nengo
import nengo.config
def test_config():
    """Exercise the Config/ConfigItem machinery end to end: defaults,
    assignment, and the error cases for unknown attributes and keys."""
    @nengo.config.configures(nengo.Ensemble)
    class TestConfigEnsemble(nengo.config.ConfigItem):
        something = nengo.config.Parameter(None)
        other = nengo.config.Parameter(0)

    @nengo.config.configures(nengo.Connection)
    class TestConfigConnection(nengo.config.ConfigItem):
        something_else = nengo.config.Parameter(None)

    class TestConfig(nengo.config.Config):
        config_items = [TestConfigEnsemble, TestConfigConnection]

    model = nengo.Network()
    with model:
        a = nengo.Ensemble(nengo.LIF(50), 1)
        b = nengo.Ensemble(nengo.LIF(90), 1)
        a2b = nengo.Connection(a, b, synapse=0.01)

    config = TestConfig()

    # Parameter defaults apply to every configured object.
    assert config[a].something is None
    assert config[b].something is None
    assert config[a].other == 0
    assert config[b].other == 0
    assert config[a2b].something_else is None

    # Assignments are per-object and can be overwritten.
    config[a].something = 'hello'
    assert config[a].something == 'hello'
    config[a].something = 'world'
    assert config[a].something == 'world'

    # BUG FIX: each expected failure gets its own pytest.raises block.
    # Previously several statements shared one block, so everything after
    # the first raising statement was never executed (untested).
    with pytest.raises(AttributeError):
        config[a].something_else      # Ensemble config has no something_else
    with pytest.raises(AttributeError):
        config[a2b].something         # Connection config has no something
    with pytest.raises(AttributeError):
        config[a].something_else = 1
    with pytest.raises(AttributeError):
        config[a2b].something = 1

    # Unconfigured key types are rejected.
    with pytest.raises(KeyError):
        config['a'].something
    with pytest.raises(KeyError):
        config[None].something
    with pytest.raises(KeyError):
        config[model].something
def test_parameter_checking():
    """A Parameter subclass can validate values at assignment time."""
    class PositiveParameter(nengo.config.Parameter):
        def __set__(self, instance, value):
            # Reject anything that is not a positive number.
            if not isinstance(value, (int, float)) or value <= 0:
                raise AttributeError('value must be positive')
            super(PositiveParameter, self).__set__(instance, value)

    @nengo.config.configures(nengo.Ensemble)
    class TestConfigEnsemble(nengo.config.ConfigItem):
        number = PositiveParameter(1)

    model = nengo.Network()
    with model:
        a = nengo.Ensemble(50, 1)
        b = nengo.Ensemble(90, 1)

    class TestConfig(nengo.config.Config):
        config_items = [TestConfigEnsemble]

    config = TestConfig()
    # Valid assignment succeeds; invalid values raise from __set__.
    config[a].number = 3
    with pytest.raises(AttributeError):
        config[a].number = 0
    with pytest.raises(AttributeError):
        config[b].number = 'a'
def test_invalid_config():
    """Malformed Config subclasses must raise at instantiation.

    Note: the class name ``TestConfig`` is deliberately redefined for each
    scenario -- every block tests a different invalid ``config_items``.
    """
    @nengo.config.configures(nengo.Ensemble)
    class TestConfigEnsemble(nengo.config.ConfigItem):
        number = nengo.config.Parameter(1)

    # Not decorated with @configures -> invalid as a config item.
    class TestBadConfigConnection(nengo.config.ConfigItem):
        number = nengo.config.Parameter(1)

    # Missing config_items entirely.
    with pytest.raises(AttributeError):
        class TestConfig(nengo.config.Config):
            pass
        TestConfig()
    # config_items with non-ConfigItem entries.
    with pytest.raises(AttributeError):
        class TestConfig(nengo.config.Config):
            config_items = [1, 2, 3]
        TestConfig()
    # config_items containing an undecorated ConfigItem.
    with pytest.raises(AttributeError):
        class TestConfig(nengo.config.Config):
            config_items = [TestBadConfigConnection]
        TestConfig()
    # One bad item poisons an otherwise valid list.
    with pytest.raises(AttributeError):
        class TestConfig(nengo.config.Config):
            config_items = [TestConfigEnsemble, TestBadConfigConnection]
        TestConfig()
# Allow running this test module directly: turn on debug logging and
# delegate test collection/reporting to pytest.
if __name__ == '__main__':
    nengo.log(debug=True)
    pytest.main([__file__, '-v'])
|
gpl-3.0
| 283,863,634,523,770,460
| 29.568807
| 72
| 0.654862
| false
| 3.653509
| true
| false
| false
|
FedoraScientific/salome-geom
|
src/GEOM_SWIG/geomBuilder.py
|
1
|
644045
|
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2007-2014 CEA/DEN, EDF R&D, OPEN CASCADE
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# GEOM GEOM_SWIG : binding of C++ implementation with Python
# File : geomBuilder.py
# Author : Paul RASCLE, EDF
# Module : GEOM
"""
\namespace geomBuilder
\brief Module geomBuilder
"""
##
## @defgroup geomBuilder geomBuilder Python module
## @{
##
## @details
##
## By default, all functions of geomBuilder Python module do not publish
## resulting geometrical objects. This can be done in the Python script
## by means of \ref geomBuilder.geomBuilder.addToStudy() "addToStudy()"
## or \ref geomBuilder.geomBuilder.addToStudyInFather() "addToStudyInFather()"
## functions.
##
## However, it is possible to publish result data in the study
## automatically. For this, almost each function of
## \ref geomBuilder.geomBuilder "geomBuilder" class has
## an additional @a theName parameter (@c None by default).
## As soon as non-empty string value is passed to this parameter,
## the result object is published in the study automatically.
##
## For example, consider the following Python script:
##
## @code
## import salome
## from salome.geom import geomBuilder
## geompy = geomBuilder.New(salome.myStudy)
## box = geompy.MakeBoxDXDYDZ(100, 100, 100) # box is not published in the study yet
## geompy.addToStudy(box, "box") # explicit publishing
## @endcode
##
## Last two lines can be replaced by one-line instruction:
##
## @code
## box = geompy.MakeBoxDXDYDZ(100, 100, 100, theName="box") # box is published in the study with "box" name
## @endcode
##
## ... or simply
##
## @code
## box = geompy.MakeBoxDXDYDZ(100, 100, 100, "box") # box is published in the study with "box" name
## @endcode
##
## Note, that some functions produce more than one geometrical objects. For example,
## \ref geomBuilder.geomBuilder.GetNonBlocks() "GetNonBlocks()" function returns two objects:
## group of all non-hexa solids and group of all non-quad faces.
## For such functions it is possible to specify separate names for results.
##
## For example
##
## @code
## # create and publish cylinder
## cyl = geompy.MakeCylinderRH(100, 100, "cylinder")
## # get non blocks from cylinder
## g1, g2 = geompy.GetNonBlocks(cyl, "nonblock")
## @endcode
##
## Above example will publish both result compounds (first with non-hexa solids and
## second with non-quad faces) as two items, both named "nonblock".
## However, if second command is invoked as
##
## @code
## g1, g2 = geompy.GetNonBlocks(cyl, ("nonhexa", "nonquad"))
## @endcode
##
## ... the first compound will be published with "nonhexa" name, and second will be named "nonquad".
##
## Automatic publication of all results can be also enabled/disabled by means of the function
## \ref geomBuilder.geomBuilder.addToStudyAuto() "addToStudyAuto()". The automatic publishing
## is managed by the numeric parameter passed to this function:
## - if @a maxNbSubShapes = 0, automatic publishing is disabled.
## - if @a maxNbSubShapes = -1 (default), automatic publishing is enabled and
## maximum number of sub-shapes allowed for publishing is unlimited; any negative
## value passed as parameter has the same effect.
## - if @a maxNbSubShapes is any positive value, automatic publishing is enabled and
## maximum number of sub-shapes allowed for publishing is set to specified value.
##
## When automatic publishing is enabled, you even do not need to pass @a theName parameter
## to the functions creating objects, instead default names will be used. However, you
## can always change the behavior, by passing explicit name to the @a theName parameter
## and it will be used instead default one.
## The publishing of the collections of objects will be done according to the above
## mentioned rules (maximum allowed number of sub-shapes).
##
## For example:
##
## @code
## import salome
## from salome.geom import geomBuilder
## geompy = geomBuilder.New(salome.myStudy)
## geompy.addToStudyAuto() # enable automatic publication
## box = geompy.MakeBoxDXDYDZ(100, 100, 100)
## # the box is created and published in the study with default name
## geompy.addToStudyAuto(5) # set max allowed number of sub-shapes to 5
## vertices = geompy.SubShapeAll(box, geomBuilder.ShapeType['VERTEX'])
## # only 5 first vertices will be published, with default names
## print len(vertices)
## # note, that result value still contains all 8 vertices
## geompy.addToStudyAuto(-1) # disable automatic publication
## @endcode
##
## This feature can be used, for example, for debugging purposes.
##
## @note
## - Use automatic publication feature with caution. When it is enabled, any function of
## \ref geomBuilder.geomBuilder "geomBuilder" class publishes the results in the study,
## that can lead to the huge size of the study data tree.
## For example, repeating call of \ref geomBuilder.geomBuilder.SubShapeAll() "SubShapeAll()"
## command on the same main shape each time will publish all child objects, that will lead
## to a lot of duplicated items in the study.
## - Sub-shapes are automatically published as child items of the parent main shape in the study if main
## shape was also published before. Otherwise, sub-shapes are published as top-level objects.
## - Some functions of \ref geomBuilder.geomBuilder "geomBuilder" class do not have
## \a theName parameter (and, thus, do not support automatic publication).
## For example, some transformation operations like
## \ref geomBuilder.geomBuilder.TranslateDXDYDZ() "TranslateDXDYDZ()".
## Refer to the documentation to check if some function has such possibility.
##
## It is possible to customize the representation of the geometrical
## data in the data tree; this can be done by using folders. A folder can
## be created in the study tree using function
## \ref geomBuilder.geomBuilder.NewFolder() "NewFolder()"
## (by default it is created under the "Geometry" root object).
## As soon as folder is created, any published geometry object
## can be moved into it.
##
## For example:
##
## @code
## import salome
## from salome.geom import geomBuilder
## geompy = geomBuilder.New(salome.myStudy)
## box = geompy.MakeBoxDXDYDZ(100, 100, 100, "Box")
## # the box was created and published in the study
## folder = geompy.NewFolder("Primitives")
## # an empty "Primitives" folder was created under default "Geometry" root object
## geompy.PutToFolder(box, folder)
## # the box was moved into "Primitives" folder
## @endcode
##
## Subfolders are also can be created by specifying another folder as a parent:
##
## @code
## subfolder = geompy.NewFolder("3D", folder)
## # "3D" folder was created under "Primitives" folder
## @endcode
##
## @note
## - Folder container is just a representation layer object that
## deals with already published objects only. So, any geometry object
## should be published in the study (for example, with
## \ref geomBuilder.geomBuilder.PutToFolder() "addToStudy()" function)
## BEFORE moving it into any existing folder.
## - \ref geomBuilder.geomBuilder.PutToFolder() "PutToFolder()" function
## does not change physical position of geometry object in the study tree,
## it only affects on the representation of the data tree.
## - It is impossible to publish geometry object using any folder as father.
##
## \defgroup l1_publish_data
## \defgroup l1_geomBuilder_auxiliary
## \defgroup l1_geomBuilder_purpose
## @}
## @defgroup l1_publish_data Publishing results in SALOME study
## @defgroup l1_geomBuilder_auxiliary Auxiliary data structures and methods
## @defgroup l1_geomBuilder_purpose All package methods, grouped by their purpose
## @{
## @defgroup l2_import_export Importing/exporting geometrical objects
## @defgroup l2_creating Creating geometrical objects
## @{
## @defgroup l3_basic_go Creating Basic Geometric Objects
## @{
## @defgroup l4_curves Creating Curves
## @}
## @defgroup l3_3d_primitives Creating 3D Primitives
## @defgroup l3_complex Creating Complex Objects
## @defgroup l3_groups Working with groups
## @defgroup l3_blocks Building by blocks
## @{
## @defgroup l4_blocks_measure Check and Improve
## @}
## @defgroup l3_sketcher Sketcher
## @defgroup l3_advanced Creating Advanced Geometrical Objects
## @{
## @defgroup l4_decompose Decompose objects
## @defgroup l4_decompose_d Decompose objects deprecated methods
## @defgroup l4_access Access to sub-shapes by their unique IDs inside the main shape
## @defgroup l4_obtain Access to sub-shapes by a criteria
## @defgroup l4_advanced Advanced objects creation functions
## @}
## @}
## @defgroup l2_transforming Transforming geometrical objects
## @{
## @defgroup l3_basic_op Basic Operations
## @defgroup l3_boolean Boolean Operations
## @defgroup l3_transform Transformation Operations
## @defgroup l3_transform_d Transformation Operations deprecated methods
## @defgroup l3_local Local Operations (Fillet, Chamfer and other Features)
## @defgroup l3_blocks_op Blocks Operations
## @defgroup l3_healing Repairing Operations
## @defgroup l3_restore_ss Restore presentation parameters and a tree of sub-shapes
## @}
## @defgroup l2_measure Using measurement tools
## @defgroup l2_field Field on Geometry
## @}
# initialize SALOME session in try/except block
# to avoid problems in some cases, e.g. when generating documentation
try:
import salome
salome.salome_init()
from salome import *
except:
pass
from salome_notebook import *
import GEOM
import math
import os
import functools
from salome.geom.gsketcher import Sketcher3D, Sketcher2D, Polyline2D
# service function
def _toListOfNames(_names, _size=-1):
l = []
import types
if type(_names) in [types.ListType, types.TupleType]:
for i in _names: l.append(i)
elif _names:
l.append(_names)
if l and len(l) < _size:
for i in range(len(l), _size): l.append("%s_%d"%(l[0],i))
return l
# Decorator function to manage transactions for all geometric operations.
def ManageTransactions(theOpeName):
    """Decorator factory wrapping a method in a GEOM operation transaction.

    *theOpeName* names an attribute of ``self`` exposing StartOperation /
    FinishOperation / AbortOperation. The wrapped call is committed on
    success and aborted (with the exception re-raised) on any failure.
    """
    def MTDecorator(theFunction):
        # functools.wraps keeps the original name and docstring visible.
        @functools.wraps(theFunction)
        def OpenCallClose(self, *args, **kwargs):
            operation = getattr(self, theOpeName)
            operation.StartOperation()
            try:
                result = theFunction(self, *args, **kwargs)
                operation.FinishOperation()
                return result
            except:
                # Roll back, then propagate the original exception.
                operation.AbortOperation()
                raise
        return OpenCallClose
    return MTDecorator
## Raise an Error, containing the Method_name, if Operation is Failed
## @ingroup l1_geomBuilder_auxiliary
def RaiseIfFailed(Method_name, Operation):
    """Raise RuntimeError naming *Method_name* if *Operation* failed.

    An operation counts as failed when IsDone() returns 0 and the error
    code is anything other than "NOT_FOUND_ANY" (which is tolerated).
    """
    if Operation.IsDone() == 0 and Operation.GetErrorCode() != "NOT_FOUND_ANY":
        # Use the call form of raise: identical on Python 2 and required
        # on Python 3 (the original used the Py2-only statement form).
        raise RuntimeError(Method_name + " : " + Operation.GetErrorCode())
## Return list of variables value from salome notebook
## @ingroup l1_geomBuilder_auxiliary
def ParseParameters(*parameters):
    """Resolve salome-notebook variables among *parameters*.

    Strings are looked up in the notebook (raising RuntimeError for
    unknown variables); other values pass through. Nested lists are
    resolved recursively.

    Returns:
        list of resolved values with one extra final element — the
        ":"-joined string form of all original parameters — or, when
        nothing was resolved, just that joined string.
    """
    Result = []
    StringResult = []
    for parameter in parameters:
        if isinstance(parameter, list):
            # Recurse: resolved values come first, joined string is last.
            lResults = ParseParameters(*parameter)
            if len(lResults) > 0:
                Result.append(lResults[:-1])
                StringResult += lResults[-1].split(":")
        else:
            if isinstance(parameter, str):
                if notebook.isVariable(parameter):
                    Result.append(notebook.get(parameter))
                else:
                    # Call form of raise: valid on Python 2 and 3
                    # (original used the Py2-only statement form).
                    raise RuntimeError("Variable with name '" + parameter + "' doesn't exist!!!")
            else:
                Result.append(parameter)
            StringResult.append(str(parameter))
    if Result:
        Result.append(":".join(StringResult))
    else:
        Result = ":".join(StringResult)
    return Result
## Return list of variables value from salome notebook
## @ingroup l1_geomBuilder_auxiliary
def ParseList(list):
    """Resolve notebook variables in *list*, stringifying every entry.

    Returns a pair: the list of resolved values (all as strings) and the
    ":"-joined string form of the original entries.
    """
    values = []
    originals = []
    for parameter in list:
        if isinstance(parameter, str) and notebook.isVariable(parameter):
            values.append(str(notebook.get(parameter)))
        else:
            values.append(str(parameter))
        originals.append(str(parameter))
    return values, ":".join(originals)
## Return list of variables value from salome notebook
## @ingroup l1_geomBuilder_auxiliary
def ParseSketcherCommand(command):
    """Resolve notebook variables inside a ":"-separated sketcher command.

    Each section of *command* is a space-separated parameter list; the
    first token of a section (the command mnemonic) is kept verbatim.
    A parameter quoted with "'" must be a notebook variable and is
    replaced by its value; unknown variables raise RuntimeError.

    Returns:
        pair (resolved command, ":"-joined original parameter values).
        NOTE: the second element keeps a trailing ":" — preserved for
        backward compatibility with existing callers.
    """
    Result = ""
    StringResult = ""
    sections = command.split(":")
    for section in sections:
        parameters = section.split(" ")
        paramIndex = 1
        for parameter in parameters:
            if paramIndex > 1 and parameter.find("'") != -1:
                # Quoted token: must name a notebook variable.
                parameter = parameter.replace("'", "")
                if notebook.isVariable(parameter):
                    Result = Result + str(notebook.get(parameter)) + " "
                else:
                    # Call form of raise: valid on Python 2 and 3
                    # (original used the Py2-only statement form).
                    raise RuntimeError("Variable with name '" + parameter + "' doesn't exist!!!")
            else:
                Result = Result + str(parameter) + " "
            if paramIndex > 1:
                StringResult = StringResult + parameter
                StringResult = StringResult + ":"
            paramIndex = paramIndex + 1
        # Replace the trailing space of the section with a ":" separator.
        Result = Result[:len(Result)-1] + ":"
    # Drop the final ":" separator of the resolved command.
    Result = Result[:len(Result)-1]
    return Result, StringResult
## Helper function which can be used to pack the passed string to the byte data.
## Only '1' and '0' symbols are valid for the string. The missing bits are replaced by zeroes.
## If the string contains invalid symbol (neither '1' nor '0'), the function raises an exception.
## For example,
## \code
## val = PackData("10001110") # val = 0xAE
## val = PackData("1") # val = 0x80
## \endcode
## @param data unpacked data - a string containing '1' and '0' symbols
## @return data packed to the byte stream
## @ingroup l1_geomBuilder_auxiliary
def PackData(data):
"""
Helper function which can be used to pack the passed string to the byte data.
Only '1' an '0' symbols are valid for the string. The missing bits are replaced by zeroes.
If the string contains invalid symbol (neither '1' nor '0'), the function raises an exception.
Parameters:
data unpacked data - a string containing '1' and '0' symbols
Returns:
data packed to the byte stream
Example of usage:
val = PackData("10001110") # val = 0xAE
val = PackData("1") # val = 0x80
"""
bytes = len(data)/8
if len(data)%8: bytes += 1
res = ""
for b in range(bytes):
d = data[b*8:(b+1)*8]
val = 0
for i in range(8):
val *= 2
if i < len(d):
if d[i] == "1": val += 1
elif d[i] != "0":
raise "Invalid symbol %s" % d[i]
pass
pass
res += chr(val)
pass
return res
## Read bitmap texture from the text file.
## In that file, any non-zero symbol represents '1' opaque pixel of the bitmap.
## A zero symbol ('0') represents transparent pixel of the texture bitmap.
## The function returns width and height of the pixmap in pixels and byte stream representing
## texture bitmap itself.
##
## This function can be used to read the texture to the byte stream in order to pass it to
## the AddTexture() function of geomBuilder class.
## For example,
## \code
## from salome.geom import geomBuilder
## geompy = geomBuilder.New(salome.myStudy)
## texture = geompy.readtexture('mytexture.dat')
## texture = geompy.AddTexture(*texture)
## obj.SetMarkerTexture(texture)
## \endcode
## @param fname texture file name
## @return sequence of three values: texture's width, height in pixels and its byte stream
## @ingroup l1_geomBuilder_auxiliary
def ReadTexture(fname):
    """
    Read bitmap texture from the text file.
    In that file, any non-zero symbol represents '1' opaque pixel of the bitmap.
    A zero symbol ('0') represents transparent pixel of the texture bitmap.
    The function returns width and height of the pixmap in pixels and byte stream
    representing texture bitmap itself.

    This function can be used to read the texture to the byte stream in order to
    pass it to the AddTexture() function of geomBuilder class.

    Parameters:
        fname texture file name

    Returns:
        sequence of three values: texture's width, height in pixels and its byte
        stream; (0, 0, "") on any read/parse error (best-effort behaviour).

    Example of usage:
        from salome.geom import geomBuilder
        geompy = geomBuilder.New(salome.myStudy)
        texture = geompy.readtexture('mytexture.dat')
        texture = geompy.AddTexture(*texture)
        obj.SetMarkerTexture(texture)
    """
    try:
        # 'with' guarantees the file is closed even on a parse error.
        with open(fname) as f:
            lines = [l.strip() for l in f.readlines()]
        maxlen = 0
        if lines: maxlen = max([len(x) for x in lines])
        # '//' keeps integer semantics on both Python 2 and 3.
        lenbytes = maxlen // 8
        if maxlen % 8: lenbytes += 1
        bytedata = ""
        for line in lines:
            # Round the line length up to a whole number of bytes.
            if len(line) % 8:
                lenline = (len(line) // 8 + 1) * 8
            else:
                lenline = (len(line) // 8) * 8
            for i in range(lenline // 8):
                byte = ""
                for j in range(8):
                    # Any non-'0' character is treated as an opaque pixel.
                    if i*8 + j < len(line) and line[i*8 + j] != "0":
                        byte += "1"
                    else:
                        byte += "0"
                bytedata += PackData(byte)
            # Pad short lines so every row has the same byte width.
            for i in range(lenline // 8, lenbytes):
                bytedata += PackData("0")
        return lenbytes * 8, len(lines), bytedata
    except Exception:
        # Deliberate best-effort: any failure yields an empty texture.
        # (Narrowed from a bare 'except:' so Ctrl-C is not swallowed.)
        pass
    return 0, 0, ""
## Returns a long value from enumeration type
# Can be used for CORBA enumerator types like GEOM.shape_type
# @param theItem enumeration type
# @ingroup l1_geomBuilder_auxiliary
def EnumToLong(theItem):
    """
    Returns a long value from enumeration type
    Can be used for CORBA enumerator types like geomBuilder.ShapeType

    Parameters:
        theItem enumeration type (anything without a '_v' attribute is
                returned unchanged)
    """
    # CORBA enum instances expose their numeric value as '_v'.
    return getattr(theItem, "_v", theItem)
## Information about closed/unclosed state of shell or wire
# @ingroup l1_geomBuilder_auxiliary
class info:
    """
    Information about closed/unclosed state of shell or wire
    """
    # state could not be determined
    UNKNOWN  = 0
    # the shell/wire is closed
    CLOSED   = 1
    # the shell/wire is not closed
    UNCLOSED = 2
## Private class used to bind calls of plugin operations to geomBuilder
class PluginOperation:
    """Private callable binding a plugin operation function to geomBuilder.

    An instance stores an operations interface and a plugin function;
    calling the instance runs the function inside a transaction on that
    interface and raises (via RaiseIfFailed) if the operation failed.
    """
    def __init__(self, operation, function):
        self.operation = operation  # GEOM operations interface the function acts on
        self.function = function    # plugin function: called as function(operation, *args)
        pass
    @ManageTransactions("operation")
    def __call__(self, *args):
        res = self.function(self.operation, *args)
        # Convert an operation-level failure into a Python exception.
        RaiseIfFailed(self.function.__name__, self.operation)
        return res
# Warning: geom is a singleton
geom = None     # the unique geomBuilder/engine instance returned by __new__
engine = None   # externally supplied GEOM engine, if any (used by __new__)
doLcc = False   # when True, __new__ resolves the engine via lcc.FindOrLoadComponent
created = False # set once geomBuilder.__init__ has run its one-time setup
class geomBuilder(object, GEOM._objref_GEOM_Gen):
## Enumeration ShapeType as a dictionary. \n
## Topological types of shapes (like Open Cascade types). See GEOM::shape_type for details.
# @ingroup l1_geomBuilder_auxiliary
ShapeType = {"AUTO":-1, "COMPOUND":0, "COMPSOLID":1, "SOLID":2, "SHELL":3, "FACE":4, "WIRE":5, "EDGE":6, "VERTEX":7, "SHAPE":8}
## Kinds of shape in terms of <VAR>GEOM.GEOM_IKindOfShape.shape_kind</VAR> enumeration
# and a list of parameters, describing the shape.
# List of parameters, describing the shape:
# - COMPOUND: [nb_solids nb_faces nb_edges nb_vertices]
# - COMPSOLID: [nb_solids nb_faces nb_edges nb_vertices]
#
# - SHELL: [info.CLOSED / info.UNCLOSED nb_faces nb_edges nb_vertices]
#
# - WIRE: [info.CLOSED / info.UNCLOSED nb_edges nb_vertices]
#
# - SPHERE: [xc yc zc R]
# - CYLINDER: [xb yb zb dx dy dz R H]
# - BOX: [xc yc zc ax ay az]
# - ROTATED_BOX: [xc yc zc zx zy zz xx xy xz ax ay az]
# - TORUS: [xc yc zc dx dy dz R_1 R_2]
# - CONE: [xb yb zb dx dy dz R_1 R_2 H]
# - POLYHEDRON: [nb_faces nb_edges nb_vertices]
# - SOLID: [nb_faces nb_edges nb_vertices]
#
# - SPHERE2D: [xc yc zc R]
# - CYLINDER2D: [xb yb zb dx dy dz R H]
# - TORUS2D: [xc yc zc dx dy dz R_1 R_2]
# - CONE2D: [xc yc zc dx dy dz R_1 R_2 H]
# - DISK_CIRCLE: [xc yc zc dx dy dz R]
# - DISK_ELLIPSE: [xc yc zc dx dy dz R_1 R_2]
# - POLYGON: [xo yo zo dx dy dz nb_edges nb_vertices]
# - PLANE: [xo yo zo dx dy dz]
# - PLANAR: [xo yo zo dx dy dz nb_edges nb_vertices]
# - FACE: [nb_edges nb_vertices]
#
# - CIRCLE: [xc yc zc dx dy dz R]
# - ARC_CIRCLE: [xc yc zc dx dy dz R x1 y1 z1 x2 y2 z2]
# - ELLIPSE: [xc yc zc dx dy dz R_1 R_2]
# - ARC_ELLIPSE: [xc yc zc dx dy dz R_1 R_2 x1 y1 z1 x2 y2 z2]
# - LINE: [xo yo zo dx dy dz]
# - SEGMENT: [x1 y1 z1 x2 y2 z2]
# - EDGE: [nb_vertices]
#
# - VERTEX: [x y z]
# @ingroup l1_geomBuilder_auxiliary
kind = GEOM.GEOM_IKindOfShape
    def __new__(cls):
        """Singleton constructor: return the per-process GEOM engine.

        The engine is either taken from the module-level 'engine' global,
        resolved through lcc.FindOrLoadComponent (which re-enters this
        __new__ — hence the doLcc guard against recursion), or created as
        a plain geomBuilder instance. Whatever is found/created is cached
        in the module-level 'geom' global and returned on every call.
        """
        global engine
        global geom
        global doLcc
        global created
        #print "==== __new__ ", engine, geom, doLcc, created
        if geom is None:
            # geom engine is either retrieved from engine, or created
            geom = engine
            # Following test avoids a recursive loop
            if doLcc:
                if geom is not None:
                    # geom engine not created: existing engine found
                    doLcc = False
                if doLcc and not created:
                    doLcc = False
                    # FindOrLoadComponent called:
                    # 1. CORBA resolution of server
                    # 2. the __new__ method is called again
                    #print "==== FindOrLoadComponent ", engine, geom, doLcc, created
                    geom = lcc.FindOrLoadComponent( "FactoryServer", "GEOM" )
                    #print "====1 ",geom
            else:
                # FindOrLoadComponent not called
                if geom is None:
                    # geomBuilder instance is created from lcc.FindOrLoadComponent
                    #print "==== super ", engine, geom, doLcc, created
                    geom = super(geomBuilder,cls).__new__(cls)
                    #print "====2 ",geom
                else:
                    # geom engine not created: existing engine found
                    #print "==== existing ", engine, geom, doLcc, created
                    pass
            #print "return geom 1 ", geom
            return geom
        #print "return geom 2 ", geom
        return geom
    def __init__(self):
        """One-time initialization of the geomBuilder singleton.

        Because __new__ may hand back an already-initialized instance,
        the body is guarded by the module-level 'created' flag. The
        operation interfaces are only reset to None here; they are filled
        in by init_geom() once a study is attached.
        """
        global created
        #print "-------- geomBuilder __init__ --- ", created, self
        if not created:
            created = True
            GEOM._objref_GEOM_Gen.__init__(self)
            self.myMaxNbSubShapesAllowed = 0 # auto-publishing is disabled by default
            self.myBuilder = None   # SALOMEDS study builder (set in init_geom)
            self.myStudyId = 0      # current study id (set in init_geom)
            self.father    = None   # "GEOM" component in the study (set in init_geom)
            # Operation interfaces, all populated by init_geom():
            self.BasicOp  = None
            self.CurvesOp = None
            self.PrimOp   = None
            self.ShapesOp = None
            self.HealOp   = None
            self.InsertOp = None
            self.BoolOp   = None
            self.TrsfOp   = None
            self.LocalOp  = None
            self.MeasuOp  = None
            self.BlocksOp = None
            self.GroupOp  = None
            self.FieldOp  = None
            pass
## Process object publication in the study, as follows:
# - if @a theName is specified (not None), the object is published in the study
# with this name, not taking into account "auto-publishing" option;
# - if @a theName is NOT specified, the object is published in the study
# (using default name, which can be customized using @a theDefaultName parameter)
# only if auto-publishing is switched on.
#
# @param theObj object, a subject for publishing
# @param theName object name for study
# @param theDefaultName default name for the auto-publishing
#
# @sa addToStudyAuto()
def _autoPublish(self, theObj, theName, theDefaultName="noname"):
# ---
def _item_name(_names, _defname, _idx=-1):
if not _names: _names = _defname
if type(_names) in [types.ListType, types.TupleType]:
if _idx >= 0:
if _idx >= len(_names) or not _names[_idx]:
if type(_defname) not in [types.ListType, types.TupleType]:
_name = "%s_%d"%(_defname, _idx+1)
elif len(_defname) > 0 and _idx >= 0 and _idx < len(_defname):
_name = _defname[_idx]
else:
_name = "%noname_%d"%(dn, _idx+1)
pass
else:
_name = _names[_idx]
pass
else:
# must be wrong usage
_name = _names[0]
pass
else:
if _idx >= 0:
_name = "%s_%d"%(_names, _idx+1)
else:
_name = _names
pass
return _name
# ---
def _publish( _name, _obj ):
fatherObj = None
if isinstance( _obj, GEOM._objref_GEOM_Field ):
fatherObj = _obj.GetShape()
elif isinstance( _obj, GEOM._objref_GEOM_FieldStep ):
fatherObj = _obj.GetField()
elif not _obj.IsMainShape():
fatherObj = _obj.GetMainShape()
pass
if fatherObj and fatherObj.GetStudyEntry():
self.addToStudyInFather(fatherObj, _obj, _name)
else:
self.addToStudy(_obj, _name)
pass
return
# ---
if not theObj:
return # null object
if not theName and not self.myMaxNbSubShapesAllowed:
return # nothing to do: auto-publishing is disabled
if not theName and not theDefaultName:
return # neither theName nor theDefaultName is given
import types
if type(theObj) in [types.ListType, types.TupleType]:
# list of objects is being published
idx = 0
for obj in theObj:
if not obj: continue # bad object
name = _item_name(theName, theDefaultName, idx)
_publish( name, obj )
idx = idx+1
if not theName and idx == self.myMaxNbSubShapesAllowed: break
pass
pass
else:
# single object is published
name = _item_name(theName, theDefaultName)
_publish( name, theObj )
pass
## @addtogroup l1_geomBuilder_auxiliary
## @{
    def init_geom(self,theStudy):
        """Attach this engine to a SALOME study.

        Finds (or creates, with name "Geometry" and its icon) the "GEOM"
        component in the study, retrieves every operations interface for
        the study id, and roots the use-case tree at the GEOM component.

        Parameters:
            theStudy SALOMEDS study to work with
        """
        self.myStudy = theStudy
        self.myStudyId = self.myStudy._get_StudyId()
        self.myBuilder = self.myStudy.NewBuilder()
        self.father = self.myStudy.FindComponent("GEOM")
        # make the salome notebook use the same study
        notebook.myStudy = theStudy
        if self.father is None:
            # first use of GEOM in this study: create the component
            # with its displayed name and object-browser icon
            self.father = self.myBuilder.NewComponent("GEOM")
            A1 = self.myBuilder.FindOrCreateAttribute(self.father, "AttributeName")
            FName = A1._narrow(SALOMEDS.AttributeName)
            FName.SetValue("Geometry")
            A2 = self.myBuilder.FindOrCreateAttribute(self.father, "AttributePixMap")
            aPixmap = A2._narrow(SALOMEDS.AttributePixMap)
            aPixmap.SetPixMap("ICON_OBJBROWSER_Geometry")
            self.myBuilder.DefineComponentInstance(self.father,self)
            pass
        # retrieve all operation interfaces for this study
        self.BasicOp  = self.GetIBasicOperations    (self.myStudyId)
        self.CurvesOp = self.GetICurvesOperations   (self.myStudyId)
        self.PrimOp   = self.GetI3DPrimOperations   (self.myStudyId)
        self.ShapesOp = self.GetIShapesOperations   (self.myStudyId)
        self.HealOp   = self.GetIHealingOperations  (self.myStudyId)
        self.InsertOp = self.GetIInsertOperations   (self.myStudyId)
        self.BoolOp   = self.GetIBooleanOperations  (self.myStudyId)
        self.TrsfOp   = self.GetITransformOperations(self.myStudyId)
        self.LocalOp  = self.GetILocalOperations    (self.myStudyId)
        self.MeasuOp  = self.GetIMeasureOperations  (self.myStudyId)
        self.BlocksOp = self.GetIBlocksOperations   (self.myStudyId)
        self.GroupOp  = self.GetIGroupOperations    (self.myStudyId)
        self.FieldOp  = self.GetIFieldOperations    (self.myStudyId)
        # set GEOM as root in the use case tree
        self.myUseCaseBuilder = self.myStudy.GetUseCaseBuilder()
        self.myUseCaseBuilder.SetRootCurrent()
        self.myUseCaseBuilder.Append(self.father)
        pass
def GetPluginOperations(self, studyID, libraryName):
op = GEOM._objref_GEOM_Gen.GetPluginOperations(self, studyID, libraryName)
return op
## Enable / disable results auto-publishing
#
# The automatic publishing is managed in the following way:
# - if @a maxNbSubShapes = 0, automatic publishing is disabled.
# - if @a maxNbSubShapes = -1 (default), automatic publishing is enabled and
# maximum number of sub-shapes allowed for publishing is unlimited; any negative
# value passed as parameter has the same effect.
# - if @a maxNbSubShapes is any positive value, automatic publishing is enabled and
# maximum number of sub-shapes allowed for publishing is set to specified value.
#
# @param maxNbSubShapes maximum number of sub-shapes allowed for publishing.
# @ingroup l1_publish_data
def addToStudyAuto(self, maxNbSubShapes=-1):
"""
Enable / disable results auto-publishing
The automatic publishing is managed in the following way:
- if @a maxNbSubShapes = 0, automatic publishing is disabled;
- if @a maxNbSubShapes = -1 (default), automatic publishing is enabled and
maximum number of sub-shapes allowed for publishing is unlimited; any negative
value passed as parameter has the same effect.
- if @a maxNbSubShapes is any positive value, automatic publishing is enabled and
maximum number of sub-shapes allowed for publishing is set to this value.
Parameters:
maxNbSubShapes maximum number of sub-shapes allowed for publishing.
Example of usage:
geompy.addToStudyAuto() # enable auto-publishing
geompy.MakeBoxDXDYDZ(100) # box is created and published with default name
geompy.addToStudyAuto(0) # disable auto-publishing
"""
self.myMaxNbSubShapesAllowed = max(-1, maxNbSubShapes)
pass
## Dump component to the Python script
# This method overrides IDL function to allow default values for the parameters.
    def DumpPython(self, theStudy, theIsPublished=True, theIsMultiFile=True):
        """
        Dump component to the Python script
        This method overrides IDL function to allow default values for the parameters.

        Parameters:
            theStudy       study to dump
            theIsPublished, theIsMultiFile
                           flags forwarded unchanged to the IDL DumpPython
        """
        return GEOM._objref_GEOM_Gen.DumpPython(self, theStudy, theIsPublished, theIsMultiFile)
## Get name for sub-shape aSubObj of shape aMainObj
#
# @ref swig_SubShapeName "Example"
@ManageTransactions("ShapesOp")
def SubShapeName(self,aSubObj, aMainObj):
"""
Get name for sub-shape aSubObj of shape aMainObj
"""
# Example: see GEOM_TestAll.py
#aSubId = orb.object_to_string(aSubObj)
#aMainId = orb.object_to_string(aMainObj)
#index = gg.getIndexTopology(aSubId, aMainId)
#name = gg.getShapeTypeString(aSubId) + "_%d"%(index)
index = self.ShapesOp.GetTopologyIndex(aMainObj, aSubObj)
name = self.ShapesOp.GetShapeTypeString(aSubObj) + "_%d"%(index)
return name
## Publish in study aShape with name aName
#
# \param aShape the shape to be published
# \param aName the name for the shape
# \param doRestoreSubShapes if True, finds and publishes also
# sub-shapes of <VAR>aShape</VAR>, corresponding to its arguments
# and published sub-shapes of arguments
# \param theArgs,theFindMethod,theInheritFirstArg see RestoreSubShapes() for
# these arguments description
# \return study entry of the published shape in form of string
#
# @ingroup l1_publish_data
# @ref swig_all_addtostudy "Example"
def addToStudy(self, aShape, aName, doRestoreSubShapes=False,
theArgs=[], theFindMethod=GEOM.FSM_GetInPlace, theInheritFirstArg=False):
"""
Publish in study aShape with name aName
Parameters:
aShape the shape to be published
aName the name for the shape
doRestoreSubShapes if True, finds and publishes also
sub-shapes of aShape, corresponding to its arguments
and published sub-shapes of arguments
theArgs,theFindMethod,theInheritFirstArg see geompy.RestoreSubShapes() for
these arguments description
Returns:
study entry of the published shape in form of string
Example of usage:
id_block1 = geompy.addToStudy(Block1, "Block 1")
"""
# Example: see GEOM_TestAll.py
try:
aSObject = self.AddInStudy(self.myStudy, aShape, aName, None)
if aSObject and aName: aSObject.SetAttrString("AttributeName", aName)
if doRestoreSubShapes:
self.RestoreSubShapesSO(self.myStudy, aSObject, theArgs,
theFindMethod, theInheritFirstArg, True )
except:
print "addToStudy() failed"
return ""
return aShape.GetStudyEntry()
## Publish in study aShape with name aName as sub-object of previously published aFather
# \param aFather previously published object
# \param aShape the shape to be published as sub-object of <VAR>aFather</VAR>
# \param aName the name for the shape
#
# \return study entry of the published shape in form of string
#
# @ingroup l1_publish_data
# @ref swig_all_addtostudyInFather "Example"
def addToStudyInFather(self, aFather, aShape, aName):
"""
Publish in study aShape with name aName as sub-object of previously published aFather
Parameters:
aFather previously published object
aShape the shape to be published as sub-object of aFather
aName the name for the shape
Returns:
study entry of the published shape in form of string
"""
# Example: see GEOM_TestAll.py
try:
aSObject = self.AddInStudy(self.myStudy, aShape, aName, aFather)
if aSObject and aName: aSObject.SetAttrString("AttributeName", aName)
except:
print "addToStudyInFather() failed"
return ""
return aShape.GetStudyEntry()
## Unpublish object in study
#
# \param obj the object to be unpublished
def hideInStudy(self, obj):
    """
    Unpublish object in study

    Parameters:
        obj the object to be unpublished
    """
    # Resolve the study object corresponding to the given CORBA object
    # through its IOR string.
    ior = salome.orb.object_to_string(obj)
    aSObject = self.myStudy.FindObjectIOR(ior)
    if aSObject is not None:
        # Mark the object itself as non-drawable, i.e. hidden in viewers.
        genericAttribute = self.myBuilder.FindOrCreateAttribute(aSObject, "AttributeDrawable")
        drwAttribute = genericAttribute._narrow(SALOMEDS.AttributeDrawable)
        drwAttribute.SetDrawable(False)
        # hide references if any: every study object referencing the
        # hidden one is made non-drawable as well.
        vso = self.myStudy.FindDependances(aSObject);
        for refObj in vso :
            genericAttribute = self.myBuilder.FindOrCreateAttribute(refObj, "AttributeDrawable")
            drwAttribute = genericAttribute._narrow(SALOMEDS.AttributeDrawable)
            drwAttribute.SetDrawable(False)
            pass
        pass
# end of l1_geomBuilder_auxiliary
## @}
## @addtogroup l3_restore_ss
## @{
## Publish sub-shapes, standing for arguments and sub-shapes of arguments
# To be used from python scripts out of addToStudy() (non-default usage)
# \param theObject published GEOM.GEOM_Object, arguments of which will be published
# \param theArgs list of GEOM.GEOM_Object, operation arguments to be published.
# If this list is empty, all operation arguments will be published
# \param theFindMethod method to search sub-shapes, corresponding to arguments and
# their sub-shapes. Value from enumeration GEOM.find_shape_method.
# \param theInheritFirstArg set properties of the first argument for <VAR>theObject</VAR>.
# Do not publish sub-shapes in place of arguments, but only
# in place of sub-shapes of the first argument,
# because the whole shape corresponds to the first argument.
# Mainly to be used after transformations, but it also can be
# useful after partition with one object shape, and some other
# operations, where only the first argument has to be considered.
# If theObject has only one argument shape, this flag is automatically
# considered as True, not regarding really passed value.
# \param theAddPrefix add prefix "from_" to names of restored sub-shapes,
# and prefix "from_subshapes_of_" to names of partially restored sub-shapes.
# \return list of published sub-shapes
#
# @ref tui_restore_prs_params "Example"
def RestoreSubShapes (self, theObject, theArgs=None, theFindMethod=GEOM.FSM_GetInPlace,
                      theInheritFirstArg=False, theAddPrefix=True):
    """
    Publish sub-shapes, standing for arguments and sub-shapes of arguments
    To be used from python scripts out of geompy.addToStudy (non-default usage)

    Parameters:
        theObject published GEOM.GEOM_Object, arguments of which will be published
        theArgs   list of GEOM.GEOM_Object, operation arguments to be published.
                  If this list is empty or None (default), all operation
                  arguments will be published
        theFindMethod method to search sub-shapes, corresponding to arguments and
                      their sub-shapes. Value from enumeration GEOM.find_shape_method.
        theInheritFirstArg set properties of the first argument for theObject.
                           Do not publish sub-shapes in place of arguments, but only
                           in place of sub-shapes of the first argument,
                           because the whole shape corresponds to the first argument.
                           Mainly to be used after transformations, but it also can be
                           useful after partition with one object shape, and some other
                           operations, where only the first argument has to be considered.
                           If theObject has only one argument shape, this flag is automatically
                           considered as True, not regarding really passed value.
        theAddPrefix add prefix "from_" to names of restored sub-shapes,
                     and prefix "from_subshapes_of_" to names of partially restored sub-shapes.

    Returns:
        list of published sub-shapes
    """
    # Example: see GEOM_TestAll.py
    # Use a None sentinel instead of a mutable [] default argument
    # (a shared list default persists across calls).
    if theArgs is None:
        theArgs = []
    return self.RestoreSubShapesO(self.myStudy, theObject, theArgs,
                                  theFindMethod, theInheritFirstArg, theAddPrefix)
## Publish sub-shapes, standing for arguments and sub-shapes of arguments
# To be used from python scripts out of addToStudy() (non-default usage)
# \param theObject published GEOM.GEOM_Object, arguments of which will be published
# \param theArgs list of GEOM.GEOM_Object, operation arguments to be published.
# If this list is empty, all operation arguments will be published
# \param theFindMethod method to search sub-shapes, corresponding to arguments and
# their sub-shapes. Value from enumeration GEOM::find_shape_method.
# \param theInheritFirstArg set properties of the first argument for <VAR>theObject</VAR>.
# Do not publish sub-shapes in place of arguments, but only
# in place of sub-shapes of the first argument,
# because the whole shape corresponds to the first argument.
# Mainly to be used after transformations, but it also can be
# useful after partition with one object shape, and some other
# operations, where only the first argument has to be considered.
# If theObject has only one argument shape, this flag is automatically
# considered as True, not regarding really passed value.
# \param theAddPrefix add prefix "from_" to names of restored sub-shapes,
# and prefix "from_subshapes_of_" to names of partially restored sub-shapes.
# \return list of published sub-shapes
#
# @ref tui_restore_prs_params "Example"
def RestoreGivenSubShapes (self, theObject, theArgs=None, theFindMethod=GEOM.FSM_GetInPlace,
                           theInheritFirstArg=False, theAddPrefix=True):
    """
    Publish sub-shapes, standing for arguments and sub-shapes of arguments
    To be used from python scripts out of geompy.addToStudy() (non-default usage)

    Parameters:
        theObject published GEOM.GEOM_Object, arguments of which will be published
        theArgs   list of GEOM.GEOM_Object, operation arguments to be published.
                  If this list is empty or None (default), all operation
                  arguments will be published
        theFindMethod method to search sub-shapes, corresponding to arguments and
                      their sub-shapes. Value from enumeration GEOM::find_shape_method.
        theInheritFirstArg set properties of the first argument for theObject.
                           Do not publish sub-shapes in place of arguments, but only
                           in place of sub-shapes of the first argument,
                           because the whole shape corresponds to the first argument.
                           Mainly to be used after transformations, but it also can be
                           useful after partition with one object shape, and some other
                           operations, where only the first argument has to be considered.
                           If theObject has only one argument shape, this flag is automatically
                           considered as True, not regarding really passed value.
        theAddPrefix add prefix "from_" to names of restored sub-shapes,
                     and prefix "from_subshapes_of_" to names of partially restored sub-shapes.

    Returns:
        list of published sub-shapes
    """
    # Example: see GEOM_TestAll.py
    # Use a None sentinel instead of a mutable [] default argument
    # (a shared list default persists across calls).
    if theArgs is None:
        theArgs = []
    return self.RestoreGivenSubShapesO(self.myStudy, theObject, theArgs,
                                       theFindMethod, theInheritFirstArg, theAddPrefix)
# end of l3_restore_ss
## @}
## @addtogroup l3_basic_go
## @{
## Create point by three coordinates.
# @param theX The X coordinate of the point.
# @param theY The Y coordinate of the point.
# @param theZ The Z coordinate of the point.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created point.
#
# @ref tui_creation_point "Example"
@ManageTransactions("BasicOp")
def MakeVertex(self, theX, theY, theZ, theName=None):
    """
    Create a point with the given X, Y and Z coordinates.

    Parameters:
        theX, theY, theZ  Coordinates of the point.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created point.
    """
    # Example: see GEOM_TestAll.py
    theX, theY, theZ, aParams = ParseParameters(theX, theY, theZ)
    aPoint = self.BasicOp.MakePointXYZ(theX, theY, theZ)
    RaiseIfFailed("MakePointXYZ", self.BasicOp)
    aPoint.SetParameters(aParams)
    self._autoPublish(aPoint, theName, "vertex")
    return aPoint
## Create a point, distant from the referenced point
# on the given distances along the coordinate axes.
# @param theReference The referenced point.
# @param theX Displacement from the referenced point along OX axis.
# @param theY Displacement from the referenced point along OY axis.
# @param theZ Displacement from the referenced point along OZ axis.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created point.
#
# @ref tui_creation_point "Example"
@ManageTransactions("BasicOp")
def MakeVertexWithRef(self, theReference, theX, theY, theZ, theName=None):
    """
    Create a point at the given displacements from a reference point
    along the coordinate axes.

    Parameters:
        theReference  The referenced point.
        theX, theY, theZ  Displacements from the referenced point
                          along OX, OY and OZ respectively.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created point.
    """
    # Example: see GEOM_TestAll.py
    theX, theY, theZ, aParams = ParseParameters(theX, theY, theZ)
    aPoint = self.BasicOp.MakePointWithReference(theReference, theX, theY, theZ)
    RaiseIfFailed("MakePointWithReference", self.BasicOp)
    aPoint.SetParameters(aParams)
    self._autoPublish(aPoint, theName, "vertex")
    return aPoint
## Create a point, corresponding to the given parameter on the given curve.
# @param theRefCurve The referenced curve.
# @param theParameter Value of parameter on the referenced curve.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created point.
#
# @ref tui_creation_point "Example"
@ManageTransactions("BasicOp")
def MakeVertexOnCurve(self, theRefCurve, theParameter, theName=None):
    """
    Create a point at the given parameter of the given curve.

    Parameters:
        theRefCurve   The referenced curve.
        theParameter  Value of parameter on the referenced curve.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created point.

    Example of usage:
        p_on_arc = geompy.MakeVertexOnCurve(Arc, 0.25)
    """
    # Example: see GEOM_TestAll.py
    theParameter, aParams = ParseParameters(theParameter)
    aPoint = self.BasicOp.MakePointOnCurve(theRefCurve, theParameter)
    RaiseIfFailed("MakePointOnCurve", self.BasicOp)
    aPoint.SetParameters(aParams)
    self._autoPublish(aPoint, theName, "vertex")
    return aPoint
## Create a point by projecting the given coordinates onto the given curve
# @param theRefCurve The referenced curve.
# @param theX X-coordinate in 3D space
# @param theY Y-coordinate in 3D space
# @param theZ Z-coordinate in 3D space
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created point.
#
# @ref tui_creation_point "Example"
@ManageTransactions("BasicOp")
def MakeVertexOnCurveByCoord(self, theRefCurve, theX, theY, theZ, theName=None):
    """
    Create a point by projecting the given 3D coordinates onto the given curve.

    Parameters:
        theRefCurve  The referenced curve.
        theX, theY, theZ  Coordinates in 3D space to be projected.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created point.

    Example of usage:
        p_on_arc3 = geompy.MakeVertexOnCurveByCoord(Arc, 100, -10, 10)
    """
    # Example: see GEOM_TestAll.py
    theX, theY, theZ, aParams = ParseParameters(theX, theY, theZ)
    aPoint = self.BasicOp.MakePointOnCurveByCoord(theRefCurve, theX, theY, theZ)
    RaiseIfFailed("MakeVertexOnCurveByCoord", self.BasicOp)
    aPoint.SetParameters(aParams)
    self._autoPublish(aPoint, theName, "vertex")
    return aPoint
## Create a point, corresponding to the given length on the given curve.
# @param theRefCurve The referenced curve.
# @param theLength Length on the referenced curve. It can be negative.
# @param theStartPoint Point allowing to choose the direction for the calculation
# of the length. If None, start from the first point of theRefCurve.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created point.
#
# @ref tui_creation_point "Example"
@ManageTransactions("BasicOp")
def MakeVertexOnCurveByLength(self, theRefCurve, theLength, theStartPoint=None, theName=None):
    """
    Create a point at the given length along the given curve.

    Parameters:
        theRefCurve    The referenced curve.
        theLength      Length on the referenced curve. It can be negative.
        theStartPoint  Point selecting the direction for the length
                       computation. If None, the first point of
                       theRefCurve is used.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created point.
    """
    # Example: see GEOM_TestAll.py
    theLength, aParams = ParseParameters(theLength)
    aPoint = self.BasicOp.MakePointOnCurveByLength(theRefCurve, theLength, theStartPoint)
    RaiseIfFailed("MakePointOnCurveByLength", self.BasicOp)
    aPoint.SetParameters(aParams)
    self._autoPublish(aPoint, theName, "vertex")
    return aPoint
## Create a point, corresponding to the given parameters on the
# given surface.
# @param theRefSurf The referenced surface.
# @param theUParameter Value of U-parameter on the referenced surface.
# @param theVParameter Value of V-parameter on the referenced surface.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created point.
#
# @ref swig_MakeVertexOnSurface "Example"
@ManageTransactions("BasicOp")
def MakeVertexOnSurface(self, theRefSurf, theUParameter, theVParameter, theName=None):
    """
    Create a point at the given (U, V) parameters of the given surface.

    Parameters:
        theRefSurf     The referenced surface.
        theUParameter  Value of U-parameter on the referenced surface.
        theVParameter  Value of V-parameter on the referenced surface.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created point.

    Example of usage:
        p_on_face = geompy.MakeVertexOnSurface(Face, 0.1, 0.8)
    """
    # Example: see GEOM_TestAll.py
    theUParameter, theVParameter, aParams = ParseParameters(theUParameter, theVParameter)
    aPoint = self.BasicOp.MakePointOnSurface(theRefSurf, theUParameter, theVParameter)
    RaiseIfFailed("MakePointOnSurface", self.BasicOp)
    aPoint.SetParameters(aParams)
    self._autoPublish(aPoint, theName, "vertex")
    return aPoint
## Create a point by projecting the given coordinates onto the given surface
# @param theRefSurf The referenced surface.
# @param theX X-coordinate in 3D space
# @param theY Y-coordinate in 3D space
# @param theZ Z-coordinate in 3D space
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created point.
#
# @ref swig_MakeVertexOnSurfaceByCoord "Example"
@ManageTransactions("BasicOp")
def MakeVertexOnSurfaceByCoord(self, theRefSurf, theX, theY, theZ, theName=None):
    """
    Create a point by projecting the given 3D coordinates onto the given surface.

    Parameters:
        theRefSurf  The referenced surface.
        theX, theY, theZ  Coordinates in 3D space to be projected.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created point.

    Example of usage:
        p_on_face2 = geompy.MakeVertexOnSurfaceByCoord(Face, 0., 0., 0.)
    """
    # Example: see GEOM_TestAll.py
    theX, theY, theZ, aParams = ParseParameters(theX, theY, theZ)
    aPoint = self.BasicOp.MakePointOnSurfaceByCoord(theRefSurf, theX, theY, theZ)
    RaiseIfFailed("MakeVertexOnSurfaceByCoord", self.BasicOp)
    aPoint.SetParameters(aParams)
    self._autoPublish(aPoint, theName, "vertex")
    return aPoint
## Create a point, which lays on the given face.
# The point will lay in arbitrary place of the face.
# The only condition on it is a non-zero distance to the face boundary.
# Such point can be used to uniquely identify the face inside any
# shape in case, when the shape does not contain overlapped faces.
# @param theFace The referenced face.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created point.
#
# @ref swig_MakeVertexInsideFace "Example"
@ManageTransactions("BasicOp")
def MakeVertexInsideFace (self, theFace, theName=None):
    """
    Create a point lying on the given face, at an arbitrary location
    with a non-zero distance to the face boundary.

    Such a point can be used to uniquely identify the face inside any
    shape, provided the shape contains no overlapped faces.

    Parameters:
        theFace  The referenced face.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created point.

    Example of usage:
        p_on_face = geompy.MakeVertexInsideFace(Face)
    """
    # Example: see GEOM_TestAll.py
    aPoint = self.BasicOp.MakePointOnFace(theFace)
    RaiseIfFailed("MakeVertexInsideFace", self.BasicOp)
    self._autoPublish(aPoint, theName, "vertex")
    return aPoint
## Create a point on intersection of two lines.
# @param theRefLine1, theRefLine2 The referenced lines.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created point.
#
# @ref swig_MakeVertexOnLinesIntersection "Example"
@ManageTransactions("BasicOp")
def MakeVertexOnLinesIntersection(self, theRefLine1, theRefLine2, theName=None):
    """
    Create a point at the intersection of two lines.

    Parameters:
        theRefLine1, theRefLine2  The referenced lines.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created point.
    """
    # Example: see GEOM_TestAll.py
    aPoint = self.BasicOp.MakePointOnLinesIntersection(theRefLine1, theRefLine2)
    RaiseIfFailed("MakePointOnLinesIntersection", self.BasicOp)
    self._autoPublish(aPoint, theName, "vertex")
    return aPoint
## Create a tangent, corresponding to the given parameter on the given curve.
# @param theRefCurve The referenced curve.
# @param theParameter Value of parameter on the referenced curve.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created tangent.
#
# @ref swig_MakeTangentOnCurve "Example"
@ManageTransactions("BasicOp")
def MakeTangentOnCurve(self, theRefCurve, theParameter, theName=None):
    """
    Create a tangent at the given parameter of the given curve.

    Parameters:
        theRefCurve   The referenced curve.
        theParameter  Value of parameter on the referenced curve.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created tangent.

    Example of usage:
        tan_on_arc = geompy.MakeTangentOnCurve(Arc, 0.7)
    """
    aTangent = self.BasicOp.MakeTangentOnCurve(theRefCurve, theParameter)
    RaiseIfFailed("MakeTangentOnCurve", self.BasicOp)
    self._autoPublish(aTangent, theName, "tangent")
    return aTangent
## Create a tangent plane, corresponding to the given parameter on the given face.
# @param theFace The face for which tangent plane should be built.
# @param theParameterV vertical value of the center point (0.0 - 1.0).
# @param theParameterU horizontal value of the center point (0.0 - 1.0).
# @param theTrimSize the size of plane.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created tangent.
#
# @ref swig_MakeTangentPlaneOnFace "Example"
@ManageTransactions("BasicOp")
def MakeTangentPlaneOnFace(self, theFace, theParameterU, theParameterV, theTrimSize, theName=None):
    """
    Create a plane tangent to the given face at the given (U, V) parameters.

    Parameters:
        theFace        The face for which the tangent plane is built.
        theParameterU  Horizontal value of the center point (0.0 - 1.0).
        theParameterV  Vertical value of the center point (0.0 - 1.0).
        theTrimSize    The size of the plane.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created tangent.

    Example of usage:
        an_on_face = geompy.MakeTangentPlaneOnFace(tan_extrusion, 0.7, 0.5, 150)
    """
    aPlane = self.BasicOp.MakeTangentPlaneOnFace(theFace, theParameterU, theParameterV, theTrimSize)
    RaiseIfFailed("MakeTangentPlaneOnFace", self.BasicOp)
    self._autoPublish(aPlane, theName, "tangent")
    return aPlane
## Create a vector with the given components.
# @param theDX X component of the vector.
# @param theDY Y component of the vector.
# @param theDZ Z component of the vector.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created vector.
#
# @ref tui_creation_vector "Example"
@ManageTransactions("BasicOp")
def MakeVectorDXDYDZ(self, theDX, theDY, theDZ, theName=None):
    """
    Create a vector with the given components.

    Parameters:
        theDX, theDY, theDZ  X, Y and Z components of the vector.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created vector.
    """
    # Example: see GEOM_TestAll.py
    theDX, theDY, theDZ, aParams = ParseParameters(theDX, theDY, theDZ)
    aVector = self.BasicOp.MakeVectorDXDYDZ(theDX, theDY, theDZ)
    RaiseIfFailed("MakeVectorDXDYDZ", self.BasicOp)
    aVector.SetParameters(aParams)
    self._autoPublish(aVector, theName, "vector")
    return aVector
## Create a vector between two points.
# @param thePnt1 Start point for the vector.
# @param thePnt2 End point for the vector.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created vector.
#
# @ref tui_creation_vector "Example"
@ManageTransactions("BasicOp")
def MakeVector(self, thePnt1, thePnt2, theName=None):
    """
    Create a vector between two points.

    Parameters:
        thePnt1  Start point for the vector.
        thePnt2  End point for the vector.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created vector.
    """
    # Example: see GEOM_TestAll.py
    aVector = self.BasicOp.MakeVectorTwoPnt(thePnt1, thePnt2)
    RaiseIfFailed("MakeVectorTwoPnt", self.BasicOp)
    self._autoPublish(aVector, theName, "vector")
    return aVector
## Create a line, passing through the given point
# and parallel to the given direction
# @param thePnt Point. The resulting line will pass through it.
# @param theDir Direction. The resulting line will be parallel to it.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created line.
#
# @ref tui_creation_line "Example"
@ManageTransactions("BasicOp")
def MakeLine(self, thePnt, theDir, theName=None):
    """
    Create a line passing through the given point
    and parallel to the given direction.

    Parameters:
        thePnt  Point the resulting line will pass through.
        theDir  Direction the resulting line will be parallel to.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created line.
    """
    # Example: see GEOM_TestAll.py
    aLine = self.BasicOp.MakeLine(thePnt, theDir)
    RaiseIfFailed("MakeLine", self.BasicOp)
    self._autoPublish(aLine, theName, "line")
    return aLine
## Create a line, passing through the given points
# @param thePnt1 First of two points, defining the line.
# @param thePnt2 Second of two points, defining the line.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created line.
#
# @ref tui_creation_line "Example"
@ManageTransactions("BasicOp")
def MakeLineTwoPnt(self, thePnt1, thePnt2, theName=None):
    """
    Create a line passing through the two given points.

    Parameters:
        thePnt1  First of two points, defining the line.
        thePnt2  Second of two points, defining the line.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created line.
    """
    # Example: see GEOM_TestAll.py
    aLine = self.BasicOp.MakeLineTwoPnt(thePnt1, thePnt2)
    RaiseIfFailed("MakeLineTwoPnt", self.BasicOp)
    self._autoPublish(aLine, theName, "line")
    return aLine
## Create a line on two faces intersection.
# @param theFace1 First of two faces, defining the line.
# @param theFace2 Second of two faces, defining the line.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created line.
#
# @ref swig_MakeLineTwoFaces "Example"
@ManageTransactions("BasicOp")
def MakeLineTwoFaces(self, theFace1, theFace2, theName=None):
    """
    Create a line on the intersection of two faces.

    Parameters:
        theFace1  First of two faces, defining the line.
        theFace2  Second of two faces, defining the line.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created line.
    """
    # Example: see GEOM_TestAll.py
    aLine = self.BasicOp.MakeLineTwoFaces(theFace1, theFace2)
    RaiseIfFailed("MakeLineTwoFaces", self.BasicOp)
    self._autoPublish(aLine, theName, "line")
    return aLine
## Create a plane, passing through the given point
# and normal to the given vector.
# @param thePnt Point, the plane has to pass through.
# @param theVec Vector, defining the plane normal direction.
# @param theTrimSize Half size of a side of quadrangle face, representing the plane.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created plane.
#
# @ref tui_creation_plane "Example"
@ManageTransactions("BasicOp")
def MakePlane(self, thePnt, theVec, theTrimSize, theName=None):
    """
    Create a plane passing through the given point
    and normal to the given vector.

    Parameters:
        thePnt       Point the plane has to pass through.
        theVec       Vector, defining the plane normal direction.
        theTrimSize  Half size of a side of the quadrangle face representing the plane.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created plane.
    """
    # Example: see GEOM_TestAll.py
    theTrimSize, aParams = ParseParameters(theTrimSize)
    aPlane = self.BasicOp.MakePlanePntVec(thePnt, theVec, theTrimSize)
    RaiseIfFailed("MakePlanePntVec", self.BasicOp)
    aPlane.SetParameters(aParams)
    self._autoPublish(aPlane, theName, "plane")
    return aPlane
## Create a plane, passing through the three given points
# @param thePnt1 First of three points, defining the plane.
# @param thePnt2 Second of three points, defining the plane.
# @param thePnt3 Third of three points, defining the plane.
# @param theTrimSize Half size of a side of quadrangle face, representing the plane.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created plane.
#
# @ref tui_creation_plane "Example"
@ManageTransactions("BasicOp")
def MakePlaneThreePnt(self, thePnt1, thePnt2, thePnt3, theTrimSize, theName=None):
    """
    Create a plane passing through the three given points.

    Parameters:
        thePnt1      First of three points, defining the plane.
        thePnt2      Second of three points, defining the plane.
        thePnt3      Third of three points, defining the plane.
        theTrimSize  Half size of a side of the quadrangle face representing the plane.
        theName  Optional result name for study publication; when omitted
                 and automatic publication is on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created plane.
    """
    # Example: see GEOM_TestAll.py
    theTrimSize, aParams = ParseParameters(theTrimSize)
    aPlane = self.BasicOp.MakePlaneThreePnt(thePnt1, thePnt2, thePnt3, theTrimSize)
    RaiseIfFailed("MakePlaneThreePnt", self.BasicOp)
    aPlane.SetParameters(aParams)
    self._autoPublish(aPlane, theName, "plane")
    return aPlane
## Create a plane, similar to the existing one, but with another size of representing face.
# @param theFace Referenced plane or LCS(Marker).
# @param theTrimSize New half size of a side of quadrangle face, representing the plane.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created plane.
#
# @ref tui_creation_plane "Example"
@ManageTransactions("BasicOp")
def MakePlaneFace(self, theFace, theTrimSize, theName=None):
    """
    Create a plane similar to an existing one, but with another size of the
    representing face.

    Parameters:
        theFace Referenced plane or LCS (Marker).
        theTrimSize New half size of a side of the quadrangle face representing the plane.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created plane.
    """
    # Split a possibly-parametric trim size into its value and notebook parameters.
    aSize, aParams = ParseParameters(theTrimSize)
    aPlane = self.BasicOp.MakePlaneFace(theFace, aSize)
    RaiseIfFailed("MakePlaneFace", self.BasicOp)
    aPlane.SetParameters(aParams)
    self._autoPublish(aPlane, theName, "plane")
    return aPlane
## Create a plane, passing through the 2 vectors
# with center in a start point of the first vector.
# @param theVec1 Vector, defining center point and plane direction.
# @param theVec2 Vector, defining the plane normal direction.
# @param theTrimSize Half size of a side of quadrangle face, representing the plane.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created plane.
#
# @ref tui_creation_plane "Example"
@ManageTransactions("BasicOp")
def MakePlane2Vec(self, theVec1, theVec2, theTrimSize, theName=None):
    """
    Create a plane passing through two vectors, centered at the start point
    of the first vector.

    Parameters:
        theVec1 Vector, defining the center point and the plane direction.
        theVec2 Vector, defining the plane normal direction.
        theTrimSize Half size of a side of the quadrangle face representing the plane.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created plane.
    """
    # Split a possibly-parametric trim size into its value and notebook parameters.
    aSize, aParams = ParseParameters(theTrimSize)
    aPlane = self.BasicOp.MakePlane2Vec(theVec1, theVec2, aSize)
    RaiseIfFailed("MakePlane2Vec", self.BasicOp)
    aPlane.SetParameters(aParams)
    self._autoPublish(aPlane, theName, "plane")
    return aPlane
## Create a plane, based on a Local coordinate system.
# @param theLCS coordinate system, defining plane.
# @param theTrimSize Half size of a side of quadrangle face, representing the plane.
# @param theOrientation OXY, OYZ or OZX orientation - (1, 2 or 3)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created plane.
#
# @ref tui_creation_plane "Example"
@ManageTransactions("BasicOp")
def MakePlaneLCS(self, theLCS, theTrimSize, theOrientation, theName=None):
    """
    Create a plane based on a local coordinate system.

    Parameters:
        theLCS Coordinate system, defining the plane.
        theTrimSize Half size of a side of the quadrangle face representing the plane.
        theOrientation OXY, OYZ or OZX orientation - (1, 2 or 3)
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created plane.
    """
    # Split a possibly-parametric trim size into its value and notebook parameters.
    aSize, aParams = ParseParameters(theTrimSize)
    aPlane = self.BasicOp.MakePlaneLCS(theLCS, aSize, theOrientation)
    RaiseIfFailed("MakePlaneLCS", self.BasicOp)
    aPlane.SetParameters(aParams)
    self._autoPublish(aPlane, theName, "plane")
    return aPlane
## Create a local coordinate system.
# @param OX,OY,OZ Three coordinates of coordinate system origin.
# @param XDX,XDY,XDZ Three components of OX direction
# @param YDX,YDY,YDZ Three components of OY direction
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created coordinate system.
#
# @ref swig_MakeMarker "Example"
@ManageTransactions("BasicOp")
def MakeMarker(self, OX,OY,OZ, XDX,XDY,XDZ, YDX,YDY,YDZ, theName=None):
    """
    Create a local coordinate system.

    Parameters:
        OX,OY,OZ Three coordinates of the coordinate system origin.
        XDX,XDY,XDZ Three components of the OX direction.
        YDX,YDY,YDZ Three components of the OY direction.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created coordinate system.
    """
    # Resolve possibly-parametric coordinates; keep the same names so the
    # numeric values flow straight into the engine call below.
    (OX,OY,OZ, XDX,XDY,XDZ, YDX,YDY,YDZ,
     aParams) = ParseParameters(OX,OY,OZ, XDX,XDY,XDZ, YDX,YDY,YDZ)
    aLCS = self.BasicOp.MakeMarker(OX,OY,OZ, XDX,XDY,XDZ, YDX,YDY,YDZ)
    RaiseIfFailed("MakeMarker", self.BasicOp)
    aLCS.SetParameters(aParams)
    self._autoPublish(aLCS, theName, "lcs")
    return aLCS
## Create a local coordinate system from shape.
# @param theShape The initial shape to detect the coordinate system.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created coordinate system.
#
# @ref tui_creation_lcs "Example"
@ManageTransactions("BasicOp")
def MakeMarkerFromShape(self, theShape, theName=None):
    """
    Create a local coordinate system detected from a shape.

    Parameters:
        theShape The initial shape to detect the coordinate system.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created coordinate system.
    """
    aLCS = self.BasicOp.MakeMarkerFromShape(theShape)
    RaiseIfFailed("MakeMarkerFromShape", self.BasicOp)
    self._autoPublish(aLCS, theName, "lcs")
    return aLCS
## Create a local coordinate system from point and two vectors.
# @param theOrigin Point of coordinate system origin.
# @param theXVec Vector of X direction
# @param theYVec Vector of Y direction
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created coordinate system.
#
# @ref tui_creation_lcs "Example"
@ManageTransactions("BasicOp")
def MakeMarkerPntTwoVec(self, theOrigin, theXVec, theYVec, theName=None):
    """
    Create a local coordinate system from a point and two vectors.

    Parameters:
        theOrigin Point of the coordinate system origin.
        theXVec Vector of the X direction.
        theYVec Vector of the Y direction.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created coordinate system.
    """
    aLCS = self.BasicOp.MakeMarkerPntTwoVec(theOrigin, theXVec, theYVec)
    RaiseIfFailed("MakeMarkerPntTwoVec", self.BasicOp)
    self._autoPublish(aLCS, theName, "lcs")
    return aLCS
# end of l3_basic_go
## @}
## @addtogroup l4_curves
## @{
## Create an arc of circle, passing through three given points.
# @param thePnt1 Start point of the arc.
# @param thePnt2 Middle point of the arc.
# @param thePnt3 End point of the arc.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created arc.
#
# @ref swig_MakeArc "Example"
@ManageTransactions("CurvesOp")
def MakeArc(self, thePnt1, thePnt2, thePnt3, theName=None):
    """
    Create an arc of circle passing through three given points.

    Parameters:
        thePnt1 Start point of the arc.
        thePnt2 Middle point of the arc.
        thePnt3 End point of the arc.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created arc.
    """
    anArc = self.CurvesOp.MakeArc(thePnt1, thePnt2, thePnt3)
    RaiseIfFailed("MakeArc", self.CurvesOp)
    self._autoPublish(anArc, theName, "arc")
    return anArc
## Create an arc of circle from a center and 2 points.
# @param thePnt1 Center of the arc
# @param thePnt2 Start point of the arc. (Gives also the radius of the arc)
# @param thePnt3 End point of the arc (Gives also a direction)
# @param theSense Orientation of the arc
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created arc.
#
# @ref swig_MakeArc "Example"
@ManageTransactions("CurvesOp")
def MakeArcCenter(self, thePnt1, thePnt2, thePnt3, theSense=False, theName=None):
    """
    Create an arc of circle from a center point and two points.

    Parameters:
        thePnt1 Center of the arc.
        thePnt2 Start point of the arc (also gives the radius of the arc).
        thePnt3 End point of the arc (also gives a direction).
        theSense Orientation of the arc.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created arc.
    """
    anArc = self.CurvesOp.MakeArcCenter(thePnt1, thePnt2, thePnt3, theSense)
    RaiseIfFailed("MakeArcCenter", self.CurvesOp)
    self._autoPublish(anArc, theName, "arc")
    return anArc
## Create an arc of ellipse, of center and two points.
# @param theCenter Center of the arc.
# @param thePnt1 defines major radius of the arc by distance from Pnt1 to Pnt2.
# @param thePnt2 defines plane of ellipse and minor radius as distance from Pnt3 to line from Pnt1 to Pnt2.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created arc.
#
# @ref swig_MakeArc "Example"
@ManageTransactions("CurvesOp")
def MakeArcOfEllipse(self, theCenter, thePnt1, thePnt2, theName=None):
    """
    Create an arc of ellipse from a center and two points.

    Parameters:
        theCenter Center of the arc.
        thePnt1 Defines the major radius of the arc by the distance from Pnt1 to Pnt2.
        thePnt2 Defines the plane of the ellipse and the minor radius as the
                distance from Pnt3 to the line from Pnt1 to Pnt2.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created arc.
    """
    anArc = self.CurvesOp.MakeArcOfEllipse(theCenter, thePnt1, thePnt2)
    RaiseIfFailed("MakeArcOfEllipse", self.CurvesOp)
    self._autoPublish(anArc, theName, "arc")
    return anArc
## Create a circle with given center, normal vector and radius.
# @param thePnt Circle center.
# @param theVec Vector, normal to the plane of the circle.
# @param theR Circle radius.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created circle.
#
# @ref tui_creation_circle "Example"
@ManageTransactions("CurvesOp")
def MakeCircle(self, thePnt, theVec, theR, theName=None):
    """
    Create a circle with a given center, normal vector and radius.

    Parameters:
        thePnt Circle center.
        theVec Vector, normal to the plane of the circle.
        theR Circle radius.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created circle.
    """
    # Split a possibly-parametric radius into its value and notebook parameters.
    aRadius, aParams = ParseParameters(theR)
    aCircle = self.CurvesOp.MakeCirclePntVecR(thePnt, theVec, aRadius)
    RaiseIfFailed("MakeCirclePntVecR", self.CurvesOp)
    aCircle.SetParameters(aParams)
    self._autoPublish(aCircle, theName, "circle")
    return aCircle
## Create a circle with given radius.
# Center of the circle will be in the origin of global
# coordinate system and normal vector will be codirected with Z axis
# @param theR Circle radius.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created circle.
@ManageTransactions("CurvesOp")
def MakeCircleR(self, theR, theName=None):
    """
    Create a circle with a given radius.

    The circle center will be at the origin of the global coordinate system
    and the normal vector will be codirected with the Z axis.

    Parameters:
        theR Circle radius.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created circle.
    """
    # None center/normal makes the engine use the default origin/OZ axis.
    aCircle = self.CurvesOp.MakeCirclePntVecR(None, None, theR)
    RaiseIfFailed("MakeCirclePntVecR", self.CurvesOp)
    self._autoPublish(aCircle, theName, "circle")
    return aCircle
## Create a circle, passing through three given points
# @param thePnt1,thePnt2,thePnt3 Points, defining the circle.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created circle.
#
# @ref tui_creation_circle "Example"
@ManageTransactions("CurvesOp")
def MakeCircleThreePnt(self, thePnt1, thePnt2, thePnt3, theName=None):
    """
    Create a circle passing through three given points.

    Parameters:
        thePnt1, thePnt2, thePnt3 Points, defining the circle.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created circle.
    """
    aCircle = self.CurvesOp.MakeCircleThreePnt(thePnt1, thePnt2, thePnt3)
    RaiseIfFailed("MakeCircleThreePnt", self.CurvesOp)
    self._autoPublish(aCircle, theName, "circle")
    return aCircle
## Create a circle, with given point1 as center,
# passing through the point2 as radius and laying in the plane,
# defined by all three given points.
# @param thePnt1,thePnt2,thePnt3 Points, defining the circle.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created circle.
#
# @ref swig_MakeCircle "Example"
@ManageTransactions("CurvesOp")
def MakeCircleCenter2Pnt(self, thePnt1, thePnt2, thePnt3, theName=None):
    """
    Create a circle with point1 as center, passing through point2 (which
    gives the radius) and lying in the plane defined by all three points.

    Parameters:
        thePnt1, thePnt2, thePnt3 Points, defining the circle.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created circle.
    """
    aCircle = self.CurvesOp.MakeCircleCenter2Pnt(thePnt1, thePnt2, thePnt3)
    RaiseIfFailed("MakeCircleCenter2Pnt", self.CurvesOp)
    self._autoPublish(aCircle, theName, "circle")
    return aCircle
## Create an ellipse with given center, normal vector and radiuses.
# @param thePnt Ellipse center.
# @param theVec Vector, normal to the plane of the ellipse.
# @param theRMajor Major ellipse radius.
# @param theRMinor Minor ellipse radius.
# @param theVecMaj Vector, direction of the ellipse's main axis.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created ellipse.
#
# @ref tui_creation_ellipse "Example"
@ManageTransactions("CurvesOp")
def MakeEllipse(self, thePnt, theVec, theRMajor, theRMinor, theVecMaj=None, theName=None):
    """
    Create an ellipse with a given center, normal vector and radiuses.

    Parameters:
        thePnt Ellipse center.
        theVec Vector, normal to the plane of the ellipse.
        theRMajor Major ellipse radius.
        theRMinor Minor ellipse radius.
        theVecMaj Vector, direction of the ellipse's main axis (optional).
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created ellipse.
    """
    # Split possibly-parametric radiuses into values and notebook parameters.
    aRMaj, aRMin, aParams = ParseParameters(theRMajor, theRMinor)
    # The engine offers a dedicated operation when the main-axis vector is given.
    if theVecMaj is None:
        anEllipse = self.CurvesOp.MakeEllipse(thePnt, theVec, aRMaj, aRMin)
    else:
        anEllipse = self.CurvesOp.MakeEllipseVec(thePnt, theVec, aRMaj, aRMin, theVecMaj)
    RaiseIfFailed("MakeEllipse", self.CurvesOp)
    anEllipse.SetParameters(aParams)
    self._autoPublish(anEllipse, theName, "ellipse")
    return anEllipse
## Create an ellipse with given radiuses.
# Center of the ellipse will be in the origin of global
# coordinate system and normal vector will be codirected with Z axis
# @param theRMajor Major ellipse radius.
# @param theRMinor Minor ellipse radius.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created ellipse.
@ManageTransactions("CurvesOp")
def MakeEllipseRR(self, theRMajor, theRMinor, theName=None):
    """
    Create an ellipse with given radiuses.

    The ellipse center will be at the origin of the global coordinate system
    and the normal vector will be codirected with the Z axis.

    Parameters:
        theRMajor Major ellipse radius.
        theRMinor Minor ellipse radius.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created ellipse.
    """
    # None center/normal makes the engine use the default origin/OZ axis.
    anEllipse = self.CurvesOp.MakeEllipse(None, None, theRMajor, theRMinor)
    RaiseIfFailed("MakeEllipse", self.CurvesOp)
    self._autoPublish(anEllipse, theName, "ellipse")
    return anEllipse
## Create a polyline on the set of points.
# @param thePoints Sequence of points for the polyline.
# @param theIsClosed If True, build a closed wire.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created polyline.
#
# @ref tui_creation_curve "Example"
@ManageTransactions("CurvesOp")
def MakePolyline(self, thePoints, theIsClosed=False, theName=None):
    """
    Create a polyline on the set of points.

    Parameters:
        thePoints Sequence of points for the polyline.
        theIsClosed If True, build a closed wire.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created polyline.
    """
    aWire = self.CurvesOp.MakePolyline(thePoints, theIsClosed)
    RaiseIfFailed("MakePolyline", self.CurvesOp)
    self._autoPublish(aWire, theName, "polyline")
    return aWire
## Create bezier curve on the set of points.
# @param thePoints Sequence of points for the bezier curve.
# @param theIsClosed If True, build a closed curve.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created bezier curve.
#
# @ref tui_creation_curve "Example"
@ManageTransactions("CurvesOp")
def MakeBezier(self, thePoints, theIsClosed=False, theName=None):
    """
    Create a bezier curve on the set of points.

    Parameters:
        thePoints Sequence of points for the bezier curve.
        theIsClosed If True, build a closed curve.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created bezier curve.
    """
    aCurve = self.CurvesOp.MakeSplineBezier(thePoints, theIsClosed)
    RaiseIfFailed("MakeSplineBezier", self.CurvesOp)
    self._autoPublish(aCurve, theName, "bezier")
    return aCurve
## Create B-Spline curve on the set of points.
# @param thePoints Sequence of points for the B-Spline curve.
# @param theIsClosed If True, build a closed curve.
# @param theDoReordering If TRUE, the algo does not follow the order of
# \a thePoints but searches for the closest vertex.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created B-Spline curve.
#
# @ref tui_creation_curve "Example"
@ManageTransactions("CurvesOp")
def MakeInterpol(self, thePoints, theIsClosed=False, theDoReordering=False, theName=None):
    """
    Create a B-Spline curve on the set of points.

    Parameters:
        thePoints Sequence of points for the B-Spline curve.
        theIsClosed If True, build a closed curve.
        theDoReordering If True, the algorithm does not follow the order of
                        thePoints but searches for the closest vertex.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created B-Spline curve.
    """
    aCurve = self.CurvesOp.MakeSplineInterpolation(thePoints, theIsClosed, theDoReordering)
    RaiseIfFailed("MakeInterpol", self.CurvesOp)
    self._autoPublish(aCurve, theName, "bspline")
    return aCurve
## Create B-Spline curve on the set of points.
# @param thePoints Sequence of points for the B-Spline curve.
# @param theFirstVec Vector object, defining the curve direction at its first point.
# @param theLastVec Vector object, defining the curve direction at its last point.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created B-Spline curve.
#
# @ref tui_creation_curve "Example"
@ManageTransactions("CurvesOp")
def MakeInterpolWithTangents(self, thePoints, theFirstVec, theLastVec, theName=None):
    """
    Create a B-Spline curve on the set of points, with prescribed tangents
    at both ends.

    Parameters:
        thePoints Sequence of points for the B-Spline curve.
        theFirstVec Vector object, defining the curve direction at its first point.
        theLastVec Vector object, defining the curve direction at its last point.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created B-Spline curve.
    """
    aCurve = self.CurvesOp.MakeSplineInterpolWithTangents(thePoints, theFirstVec, theLastVec)
    RaiseIfFailed("MakeInterpolWithTangents", self.CurvesOp)
    self._autoPublish(aCurve, theName, "bspline")
    return aCurve
## Creates a curve using the parametric definition of the basic points.
# @param thexExpr parametric equation of the coordinates X.
# @param theyExpr parametric equation of the coordinates Y.
# @param thezExpr parametric equation of the coordinates Z.
# @param theParamMin the minimal value of the parameter.
# @param theParamMax the maximum value of the parameter.
# @param theParamStep the number of steps if theNewMethod = True, else step value of the parameter.
# @param theCurveType the type of the curve,
# one of GEOM.Polyline, GEOM.Bezier, GEOM.Interpolation.
# @param theNewMethod flag for switching to the new method if the flag is set to false a deprecated method is used which can lead to a bug.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created curve.
#
# @ref tui_creation_curve "Example"
@ManageTransactions("CurvesOp")
def MakeCurveParametric(self, thexExpr, theyExpr, thezExpr,
                        theParamMin, theParamMax, theParamStep, theCurveType, theNewMethod=False, theName=None ):
    """
    Creates a curve using the parametric definition of the basic points.

    Parameters:
        thexExpr parametric equation of the coordinates X.
        theyExpr parametric equation of the coordinates Y.
        thezExpr parametric equation of the coordinates Z.
        theParamMin the minimal value of the parameter.
        theParamMax the maximum value of the parameter.
        theParamStep the number of steps if theNewMethod = True, else step value of the parameter.
        theCurveType the type of the curve,
                     one of GEOM.Polyline, GEOM.Bezier, GEOM.Interpolation.
        theNewMethod flag for switching to the new method if the flag is set to false a deprecated
                     method is used which can lead to a bug.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, default value is used for result name.
    Returns:
        New GEOM.GEOM_Object, containing the created curve.
    """
    # Split possibly-parametric range values into values and notebook parameters.
    theParamMin, theParamMax, theParamStep, Parameters = ParseParameters(theParamMin, theParamMax, theParamStep)
    if theNewMethod:
        anObj = self.CurvesOp.MakeCurveParametricNew(thexExpr, theyExpr, thezExpr,
                                                     theParamMin, theParamMax, theParamStep, theCurveType)
    else:
        anObj = self.CurvesOp.MakeCurveParametric(thexExpr, theyExpr, thezExpr,
                                                  theParamMin, theParamMax, theParamStep, theCurveType)
    # Bug fix: report the operation actually invoked; the original wrongly
    # passed "MakeSplineInterpolation" (copy-paste from MakeInterpol), which
    # produced a misleading error message on failure.
    RaiseIfFailed("MakeCurveParametric", self.CurvesOp)
    anObj.SetParameters(Parameters)
    self._autoPublish(anObj, theName, "curve")
    return anObj
## Create an isoline curve on a face.
# @param theFace the face for which an isoline is created.
# @param IsUIsoline True for U-isoline creation; False for V-isoline
# creation.
# @param theParameter the U parameter for U-isoline or V parameter
# for V-isoline.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created isoline edge or
# a compound of edges.
#
# @ref tui_creation_curve "Example"
@ManageTransactions("CurvesOp")
def MakeIsoline(self, theFace, IsUIsoline, theParameter, theName=None):
    """
    Create an isoline curve on a face.

    Parameters:
        theFace The face for which an isoline is created.
        IsUIsoline True for U-isoline creation; False for V-isoline creation.
        theParameter The U parameter for a U-isoline or the V parameter
                     for a V-isoline.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, a default value is used for the result name.

    Returns:
        New GEOM.GEOM_Object, containing the created isoline edge or a
        compound of edges.
    """
    anIso = self.CurvesOp.MakeIsoline(theFace, IsUIsoline, theParameter)
    RaiseIfFailed("MakeIsoline", self.CurvesOp)
    # Default publication name reflects the isoline direction.
    aKind = "U-Isoline" if IsUIsoline else "V-Isoline"
    self._autoPublish(anIso, theName, aKind)
    return anIso
# end of l4_curves
## @}
## @addtogroup l3_sketcher
## @{
## Create a sketcher (wire or face), following the textual description,
# passed through <VAR>theCommand</VAR> argument. \n
# Edges of the resulting wire or face will be arcs of circles and/or linear segments. \n
# Format of the description string have to be the following:
#
# "Sketcher[:F x1 y1]:CMD[:CMD[:CMD...]]"
#
# Where:
# - x1, y1 are coordinates of the first sketcher point (zero by default),
# - CMD is one of
# - "R angle" : Set the direction by angle
# - "D dx dy" : Set the direction by DX & DY
# .
# \n
# - "TT x y" : Create segment by point at X & Y
# - "T dx dy" : Create segment by point with DX & DY
# - "L length" : Create segment by direction & Length
# - "IX x" : Create segment by direction & Intersect. X
# - "IY y" : Create segment by direction & Intersect. Y
# .
# \n
# - "C radius length" : Create arc by direction, radius and length(in degree)
# - "AA x y": Create arc by point at X & Y
# - "A dx dy" : Create arc by point with DX & DY
# - "UU x y radius flag1": Create arc by point at X & Y with given radiUs
# - "U dx dy radius flag1" : Create arc by point with DX & DY with given radiUs
# - "EE x y xc yc flag1 flag2": Create arc by point at X & Y with given cEnter coordinates
# - "E dx dy dxc dyc radius flag1 flag2" : Create arc by point with DX & DY with given cEnter coordinates
# .
# \n
# - "WW" : Close Wire (to finish)
# - "WF" : Close Wire and build face (to finish)
# .
# \n
# - Flag1 (= reverse) is 0 or 2 ...
# - if 0 the drawn arc is the one of lower angle (< Pi)
#      - if 2 the drawn arc is the one of greater angle (> Pi)
# .
# \n
# - Flag2 (= control tolerance) is 0 or 1 ...
# - if 0 the specified end point can be at a distance of the arc greater than the tolerance (10^-7)
# - if 1 the wire is built only if the end point is on the arc
# with a tolerance of 10^-7 on the distance else the creation fails
#
# @param theCommand String, defining the sketcher in local
# coordinates of the working plane.
# @param theWorkingPlane Nine double values, defining origin,
# OZ and OX directions of the working plane.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created wire.
#
# @ref tui_sketcher_page "Example"
@ManageTransactions("CurvesOp")
def MakeSketcher(self, theCommand, theWorkingPlane = [0,0,0, 0,0,1, 1,0,0], theName=None):
    """
    Create a sketcher (wire or face) from the textual description passed
    through the theCommand argument.
    Edges of the resulting wire or face are arcs of circles and/or linear segments.

    The description string must have the following format:
        "Sketcher[:F x1 y1]:CMD[:CMD[:CMD...]]"
    where:
    - x1, y1 are coordinates of the first sketcher point (zero by default),
    - CMD is one of
        - "R angle"  : Set the direction by angle
        - "D dx dy"  : Set the direction by DX & DY
        - "TT x y"   : Create segment by point at X & Y
        - "T dx dy"  : Create segment by point with DX & DY
        - "L length" : Create segment by direction & Length
        - "IX x"     : Create segment by direction & Intersect. X
        - "IY y"     : Create segment by direction & Intersect. Y
        - "C radius length" : Create arc by direction, radius and length(in degree)
        - "AA x y"   : Create arc by point at X & Y
        - "A dx dy"  : Create arc by point with DX & DY
        - "UU x y radius flag1"      : Create arc by point at X & Y with given radiUs
        - "U dx dy radius flag1"     : Create arc by point with DX & DY with given radiUs
        - "EE x y xc yc flag1 flag2" : Create arc by point at X & Y with given cEnter coordinates
        - "E dx dy dxc dyc radius flag1 flag2" : Create arc by point with DX & DY with given cEnter coordinates
        - "WW" : Close Wire (to finish)
        - "WF" : Close Wire and build face (to finish)
    - Flag1 (= reverse) is 0 or 2 ...
        - if 0 the drawn arc is the one of lower angle (< Pi)
        - if 2 the drawn arc is the one of greater angle (> Pi)
    - Flag2 (= control tolerance) is 0 or 1 ...
        - if 0 the specified end point can be at a distance of the arc greater than the tolerance (10^-7)
        - if 1 the wire is built only if the end point is on the arc
          with a tolerance of 10^-7 on the distance else the creation fails

    Parameters:
        theCommand String, defining the sketcher in local
                   coordinates of the working plane.
        theWorkingPlane Nine double values, defining origin,
                        OZ and OX directions of the working plane.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created wire.
    """
    # Example: see GEOM_TestAll.py
    # ParseSketcherCommand() presumably splits notebook-variable references
    # from the numeric command — TODO confirm; they are re-attached to the
    # result via SetParameters() below.
    aCommand, aParams = ParseSketcherCommand(theCommand)
    aWire = self.CurvesOp.MakeSketcher(aCommand, theWorkingPlane)
    RaiseIfFailed("MakeSketcher", self.CurvesOp)
    aWire.SetParameters(aParams)
    self._autoPublish(aWire, theName, "wire")
    return aWire
## Create a sketcher (wire or face), following the textual description,
# passed through <VAR>theCommand</VAR> argument. \n
# For format of the description string see MakeSketcher() method.\n
# @param theCommand String, defining the sketcher in local
# coordinates of the working plane.
# @param theWorkingPlane Planar Face or LCS(Marker) of the working plane.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created wire.
#
# @ref tui_sketcher_page "Example"
@ManageTransactions("CurvesOp")
def MakeSketcherOnPlane(self, theCommand, theWorkingPlane, theName=None):
    """
    Create a sketcher (wire or face) from the textual description passed
    through the theCommand argument.
    For the format of the description string see geompy.MakeSketcher().

    Parameters:
        theCommand String, defining the sketcher in local
                   coordinates of the working plane.
        theWorkingPlane Planar Face or LCS(Marker) of the working plane.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created wire.
    """
    aCommand, aParams = ParseSketcherCommand(theCommand)
    aWire = self.CurvesOp.MakeSketcherOnPlane(aCommand, theWorkingPlane)
    RaiseIfFailed("MakeSketcherOnPlane", self.CurvesOp)
    aWire.SetParameters(aParams)
    self._autoPublish(aWire, theName, "wire")
    return aWire
## Obtain a 2D sketcher interface
# @return An instance of @ref gsketcher.Sketcher2D "Sketcher2D" interface
def Sketcher2D(self):
    """
    Obtain a 2D sketcher interface.

    Example of usage:
        sk = geompy.Sketcher2D()
        sk.addPoint(20, 20)
        sk.addSegmentRelative(15, 70)
        sk.addSegmentPerpY(50)
        sk.addArcRadiusRelative(25, 15, 14.5, 0)
        sk.addArcCenterAbsolute(1, 1, 50, 50, 0, 0)
        sk.addArcDirectionRadiusLength(20, 20, 101, 162.13)
        sk.close()
        Sketch_1 = sk.wire(geomObj_1)
    """
    # Inside this method the name Sketcher2D resolves to the module-level
    # interface class, not to this method.
    return Sketcher2D(self)
## Create a sketcher wire, following the numerical description,
# passed through <VAR>theCoordinates</VAR> argument. \n
# @param theCoordinates double values, defining points to create a wire,
# passing from it.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created wire.
#
# @ref tui_3dsketcher_page "Example"
@ManageTransactions("CurvesOp")
def Make3DSketcher(self, theCoordinates, theName=None):
    """
    Create a sketcher wire from the numerical description passed
    through the theCoordinates argument.

    Parameters:
        theCoordinates double values, defining points the wire
                       passes through.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created wire.
    """
    theCoordinates, aParams = ParseParameters(theCoordinates)
    aWire = self.CurvesOp.Make3DSketcher(theCoordinates)
    RaiseIfFailed("Make3DSketcher", self.CurvesOp)
    aWire.SetParameters(aParams)
    self._autoPublish(aWire, theName, "wire")
    return aWire
## Obtain a 3D sketcher interface
# @return An instance of @ref gsketcher.Sketcher3D "Sketcher3D" interface
#
# @ref tui_3dsketcher_page "Example"
def Sketcher3D(self):
    """
    Obtain a 3D sketcher interface.

    Example of usage:
        sk = geompy.Sketcher3D()
        sk.addPointsAbsolute(0,0,0, 70,0,0)
        sk.addPointsRelative(0, 0, 130)
        sk.addPointAnglesLength("OXY", 50, 0, 100)
        sk.addPointAnglesLength("OXZ", 30, 80, 130)
        sk.close()
        a3D_Sketcher_1 = sk.wire()
    """
    # Sketcher3D here is the module-level interface class, not this method.
    return Sketcher3D(self)
## Obtain a 2D polyline creation interface
# @return An instance of @ref gsketcher.Polyline2D "Polyline2D" interface
#
# @ref tui_3dsketcher_page "Example"
def Polyline2D(self):
    """
    Obtain a 2D polyline creation interface.

    Example of usage:
        pl = geompy.Polyline2D()
        pl.addSection("section 1", GEOM.Polyline, True)
        pl.addPoints(0, 0, 10, 0, 10, 10)
        pl.addSection("section 2", GEOM.Interpolation, False)
        pl.addPoints(20, 0, 30, 0, 30, 10)
        resultObj = pl.result(WorkingPlane)
    """
    # Polyline2D here is the module-level interface class, not this method.
    return Polyline2D(self)
# end of l3_sketcher
## @}
## @addtogroup l3_3d_primitives
## @{
## Create a box by coordinates of two opposite vertices.
#
#  @param x1,y1,z1 double values, defining the first point.
#  @param x2,y2,z2 double values, defining the second point.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created box.
#
# @ref tui_creation_box "Example"
def MakeBox(self, x1, y1, z1, x2, y2, z2, theName=None):
    """
    Create a box by coordinates of two opposite vertices.

    Parameters:
        x1,y1,z1 double values, defining the first point.
        x2,y2,z2 double values, defining the second point.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created box.
    """
    # Example: see GEOM_TestAll.py
    corner1 = self.MakeVertex(x1, y1, z1)
    corner2 = self.MakeVertex(x2, y2, z2)
    # Auto-publishing (if requested) happens inside MakeBoxTwoPnt().
    return self.MakeBoxTwoPnt(corner1, corner2, theName)
## Create a box with specified dimensions along the coordinate axes
# and with edges, parallel to the coordinate axes.
# Center of the box will be at point (DX/2, DY/2, DZ/2).
# @param theDX Length of Box edges, parallel to OX axis.
# @param theDY Length of Box edges, parallel to OY axis.
# @param theDZ Length of Box edges, parallel to OZ axis.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created box.
#
# @ref tui_creation_box "Example"
@ManageTransactions("PrimOp")
def MakeBoxDXDYDZ(self, theDX, theDY, theDZ, theName=None):
    """
    Create a box with the given dimensions along the coordinate axes,
    with edges parallel to them.
    The center of the box will be at point (DX/2, DY/2, DZ/2).

    Parameters:
        theDX Length of Box edges, parallel to OX axis.
        theDY Length of Box edges, parallel to OY axis.
        theDZ Length of Box edges, parallel to OZ axis.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created box.
    """
    # Example: see GEOM_TestAll.py
    theDX, theDY, theDZ, aParams = ParseParameters(theDX, theDY, theDZ)
    aBox = self.PrimOp.MakeBoxDXDYDZ(theDX, theDY, theDZ)
    RaiseIfFailed("MakeBoxDXDYDZ", self.PrimOp)
    aBox.SetParameters(aParams)
    self._autoPublish(aBox, theName, "box")
    return aBox
## Create a box with two specified opposite vertices,
# and with edges, parallel to the coordinate axes
# @param thePnt1 First of two opposite vertices.
# @param thePnt2 Second of two opposite vertices.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created box.
#
# @ref tui_creation_box "Example"
@ManageTransactions("PrimOp")
def MakeBoxTwoPnt(self, thePnt1, thePnt2, theName=None):
    """
    Create a box with two specified opposite vertices,
    with edges parallel to the coordinate axes.

    Parameters:
        thePnt1 First of two opposite vertices.
        thePnt2 Second of two opposite vertices.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created box.
    """
    # Example: see GEOM_TestAll.py
    # No ParseParameters() call here: both arguments are geometry objects,
    # not numeric values.
    aBox = self.PrimOp.MakeBoxTwoPnt(thePnt1, thePnt2)
    RaiseIfFailed("MakeBoxTwoPnt", self.PrimOp)
    self._autoPublish(aBox, theName, "box")
    return aBox
## Create a face with specified dimensions with edges parallel to coordinate axes.
# @param theH height of Face.
# @param theW width of Face.
# @param theOrientation face orientation: 1-OXY, 2-OYZ, 3-OZX
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created face.
#
# @ref tui_creation_face "Example"
@ManageTransactions("PrimOp")
def MakeFaceHW(self, theH, theW, theOrientation, theName=None):
    """
    Create a face with the given dimensions, with edges parallel to the
    coordinate axes.

    Parameters:
        theH height of Face.
        theW width of Face.
        theOrientation face orientation: 1-OXY, 2-OYZ, 3-OZX
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created face.
    """
    # Example: see GEOM_TestAll.py
    theH, theW, aParams = ParseParameters(theH, theW)
    aFace = self.PrimOp.MakeFaceHW(theH, theW, theOrientation)
    RaiseIfFailed("MakeFaceHW", self.PrimOp)
    aFace.SetParameters(aParams)
    self._autoPublish(aFace, theName, "rectangle")
    return aFace
## Create a face from another plane and two sizes,
#  vertical size and horizontal size.
#  @param theObj Normal vector to the creating face or
#                the face object.
#  @param theH Height (vertical size).
#  @param theW Width (horizontal size).
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created face.
#
# @ref tui_creation_face "Example"
@ManageTransactions("PrimOp")
def MakeFaceObjHW(self, theObj, theH, theW, theName=None):
    """
    Create a face from another plane and two sizes,
    vertical size and horizontal size.

    Parameters:
        theObj Normal vector to the creating face or
               the face object.
        theH Height (vertical size).
        theW Width (horizontal size).
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created face.
    """
    # Example: see GEOM_TestAll.py
    theH, theW, aParams = ParseParameters(theH, theW)
    aFace = self.PrimOp.MakeFaceObjHW(theObj, theH, theW)
    RaiseIfFailed("MakeFaceObjHW", self.PrimOp)
    aFace.SetParameters(aParams)
    self._autoPublish(aFace, theName, "rectangle")
    return aFace
## Create a disk with given center, normal vector and radius.
# @param thePnt Disk center.
# @param theVec Vector, normal to the plane of the disk.
# @param theR Disk radius.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created disk.
#
# @ref tui_creation_disk "Example"
@ManageTransactions("PrimOp")
def MakeDiskPntVecR(self, thePnt, theVec, theR, theName=None):
    """
    Create a disk with the given center, normal vector and radius.

    Parameters:
        thePnt Disk center.
        theVec Vector, normal to the plane of the disk.
        theR Disk radius.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created disk.
    """
    # Example: see GEOM_TestAll.py
    theR, aParams = ParseParameters(theR)
    aDisk = self.PrimOp.MakeDiskPntVecR(thePnt, theVec, theR)
    RaiseIfFailed("MakeDiskPntVecR", self.PrimOp)
    aDisk.SetParameters(aParams)
    self._autoPublish(aDisk, theName, "disk")
    return aDisk
## Create a disk, passing through three given points
# @param thePnt1,thePnt2,thePnt3 Points, defining the disk.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created disk.
#
# @ref tui_creation_disk "Example"
@ManageTransactions("PrimOp")
def MakeDiskThreePnt(self, thePnt1, thePnt2, thePnt3, theName=None):
    """
    Create a disk passing through three given points.

    Parameters:
        thePnt1,thePnt2,thePnt3 Points, defining the disk.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created disk.
    """
    # Example: see GEOM_TestAll.py
    # All arguments are geometry objects, so there are no numeric
    # parameters to parse or store.
    aDisk = self.PrimOp.MakeDiskThreePnt(thePnt1, thePnt2, thePnt3)
    RaiseIfFailed("MakeDiskThreePnt", self.PrimOp)
    self._autoPublish(aDisk, theName, "disk")
    return aDisk
## Create a disk with specified dimensions along OX-OY coordinate axes.
# @param theR Radius of Face.
#  @param theOrientation set the orientation along the OXY, OYZ or OZX axis
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created disk.
#
# @ref tui_creation_face "Example"
@ManageTransactions("PrimOp")
def MakeDiskR(self, theR, theOrientation, theName=None):
    """
    Create a disk with the given dimensions along OX-OY coordinate axes.

    Parameters:
        theR Radius of Face.
        theOrientation set the orientation along the OXY, OYZ or OZX axis
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created disk.

    Example of usage:
        Disk3 = geompy.MakeDiskR(100., 1)
    """
    # Example: see GEOM_TestAll.py
    theR, aParams = ParseParameters(theR)
    aDisk = self.PrimOp.MakeDiskR(theR, theOrientation)
    RaiseIfFailed("MakeDiskR", self.PrimOp)
    aDisk.SetParameters(aParams)
    self._autoPublish(aDisk, theName, "disk")
    return aDisk
## Create a cylinder with given base point, axis, radius and height.
# @param thePnt Central point of cylinder base.
# @param theAxis Cylinder axis.
# @param theR Cylinder radius.
# @param theH Cylinder height.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created cylinder.
#
# @ref tui_creation_cylinder "Example"
@ManageTransactions("PrimOp")
def MakeCylinder(self, thePnt, theAxis, theR, theH, theName=None):
    """
    Create a cylinder with the given base point, axis, radius and height.

    Parameters:
        thePnt Central point of cylinder base.
        theAxis Cylinder axis.
        theR Cylinder radius.
        theH Cylinder height.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created cylinder.
    """
    # Example: see GEOM_TestAll.py
    theR, theH, aParams = ParseParameters(theR, theH)
    aCyl = self.PrimOp.MakeCylinderPntVecRH(thePnt, theAxis, theR, theH)
    RaiseIfFailed("MakeCylinderPntVecRH", self.PrimOp)
    aCyl.SetParameters(aParams)
    self._autoPublish(aCyl, theName, "cylinder")
    return aCyl
## Create a portion of cylinder with given base point, axis, radius, height and angle.
# @param thePnt Central point of cylinder base.
# @param theAxis Cylinder axis.
# @param theR Cylinder radius.
# @param theH Cylinder height.
# @param theA Cylinder angle in radians.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created cylinder.
#
# @ref tui_creation_cylinder "Example"
@ManageTransactions("PrimOp")
def MakeCylinderA(self, thePnt, theAxis, theR, theH, theA, theName=None):
    """
    Create a portion of cylinder with the given base point, axis,
    radius, height and angle.

    Parameters:
        thePnt Central point of cylinder base.
        theAxis Cylinder axis.
        theR Cylinder radius.
        theH Cylinder height.
        theA Cylinder angle in radians.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created cylinder.
    """
    # Example: see GEOM_TestAll.py
    # When the angle comes as a string (notebook variable), its parsed
    # value is treated as degrees and converted to radians below.
    angleIsText = isinstance(theA, str)
    theR, theH, theA, aParams = ParseParameters(theR, theH, theA)
    if angleIsText:
        theA = theA * math.pi / 180.
    aCyl = self.PrimOp.MakeCylinderPntVecRHA(thePnt, theAxis, theR, theH, theA)
    RaiseIfFailed("MakeCylinderPntVecRHA", self.PrimOp)
    aCyl.SetParameters(aParams)
    self._autoPublish(aCyl, theName, "cylinder")
    return aCyl
## Create a cylinder with given radius and height at
# the origin of coordinate system. Axis of the cylinder
# will be collinear to the OZ axis of the coordinate system.
# @param theR Cylinder radius.
# @param theH Cylinder height.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created cylinder.
#
# @ref tui_creation_cylinder "Example"
@ManageTransactions("PrimOp")
def MakeCylinderRH(self, theR, theH, theName=None):
    """
    Create a cylinder with the given radius and height at the origin of
    the coordinate system. The cylinder axis will be collinear to the OZ
    axis of the coordinate system.

    Parameters:
        theR Cylinder radius.
        theH Cylinder height.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created cylinder.
    """
    # Example: see GEOM_TestAll.py
    theR, theH, aParams = ParseParameters(theR, theH)
    aCyl = self.PrimOp.MakeCylinderRH(theR, theH)
    RaiseIfFailed("MakeCylinderRH", self.PrimOp)
    aCyl.SetParameters(aParams)
    self._autoPublish(aCyl, theName, "cylinder")
    return aCyl
## Create a portion of cylinder with given radius, height and angle at
# the origin of coordinate system. Axis of the cylinder
# will be collinear to the OZ axis of the coordinate system.
# @param theR Cylinder radius.
# @param theH Cylinder height.
# @param theA Cylinder angle in radians.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created cylinder.
#
# @ref tui_creation_cylinder "Example"
@ManageTransactions("PrimOp")
def MakeCylinderRHA(self, theR, theH, theA, theName=None):
    """
    Create a portion of cylinder with the given radius, height and angle
    at the origin of the coordinate system. The cylinder axis will be
    collinear to the OZ axis of the coordinate system.

    Parameters:
        theR Cylinder radius.
        theH Cylinder height.
        theA Cylinder angle in radians.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created cylinder.
    """
    # Example: see GEOM_TestAll.py
    # When the angle comes as a string (notebook variable), its parsed
    # value is treated as degrees and converted to radians below.
    angleIsText = isinstance(theA, str)
    theR, theH, theA, aParams = ParseParameters(theR, theH, theA)
    if angleIsText:
        theA = theA * math.pi / 180.
    aCyl = self.PrimOp.MakeCylinderRHA(theR, theH, theA)
    RaiseIfFailed("MakeCylinderRHA", self.PrimOp)
    aCyl.SetParameters(aParams)
    self._autoPublish(aCyl, theName, "cylinder")
    return aCyl
## Create a sphere with given center and radius.
# @param thePnt Sphere center.
# @param theR Sphere radius.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created sphere.
#
# @ref tui_creation_sphere "Example"
@ManageTransactions("PrimOp")
def MakeSpherePntR(self, thePnt, theR, theName=None):
    """
    Create a sphere with the given center and radius.

    Parameters:
        thePnt Sphere center.
        theR Sphere radius.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created sphere.
    """
    # Example: see GEOM_TestAll.py
    theR, aParams = ParseParameters(theR)
    aSphere = self.PrimOp.MakeSpherePntR(thePnt, theR)
    RaiseIfFailed("MakeSpherePntR", self.PrimOp)
    aSphere.SetParameters(aParams)
    self._autoPublish(aSphere, theName, "sphere")
    return aSphere
## Create a sphere with given center and radius.
# @param x,y,z Coordinates of sphere center.
# @param theR Sphere radius.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created sphere.
#
# @ref tui_creation_sphere "Example"
def MakeSphere(self, x, y, z, theR, theName=None):
    """
    Create a sphere with the given center and radius.

    Parameters:
        x,y,z Coordinates of sphere center.
        theR Sphere radius.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created sphere.
    """
    # Example: see GEOM_TestAll.py
    center = self.MakeVertex(x, y, z)
    # Auto-publishing (if requested) happens inside MakeSpherePntR().
    return self.MakeSpherePntR(center, theR, theName)
## Create a sphere with given radius at the origin of coordinate system.
# @param theR Sphere radius.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created sphere.
#
# @ref tui_creation_sphere "Example"
@ManageTransactions("PrimOp")
def MakeSphereR(self, theR, theName=None):
    """
    Create a sphere with the given radius at the origin of the
    coordinate system.

    Parameters:
        theR Sphere radius.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created sphere.
    """
    # Example: see GEOM_TestAll.py
    theR, aParams = ParseParameters(theR)
    aSphere = self.PrimOp.MakeSphereR(theR)
    RaiseIfFailed("MakeSphereR", self.PrimOp)
    aSphere.SetParameters(aParams)
    self._autoPublish(aSphere, theName, "sphere")
    return aSphere
## Create a cone with given base point, axis, height and radiuses.
# @param thePnt Central point of the first cone base.
# @param theAxis Cone axis.
# @param theR1 Radius of the first cone base.
# @param theR2 Radius of the second cone base.
# \note If both radiuses are non-zero, the cone will be truncated.
# \note If the radiuses are equal, a cylinder will be created instead.
# @param theH Cone height.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created cone.
#
# @ref tui_creation_cone "Example"
@ManageTransactions("PrimOp")
def MakeCone(self, thePnt, theAxis, theR1, theR2, theH, theName=None):
    """
    Create a cone with the given base point, axis, height and radiuses.

    Parameters:
        thePnt Central point of the first cone base.
        theAxis Cone axis.
        theR1 Radius of the first cone base.
        theR2 Radius of the second cone base.
        theH Cone height.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Note:
        If both radiuses are non-zero, the cone will be truncated.
        If the radiuses are equal, a cylinder will be created instead.

    Returns:
        New GEOM.GEOM_Object, containing the created cone.
    """
    # Example: see GEOM_TestAll.py
    theR1, theR2, theH, aParams = ParseParameters(theR1, theR2, theH)
    aCone = self.PrimOp.MakeConePntVecR1R2H(thePnt, theAxis, theR1, theR2, theH)
    RaiseIfFailed("MakeConePntVecR1R2H", self.PrimOp)
    aCone.SetParameters(aParams)
    self._autoPublish(aCone, theName, "cone")
    return aCone
## Create a cone with given height and radiuses at
# the origin of coordinate system. Axis of the cone will
# be collinear to the OZ axis of the coordinate system.
# @param theR1 Radius of the first cone base.
# @param theR2 Radius of the second cone base.
# \note If both radiuses are non-zero, the cone will be truncated.
# \note If the radiuses are equal, a cylinder will be created instead.
# @param theH Cone height.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created cone.
#
# @ref tui_creation_cone "Example"
@ManageTransactions("PrimOp")
def MakeConeR1R2H(self, theR1, theR2, theH, theName=None):
    """
    Create a cone with the given height and radiuses at the origin of
    the coordinate system. The cone axis will be collinear to the OZ
    axis of the coordinate system.

    Parameters:
        theR1 Radius of the first cone base.
        theR2 Radius of the second cone base.
        theH Cone height.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Note:
        If both radiuses are non-zero, the cone will be truncated.
        If the radiuses are equal, a cylinder will be created instead.

    Returns:
        New GEOM.GEOM_Object, containing the created cone.
    """
    # Example: see GEOM_TestAll.py
    theR1, theR2, theH, aParams = ParseParameters(theR1, theR2, theH)
    aCone = self.PrimOp.MakeConeR1R2H(theR1, theR2, theH)
    RaiseIfFailed("MakeConeR1R2H", self.PrimOp)
    aCone.SetParameters(aParams)
    self._autoPublish(aCone, theName, "cone")
    return aCone
## Create a torus with given center, normal vector and radiuses.
# @param thePnt Torus central point.
# @param theVec Torus axis of symmetry.
# @param theRMajor Torus major radius.
# @param theRMinor Torus minor radius.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created torus.
#
# @ref tui_creation_torus "Example"
@ManageTransactions("PrimOp")
def MakeTorus(self, thePnt, theVec, theRMajor, theRMinor, theName=None):
    """
    Create a torus with the given center, normal vector and radiuses.

    Parameters:
        thePnt Torus central point.
        theVec Torus axis of symmetry.
        theRMajor Torus major radius.
        theRMinor Torus minor radius.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created torus.
    """
    # Example: see GEOM_TestAll.py
    theRMajor, theRMinor, aParams = ParseParameters(theRMajor, theRMinor)
    aTorus = self.PrimOp.MakeTorusPntVecRR(thePnt, theVec, theRMajor, theRMinor)
    RaiseIfFailed("MakeTorusPntVecRR", self.PrimOp)
    aTorus.SetParameters(aParams)
    self._autoPublish(aTorus, theName, "torus")
    return aTorus
## Create a torus with given radiuses at the origin of coordinate system.
# @param theRMajor Torus major radius.
# @param theRMinor Torus minor radius.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created torus.
#
# @ref tui_creation_torus "Example"
@ManageTransactions("PrimOp")
def MakeTorusRR(self, theRMajor, theRMinor, theName=None):
    """
    Create a torus with the given radiuses at the origin of the
    coordinate system.

    Parameters:
        theRMajor Torus major radius.
        theRMinor Torus minor radius.
        theName Object name; when specified, it is used for result
                publication in the study; otherwise, if automatic
                publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the created torus.
    """
    # Example: see GEOM_TestAll.py
    theRMajor, theRMinor, aParams = ParseParameters(theRMajor, theRMinor)
    aTorus = self.PrimOp.MakeTorusRR(theRMajor, theRMinor)
    RaiseIfFailed("MakeTorusRR", self.PrimOp)
    aTorus.SetParameters(aParams)
    self._autoPublish(aTorus, theName, "torus")
    return aTorus
# end of l3_3d_primitives
## @}
## @addtogroup l3_complex
## @{
## Create a shape by extrusion of the base shape along a vector, defined by two points.
# @param theBase Base shape to be extruded.
# @param thePoint1 First end of extrusion vector.
# @param thePoint2 Second end of extrusion vector.
# @param theScaleFactor Use it to make prism with scaled second base.
        #  @param theScaleFactor Use it to make prism with scaled second base.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created prism.
#
# @ref tui_creation_prism "Example"
@ManageTransactions("PrimOp")
def MakePrism(self, theBase, thePoint1, thePoint2, theScaleFactor = -1.0, theName=None):
"""
Create a shape by extrusion of the base shape along a vector, defined by two points.
Parameters:
theBase Base shape to be extruded.
thePoint1 First end of extrusion vector.
thePoint2 Second end of extrusion vector.
theScaleFactor Use it to make prism with scaled second base.
Nagative value means not scaled second base.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created prism.
"""
# Example: see GEOM_TestAll.py
anObj = None
Parameters = ""
if theScaleFactor > 0:
theScaleFactor,Parameters = ParseParameters(theScaleFactor)
anObj = self.PrimOp.MakePrismTwoPntWithScaling(theBase, thePoint1, thePoint2, theScaleFactor)
else:
anObj = self.PrimOp.MakePrismTwoPnt(theBase, thePoint1, thePoint2)
RaiseIfFailed("MakePrismTwoPnt", self.PrimOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "prism")
return anObj
## Create a shape by extrusion of the base shape along a
# vector, defined by two points, in 2 Ways (forward/backward).
# @param theBase Base shape to be extruded.
# @param thePoint1 First end of extrusion vector.
# @param thePoint2 Second end of extrusion vector.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created prism.
#
# @ref tui_creation_prism "Example"
@ManageTransactions("PrimOp")
def MakePrism2Ways(self, theBase, thePoint1, thePoint2, theName=None):
"""
Create a shape by extrusion of the base shape along a
vector, defined by two points, in 2 Ways (forward/backward).
Parameters:
theBase Base shape to be extruded.
thePoint1 First end of extrusion vector.
thePoint2 Second end of extrusion vector.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created prism.
"""
# Example: see GEOM_TestAll.py
anObj = self.PrimOp.MakePrismTwoPnt2Ways(theBase, thePoint1, thePoint2)
RaiseIfFailed("MakePrismTwoPnt", self.PrimOp)
self._autoPublish(anObj, theName, "prism")
return anObj
## Create a shape by extrusion of the base shape along the vector,
# i.e. all the space, transfixed by the base shape during its translation
# along the vector on the given distance.
# @param theBase Base shape to be extruded.
# @param theVec Direction of extrusion.
# @param theH Prism dimension along theVec.
# @param theScaleFactor Use it to make prism with scaled second base.
# Negative value means not scaled second base.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created prism.
#
# @ref tui_creation_prism "Example"
@ManageTransactions("PrimOp")
def MakePrismVecH(self, theBase, theVec, theH, theScaleFactor = -1.0, theName=None):
"""
Create a shape by extrusion of the base shape along the vector,
i.e. all the space, transfixed by the base shape during its translation
along the vector on the given distance.
Parameters:
theBase Base shape to be extruded.
theVec Direction of extrusion.
theH Prism dimension along theVec.
theScaleFactor Use it to make prism with scaled second base.
Negative value means not scaled second base.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created prism.
"""
# Example: see GEOM_TestAll.py
anObj = None
Parameters = ""
if theScaleFactor > 0:
theH,theScaleFactor,Parameters = ParseParameters(theH,theScaleFactor)
anObj = self.PrimOp.MakePrismVecHWithScaling(theBase, theVec, theH, theScaleFactor)
else:
theH,Parameters = ParseParameters(theH)
anObj = self.PrimOp.MakePrismVecH(theBase, theVec, theH)
RaiseIfFailed("MakePrismVecH", self.PrimOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "prism")
return anObj
## Create a shape by extrusion of the base shape along the vector,
# i.e. all the space, transfixed by the base shape during its translation
# along the vector on the given distance in 2 Ways (forward/backward).
# @param theBase Base shape to be extruded.
# @param theVec Direction of extrusion.
# @param theH Prism dimension along theVec in forward direction.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created prism.
#
# @ref tui_creation_prism "Example"
@ManageTransactions("PrimOp")
def MakePrismVecH2Ways(self, theBase, theVec, theH, theName=None):
"""
Create a shape by extrusion of the base shape along the vector,
i.e. all the space, transfixed by the base shape during its translation
along the vector on the given distance in 2 Ways (forward/backward).
Parameters:
theBase Base shape to be extruded.
theVec Direction of extrusion.
theH Prism dimension along theVec in forward direction.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created prism.
"""
# Example: see GEOM_TestAll.py
theH,Parameters = ParseParameters(theH)
anObj = self.PrimOp.MakePrismVecH2Ways(theBase, theVec, theH)
RaiseIfFailed("MakePrismVecH2Ways", self.PrimOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "prism")
return anObj
## Create a shape by extrusion of the base shape along the dx, dy, dz direction
# @param theBase Base shape to be extruded.
# @param theDX, theDY, theDZ Directions of extrusion.
# @param theScaleFactor Use it to make prism with scaled second base.
        #                        Negative value means not scaled second base.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created prism.
#
# @ref tui_creation_prism "Example"
@ManageTransactions("PrimOp")
def MakePrismDXDYDZ(self, theBase, theDX, theDY, theDZ, theScaleFactor = -1.0, theName=None):
"""
Create a shape by extrusion of the base shape along the dx, dy, dz direction
Parameters:
theBase Base shape to be extruded.
theDX, theDY, theDZ Directions of extrusion.
theScaleFactor Use it to make prism with scaled second base.
Nagative value means not scaled second base.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created prism.
"""
# Example: see GEOM_TestAll.py
anObj = None
Parameters = ""
if theScaleFactor > 0:
theDX,theDY,theDZ,theScaleFactor,Parameters = ParseParameters(theDX, theDY, theDZ, theScaleFactor)
anObj = self.PrimOp.MakePrismDXDYDZWithScaling(theBase, theDX, theDY, theDZ, theScaleFactor)
else:
theDX,theDY,theDZ,Parameters = ParseParameters(theDX, theDY, theDZ)
anObj = self.PrimOp.MakePrismDXDYDZ(theBase, theDX, theDY, theDZ)
RaiseIfFailed("MakePrismDXDYDZ", self.PrimOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "prism")
return anObj
## Create a shape by extrusion of the base shape along the dx, dy, dz direction
# i.e. all the space, transfixed by the base shape during its translation
# along the vector on the given distance in 2 Ways (forward/backward).
# @param theBase Base shape to be extruded.
# @param theDX, theDY, theDZ Directions of extrusion.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created prism.
#
# @ref tui_creation_prism "Example"
@ManageTransactions("PrimOp")
def MakePrismDXDYDZ2Ways(self, theBase, theDX, theDY, theDZ, theName=None):
"""
Create a shape by extrusion of the base shape along the dx, dy, dz direction
i.e. all the space, transfixed by the base shape during its translation
along the vector on the given distance in 2 Ways (forward/backward).
Parameters:
theBase Base shape to be extruded.
theDX, theDY, theDZ Directions of extrusion.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created prism.
"""
# Example: see GEOM_TestAll.py
theDX,theDY,theDZ,Parameters = ParseParameters(theDX, theDY, theDZ)
anObj = self.PrimOp.MakePrismDXDYDZ2Ways(theBase, theDX, theDY, theDZ)
RaiseIfFailed("MakePrismDXDYDZ2Ways", self.PrimOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "prism")
return anObj
## Create a shape by revolution of the base shape around the axis
# on the given angle, i.e. all the space, transfixed by the base
# shape during its rotation around the axis on the given angle.
# @param theBase Base shape to be rotated.
# @param theAxis Rotation axis.
# @param theAngle Rotation angle in radians.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created revolution.
#
# @ref tui_creation_revolution "Example"
@ManageTransactions("PrimOp")
def MakeRevolution(self, theBase, theAxis, theAngle, theName=None):
"""
Create a shape by revolution of the base shape around the axis
on the given angle, i.e. all the space, transfixed by the base
shape during its rotation around the axis on the given angle.
Parameters:
theBase Base shape to be rotated.
theAxis Rotation axis.
theAngle Rotation angle in radians.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created revolution.
"""
# Example: see GEOM_TestAll.py
theAngle,Parameters = ParseParameters(theAngle)
anObj = self.PrimOp.MakeRevolutionAxisAngle(theBase, theAxis, theAngle)
RaiseIfFailed("MakeRevolutionAxisAngle", self.PrimOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "revolution")
return anObj
## Create a shape by revolution of the base shape around the axis
# on the given angle, i.e. all the space, transfixed by the base
# shape during its rotation around the axis on the given angle in
# both directions (forward/backward)
# @param theBase Base shape to be rotated.
# @param theAxis Rotation axis.
# @param theAngle Rotation angle in radians.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created revolution.
#
# @ref tui_creation_revolution "Example"
@ManageTransactions("PrimOp")
def MakeRevolution2Ways(self, theBase, theAxis, theAngle, theName=None):
"""
Create a shape by revolution of the base shape around the axis
on the given angle, i.e. all the space, transfixed by the base
shape during its rotation around the axis on the given angle in
both directions (forward/backward).
Parameters:
theBase Base shape to be rotated.
theAxis Rotation axis.
theAngle Rotation angle in radians.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created revolution.
"""
theAngle,Parameters = ParseParameters(theAngle)
anObj = self.PrimOp.MakeRevolutionAxisAngle2Ways(theBase, theAxis, theAngle)
RaiseIfFailed("MakeRevolutionAxisAngle2Ways", self.PrimOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "revolution")
return anObj
## Create a filling from the given compound of contours.
# @param theShape the compound of contours
# @param theMinDeg a minimal degree of BSpline surface to create
# @param theMaxDeg a maximal degree of BSpline surface to create
# @param theTol2D a 2d tolerance to be reached
# @param theTol3D a 3d tolerance to be reached
# @param theNbIter a number of iteration of approximation algorithm
# @param theMethod Kind of method to perform filling operation(see GEOM::filling_oper_method())
# @param isApprox if True, BSpline curves are generated in the process
# of surface construction. By default it is False, that means
# the surface is created using given curves. The usage of
# Approximation makes the algorithm work slower, but allows
# building the surface for rather complex cases.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created filling surface.
#
# @ref tui_creation_filling "Example"
@ManageTransactions("PrimOp")
def MakeFilling(self, theShape, theMinDeg=2, theMaxDeg=5, theTol2D=0.0001,
theTol3D=0.0001, theNbIter=0, theMethod=GEOM.FOM_Default, isApprox=0, theName=None):
"""
Create a filling from the given compound of contours.
Parameters:
theShape the compound of contours
theMinDeg a minimal degree of BSpline surface to create
theMaxDeg a maximal degree of BSpline surface to create
theTol2D a 2d tolerance to be reached
theTol3D a 3d tolerance to be reached
theNbIter a number of iteration of approximation algorithm
theMethod Kind of method to perform filling operation(see GEOM::filling_oper_method())
isApprox if True, BSpline curves are generated in the process
of surface construction. By default it is False, that means
the surface is created using given curves. The usage of
Approximation makes the algorithm work slower, but allows
building the surface for rather complex cases
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created filling surface.
Example of usage:
filling = geompy.MakeFilling(compound, 2, 5, 0.0001, 0.0001, 5)
"""
# Example: see GEOM_TestAll.py
theMinDeg,theMaxDeg,theTol2D,theTol3D,theNbIter,Parameters = ParseParameters(theMinDeg, theMaxDeg, theTol2D, theTol3D, theNbIter)
anObj = self.PrimOp.MakeFilling(theShape, theMinDeg, theMaxDeg,
theTol2D, theTol3D, theNbIter,
theMethod, isApprox)
RaiseIfFailed("MakeFilling", self.PrimOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "filling")
return anObj
## Create a filling from the given compound of contours.
# This method corresponds to MakeFilling with isApprox=True
# @param theShape the compound of contours
# @param theMinDeg a minimal degree of BSpline surface to create
# @param theMaxDeg a maximal degree of BSpline surface to create
# @param theTol3D a 3d tolerance to be reached
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created filling surface.
#
# @ref tui_creation_filling "Example"
@ManageTransactions("PrimOp")
def MakeFillingNew(self, theShape, theMinDeg=2, theMaxDeg=5, theTol3D=0.0001, theName=None):
"""
Create a filling from the given compound of contours.
This method corresponds to MakeFilling with isApprox=True
Parameters:
theShape the compound of contours
theMinDeg a minimal degree of BSpline surface to create
theMaxDeg a maximal degree of BSpline surface to create
theTol3D a 3d tolerance to be reached
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created filling surface.
Example of usage:
filling = geompy.MakeFillingNew(compound, 2, 5, 0.0001)
"""
# Example: see GEOM_TestAll.py
theMinDeg,theMaxDeg,theTol3D,Parameters = ParseParameters(theMinDeg, theMaxDeg, theTol3D)
anObj = self.PrimOp.MakeFilling(theShape, theMinDeg, theMaxDeg,
0, theTol3D, 0, GEOM.FOM_Default, True)
RaiseIfFailed("MakeFillingNew", self.PrimOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "filling")
return anObj
## Create a shell or solid passing through set of sections.Sections should be wires,edges or vertices.
# @param theSeqSections - set of specified sections.
# @param theModeSolid - mode defining building solid or shell
# @param thePreci - precision 3D used for smoothing
# @param theRuled - mode defining type of the result surfaces (ruled or smoothed).
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created shell or solid.
#
# @ref swig_todo "Example"
@ManageTransactions("PrimOp")
def MakeThruSections(self, theSeqSections, theModeSolid, thePreci, theRuled, theName=None):
"""
Create a shell or solid passing through set of sections.Sections should be wires,edges or vertices.
Parameters:
theSeqSections - set of specified sections.
theModeSolid - mode defining building solid or shell
thePreci - precision 3D used for smoothing
theRuled - mode defining type of the result surfaces (ruled or smoothed).
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created shell or solid.
"""
# Example: see GEOM_TestAll.py
anObj = self.PrimOp.MakeThruSections(theSeqSections,theModeSolid,thePreci,theRuled)
RaiseIfFailed("MakeThruSections", self.PrimOp)
self._autoPublish(anObj, theName, "filling")
return anObj
## Create a shape by extrusion of the base shape along
# the path shape. The path shape can be a wire or an edge.
# @param theBase Base shape to be extruded.
# @param thePath Path shape to extrude the base shape along it.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created pipe.
#
# @ref tui_creation_pipe "Example"
@ManageTransactions("PrimOp")
def MakePipe(self, theBase, thePath, theName=None):
"""
Create a shape by extrusion of the base shape along
the path shape. The path shape can be a wire or an edge.
Parameters:
theBase Base shape to be extruded.
thePath Path shape to extrude the base shape along it.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created pipe.
"""
# Example: see GEOM_TestAll.py
anObj = self.PrimOp.MakePipe(theBase, thePath)
RaiseIfFailed("MakePipe", self.PrimOp)
self._autoPublish(anObj, theName, "pipe")
return anObj
## Create a shape by extrusion of the profile shape along
# the path shape. The path shape can be a wire or an edge.
# the several profiles can be specified in the several locations of path.
# @param theSeqBases - list of Bases shape to be extruded.
# @param theLocations - list of locations on the path corresponding
# specified list of the Bases shapes. Number of locations
# should be equal to number of bases or list of locations can be empty.
# @param thePath - Path shape to extrude the base shape along it.
# @param theWithContact - the mode defining that the section is translated to be in
# contact with the spine.
# @param theWithCorrection - defining that the section is rotated to be
# orthogonal to the spine tangent in the correspondent point
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created pipe.
#
# @ref tui_creation_pipe_with_diff_sec "Example"
@ManageTransactions("PrimOp")
def MakePipeWithDifferentSections(self, theSeqBases,
theLocations, thePath,
theWithContact, theWithCorrection, theName=None):
"""
Create a shape by extrusion of the profile shape along
the path shape. The path shape can be a wire or an edge.
the several profiles can be specified in the several locations of path.
Parameters:
theSeqBases - list of Bases shape to be extruded.
theLocations - list of locations on the path corresponding
specified list of the Bases shapes. Number of locations
should be equal to number of bases or list of locations can be empty.
thePath - Path shape to extrude the base shape along it.
theWithContact - the mode defining that the section is translated to be in
contact with the spine(0/1)
theWithCorrection - defining that the section is rotated to be
orthogonal to the spine tangent in the correspondent point (0/1)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created pipe.
"""
anObj = self.PrimOp.MakePipeWithDifferentSections(theSeqBases,
theLocations, thePath,
theWithContact, theWithCorrection)
RaiseIfFailed("MakePipeWithDifferentSections", self.PrimOp)
self._autoPublish(anObj, theName, "pipe")
return anObj
## Create a shape by extrusion of the profile shape along
        #  the path shape. The path shape can be a wire or an edge.
# the several profiles can be specified in the several locations of path.
# @param theSeqBases - list of Bases shape to be extruded. Base shape must be
# shell or face. If number of faces in neighbour sections
# aren't coincided result solid between such sections will
# be created using external boundaries of this shells.
# @param theSeqSubBases - list of corresponding sub-shapes of section shapes.
# This list is used for searching correspondences between
# faces in the sections. Size of this list must be equal
# to size of list of base shapes.
# @param theLocations - list of locations on the path corresponding
# specified list of the Bases shapes. Number of locations
# should be equal to number of bases. First and last
# locations must be coincided with first and last vertexes
# of path correspondingly.
# @param thePath - Path shape to extrude the base shape along it.
# @param theWithContact - the mode defining that the section is translated to be in
# contact with the spine.
# @param theWithCorrection - defining that the section is rotated to be
# orthogonal to the spine tangent in the correspondent point
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created solids.
#
# @ref tui_creation_pipe_with_shell_sec "Example"
@ManageTransactions("PrimOp")
def MakePipeWithShellSections(self, theSeqBases, theSeqSubBases,
theLocations, thePath,
theWithContact, theWithCorrection, theName=None):
"""
Create a shape by extrusion of the profile shape along
the path shape. The path shape can be a wire or a edge.
the several profiles can be specified in the several locations of path.
Parameters:
theSeqBases - list of Bases shape to be extruded. Base shape must be
shell or face. If number of faces in neighbour sections
aren't coincided result solid between such sections will
be created using external boundaries of this shells.
theSeqSubBases - list of corresponding sub-shapes of section shapes.
This list is used for searching correspondences between
faces in the sections. Size of this list must be equal
to size of list of base shapes.
theLocations - list of locations on the path corresponding
specified list of the Bases shapes. Number of locations
should be equal to number of bases. First and last
locations must be coincided with first and last vertexes
of path correspondingly.
thePath - Path shape to extrude the base shape along it.
theWithContact - the mode defining that the section is translated to be in
contact with the spine (0/1)
theWithCorrection - defining that the section is rotated to be
orthogonal to the spine tangent in the correspondent point (0/1)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created solids.
"""
anObj = self.PrimOp.MakePipeWithShellSections(theSeqBases, theSeqSubBases,
theLocations, thePath,
theWithContact, theWithCorrection)
RaiseIfFailed("MakePipeWithShellSections", self.PrimOp)
self._autoPublish(anObj, theName, "pipe")
return anObj
## Create a shape by extrusion of the profile shape along
# the path shape. This function is used only for debug pipe
# functionality - it is a version of function MakePipeWithShellSections()
        #  which gives a possibility to receive information about
# creating pipe between each pair of sections step by step.
        @ManageTransactions("PrimOp")
        def MakePipeWithShellSectionsBySteps(self, theSeqBases, theSeqSubBases,
                                             theLocations, thePath,
                                             theWithContact, theWithCorrection, theName=None):
            """
            Create a shape by extrusion of the profile shape along
            the path shape. This function is used only to debug the pipe
            functionality - it is a version of MakePipeWithShellSections()
            which gives a possibility to receive information about
            creating the pipe between each pair of sections step by step.

            Parameters: same as MakePipeWithShellSections().

            Returns:
                New GEOM.GEOM_Object: a compound of the per-step pipe solids.
            """
            # Build the pipe pairwise: one PrimOp call per consecutive pair of
            # sections, so a failure can be attributed to a specific pair.
            res = []
            nbsect = len(theSeqBases)
            nbsubsect = len(theSeqSubBases)
            #print "nbsect = ",nbsect
            for i in range(1,nbsect):
                #print "  i = ",i
                # Two-element slices for the current pair of sections/locations.
                tmpSeqBases = [ theSeqBases[i-1], theSeqBases[i] ]
                tmpLocations = [ theLocations[i-1], theLocations[i] ]
                tmpSeqSubBases = []
                # Sub-bases are optional; pass the matching pair only when given.
                if nbsubsect>0: tmpSeqSubBases = [ theSeqSubBases[i-1], theSeqSubBases[i] ]
                anObj = self.PrimOp.MakePipeWithShellSections(tmpSeqBases, tmpSeqSubBases,
                                                              tmpLocations, thePath,
                                                              theWithContact, theWithCorrection)
                if self.PrimOp.IsDone() == 0:
                    # Report which pair of sections failed, then raise and stop.
                    print "Problems with pipe creation between ",i," and ",i+1," sections"
                    RaiseIfFailed("MakePipeWithShellSections", self.PrimOp)
                    break
                else:
                    print "Pipe between ",i," and ",i+1," sections is OK"
                    res.append(anObj)
                    pass
                pass
            # Gather all successfully built steps into a single compound.
            resc = self.MakeCompound(res)
            #resc = self.MakeSewing(res, 0.001)
            #print "resc: ",resc
            self._autoPublish(resc, theName, "pipe")
            return resc
## Create solids between given sections
# @param theSeqBases - list of sections (shell or face).
# @param theLocations - list of corresponding vertexes
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created solids.
#
# @ref tui_creation_pipe_without_path "Example"
@ManageTransactions("PrimOp")
def MakePipeShellsWithoutPath(self, theSeqBases, theLocations, theName=None):
"""
Create solids between given sections
Parameters:
theSeqBases - list of sections (shell or face).
theLocations - list of corresponding vertexes
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created solids.
"""
anObj = self.PrimOp.MakePipeShellsWithoutPath(theSeqBases, theLocations)
RaiseIfFailed("MakePipeShellsWithoutPath", self.PrimOp)
self._autoPublish(anObj, theName, "pipe")
return anObj
## Create a shape by extrusion of the base shape along
# the path shape with constant bi-normal direction along the given vector.
# The path shape can be a wire or an edge.
# @param theBase Base shape to be extruded.
# @param thePath Path shape to extrude the base shape along it.
# @param theVec Vector defines a constant binormal direction to keep the
        #                same angle between the direction and the sections
# along the sweep surface.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created pipe.
#
# @ref tui_creation_pipe "Example"
@ManageTransactions("PrimOp")
def MakePipeBiNormalAlongVector(self, theBase, thePath, theVec, theName=None):
"""
Create a shape by extrusion of the base shape along
the path shape with constant bi-normal direction along the given vector.
The path shape can be a wire or an edge.
Parameters:
theBase Base shape to be extruded.
thePath Path shape to extrude the base shape along it.
theVec Vector defines a constant binormal direction to keep the
same angle beetween the direction and the sections
along the sweep surface.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created pipe.
"""
# Example: see GEOM_TestAll.py
anObj = self.PrimOp.MakePipeBiNormalAlongVector(theBase, thePath, theVec)
RaiseIfFailed("MakePipeBiNormalAlongVector", self.PrimOp)
self._autoPublish(anObj, theName, "pipe")
return anObj
## Makes a thick solid from a face or a shell
# @param theShape Face or Shell to be thicken
# @param theThickness Thickness of the resulting solid
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created solid
#
@ManageTransactions("PrimOp")
def MakeThickSolid(self, theShape, theThickness, theName=None):
"""
Make a thick solid from a face or a shell
Parameters:
theShape Face or Shell to be thicken
theThickness Thickness of the resulting solid
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created solid
"""
# Example: see GEOM_TestAll.py
anObj = self.PrimOp.MakeThickening(theShape, theThickness, True)
RaiseIfFailed("MakeThickening", self.PrimOp)
self._autoPublish(anObj, theName, "pipe")
return anObj
## Modifies a face or a shell to make it a thick solid
# @param theShape Face or Shell to be thicken
# @param theThickness Thickness of the resulting solid
#
# @return The modified shape
#
@ManageTransactions("PrimOp")
def Thicken(self, theShape, theThickness):
"""
Modifies a face or a shell to make it a thick solid
Parameters:
theBase Base shape to be extruded.
thePath Path shape to extrude the base shape along it.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
The modified shape
"""
# Example: see GEOM_TestAll.py
anObj = self.PrimOp.MakeThickening(theShape, theThickness, False)
RaiseIfFailed("MakeThickening", self.PrimOp)
return anObj
## Build a middle path of a pipe-like shape.
# The path shape can be a wire or an edge.
# @param theShape It can be closed or unclosed pipe-like shell
# or a pipe-like solid.
# @param theBase1, theBase2 Two bases of the supposed pipe. This
# should be wires or faces of theShape.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note It is not assumed that exact or approximate copy of theShape
# can be obtained by applying existing Pipe operation on the
# resulting "Path" wire taking theBase1 as the base - it is not
# always possible; though in some particular cases it might work
# it is not guaranteed. Thus, RestorePath function should not be
# considered as an exact reverse operation of the Pipe.
#
# @return New GEOM.GEOM_Object, containing an edge or wire that represent
# source pipe's "path".
#
# @ref tui_creation_pipe_path "Example"
@ManageTransactions("PrimOp")
def RestorePath (self, theShape, theBase1, theBase2, theName=None):
"""
Build a middle path of a pipe-like shape.
The path shape can be a wire or an edge.
Parameters:
theShape It can be closed or unclosed pipe-like shell
or a pipe-like solid.
theBase1, theBase2 Two bases of the supposed pipe. This
should be wires or faces of theShape.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM_Object, containing an edge or wire that represent
source pipe's path.
"""
anObj = self.PrimOp.RestorePath(theShape, theBase1, theBase2)
RaiseIfFailed("RestorePath", self.PrimOp)
self._autoPublish(anObj, theName, "path")
return anObj
## Build a middle path of a pipe-like shape.
# The path shape can be a wire or an edge.
# @param theShape It can be closed or unclosed pipe-like shell
# or a pipe-like solid.
# @param listEdges1, listEdges2 Two bases of the supposed pipe. This
# should be lists of edges of theShape.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note It is not assumed that exact or approximate copy of theShape
# can be obtained by applying existing Pipe operation on the
# resulting "Path" wire taking theBase1 as the base - it is not
# always possible; though in some particular cases it might work
# it is not guaranteed. Thus, RestorePath function should not be
# considered as an exact reverse operation of the Pipe.
#
# @return New GEOM.GEOM_Object, containing an edge or wire that represent
# source pipe's "path".
#
# @ref tui_creation_pipe_path "Example"
@ManageTransactions("PrimOp")
def RestorePathEdges (self, theShape, listEdges1, listEdges2, theName=None):
"""
Build a middle path of a pipe-like shape.
The path shape can be a wire or an edge.
Parameters:
theShape It can be closed or unclosed pipe-like shell
or a pipe-like solid.
listEdges1, listEdges2 Two bases of the supposed pipe. This
should be lists of edges of theShape.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM_Object, containing an edge or wire that represent
source pipe's path.
"""
anObj = self.PrimOp.RestorePathEdges(theShape, listEdges1, listEdges2)
RaiseIfFailed("RestorePath", self.PrimOp)
self._autoPublish(anObj, theName, "path")
return anObj
# end of l3_complex
## @}
## @addtogroup l3_advanced
## @{
## Create a linear edge with specified ends.
# @param thePnt1 Point for the first end of edge.
# @param thePnt2 Point for the second end of edge.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created edge.
#
# @ref tui_creation_edge "Example"
@ManageTransactions("ShapesOp")
def MakeEdge(self, thePnt1, thePnt2, theName=None):
"""
Create a linear edge with specified ends.
Parameters:
thePnt1 Point for the first end of edge.
thePnt2 Point for the second end of edge.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created edge.
"""
# Example: see GEOM_TestAll.py
anObj = self.ShapesOp.MakeEdge(thePnt1, thePnt2)
RaiseIfFailed("MakeEdge", self.ShapesOp)
self._autoPublish(anObj, theName, "edge")
return anObj
## Create a new edge, corresponding to the given length on the given curve.
# @param theRefCurve The referenced curve (edge).
# @param theLength Length on the referenced curve. It can be negative.
# @param theStartPoint Any point can be selected for it, the new edge will begin
# at the end of \a theRefCurve, close to the selected point.
# If None, start from the first point of \a theRefCurve.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created edge.
#
# @ref tui_creation_edge "Example"
@ManageTransactions("ShapesOp")
def MakeEdgeOnCurveByLength(self, theRefCurve, theLength, theStartPoint = None, theName=None):
"""
Create a new edge, corresponding to the given length on the given curve.
Parameters:
theRefCurve The referenced curve (edge).
theLength Length on the referenced curve. It can be negative.
theStartPoint Any point can be selected for it, the new edge will begin
at the end of theRefCurve, close to the selected point.
If None, start from the first point of theRefCurve.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created edge.
"""
# Example: see GEOM_TestAll.py
theLength, Parameters = ParseParameters(theLength)
anObj = self.ShapesOp.MakeEdgeOnCurveByLength(theRefCurve, theLength, theStartPoint)
RaiseIfFailed("MakeEdgeOnCurveByLength", self.ShapesOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "edge")
return anObj
## Create an edge from specified wire.
# @param theWire source Wire
# @param theLinearTolerance linear tolerance value (default = 1e-07)
# @param theAngularTolerance angular tolerance value (default = 1e-12)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created edge.
#
# @ref tui_creation_edge "Example"
@ManageTransactions("ShapesOp")
def MakeEdgeWire(self, theWire, theLinearTolerance = 1e-07, theAngularTolerance = 1e-12, theName=None):
"""
Create an edge from specified wire.
Parameters:
theWire source Wire
theLinearTolerance linear tolerance value (default = 1e-07)
theAngularTolerance angular tolerance value (default = 1e-12)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created edge.
"""
# Example: see GEOM_TestAll.py
anObj = self.ShapesOp.MakeEdgeWire(theWire, theLinearTolerance, theAngularTolerance)
RaiseIfFailed("MakeEdgeWire", self.ShapesOp)
self._autoPublish(anObj, theName, "edge")
return anObj
## Create a wire from the set of edges and wires.
# @param theEdgesAndWires List of edges and/or wires.
# @param theTolerance Maximum distance between vertices, that will be merged.
# Values less than 1e-07 are equivalent to 1e-07 (Precision::Confusion())
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created wire.
#
# @ref tui_creation_wire "Example"
@ManageTransactions("ShapesOp")
def MakeWire(self, theEdgesAndWires, theTolerance = 1e-07, theName=None):
"""
Create a wire from the set of edges and wires.
Parameters:
theEdgesAndWires List of edges and/or wires.
theTolerance Maximum distance between vertices, that will be merged.
Values less than 1e-07 are equivalent to 1e-07 (Precision::Confusion()).
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created wire.
"""
# Example: see GEOM_TestAll.py
anObj = self.ShapesOp.MakeWire(theEdgesAndWires, theTolerance)
RaiseIfFailed("MakeWire", self.ShapesOp)
self._autoPublish(anObj, theName, "wire")
return anObj
## Create a face on the given wire.
# @param theWire closed Wire or Edge to build the face on.
# @param isPlanarWanted If TRUE, the algorithm tries to build a planar face.
# If the tolerance of the obtained planar face is less
# than 1e-06, this face will be returned, otherwise the
# algorithm tries to build any suitable face on the given
# wire and prints a warning message.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created face.
#
# @ref tui_creation_face "Example"
@ManageTransactions("ShapesOp")
def MakeFace(self, theWire, isPlanarWanted, theName=None):
"""
Create a face on the given wire.
Parameters:
theWire closed Wire or Edge to build the face on.
isPlanarWanted If TRUE, the algorithm tries to build a planar face.
If the tolerance of the obtained planar face is less
than 1e-06, this face will be returned, otherwise the
algorithm tries to build any suitable face on the given
wire and prints a warning message.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created face.
"""
# Example: see GEOM_TestAll.py
anObj = self.ShapesOp.MakeFace(theWire, isPlanarWanted)
if isPlanarWanted and anObj is not None and self.ShapesOp.GetErrorCode() == "MAKE_FACE_TOLERANCE_TOO_BIG":
print "WARNING: Cannot build a planar face: required tolerance is too big. Non-planar face is built."
else:
RaiseIfFailed("MakeFace", self.ShapesOp)
self._autoPublish(anObj, theName, "face")
return anObj
## Create a face on the given wires set.
# @param theWires List of closed wires or edges to build the face on.
# @param isPlanarWanted If TRUE, the algorithm tries to build a planar face.
# If the tolerance of the obtained planar face is less
# than 1e-06, this face will be returned, otherwise the
# algorithm tries to build any suitable face on the given
# wire and prints a warning message.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created face.
#
# @ref tui_creation_face "Example"
@ManageTransactions("ShapesOp")
def MakeFaceWires(self, theWires, isPlanarWanted, theName=None):
"""
Create a face on the given wires set.
Parameters:
theWires List of closed wires or edges to build the face on.
isPlanarWanted If TRUE, the algorithm tries to build a planar face.
If the tolerance of the obtained planar face is less
than 1e-06, this face will be returned, otherwise the
algorithm tries to build any suitable face on the given
wire and prints a warning message.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created face.
"""
# Example: see GEOM_TestAll.py
anObj = self.ShapesOp.MakeFaceWires(theWires, isPlanarWanted)
if isPlanarWanted and anObj is not None and self.ShapesOp.GetErrorCode() == "MAKE_FACE_TOLERANCE_TOO_BIG":
print "WARNING: Cannot build a planar face: required tolerance is too big. Non-planar face is built."
else:
RaiseIfFailed("MakeFaceWires", self.ShapesOp)
self._autoPublish(anObj, theName, "face")
return anObj
## See MakeFaceWires() method for details.
#
# @ref tui_creation_face "Example 1"
# \n @ref swig_MakeFaces "Example 2"
def MakeFaces(self, theWires, isPlanarWanted, theName=None):
"""
See geompy.MakeFaceWires() method for details.
"""
# Example: see GEOM_TestOthers.py
# note: auto-publishing is done in self.MakeFaceWires()
anObj = self.MakeFaceWires(theWires, isPlanarWanted, theName)
return anObj
## Create a shell from the set of faces and shells.
# @param theFacesAndShells List of faces and/or shells.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created shell.
#
# @ref tui_creation_shell "Example"
@ManageTransactions("ShapesOp")
def MakeShell(self, theFacesAndShells, theName=None):
"""
Create a shell from the set of faces and shells.
Parameters:
theFacesAndShells List of faces and/or shells.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created shell.
"""
# Example: see GEOM_TestAll.py
anObj = self.ShapesOp.MakeShell(theFacesAndShells)
RaiseIfFailed("MakeShell", self.ShapesOp)
self._autoPublish(anObj, theName, "shell")
return anObj
## Create a solid, bounded by the given shells.
# @param theShells Sequence of bounding shells.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created solid.
#
# @ref tui_creation_solid "Example"
@ManageTransactions("ShapesOp")
def MakeSolid(self, theShells, theName=None):
"""
Create a solid, bounded by the given shells.
Parameters:
theShells Sequence of bounding shells.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created solid.
"""
# Example: see GEOM_TestAll.py
if len(theShells) == 1:
descr = self._IsGoodForSolid(theShells[0])
#if len(descr) > 0:
# raise RuntimeError, "MakeSolidShells : " + descr
if descr == "WRN_SHAPE_UNCLOSED":
raise RuntimeError, "MakeSolidShells : Unable to create solid from unclosed shape"
anObj = self.ShapesOp.MakeSolidShells(theShells)
RaiseIfFailed("MakeSolidShells", self.ShapesOp)
self._autoPublish(anObj, theName, "solid")
return anObj
## Create a compound of the given shapes.
# @param theShapes List of shapes to put in compound.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created compound.
#
# @ref tui_creation_compound "Example"
@ManageTransactions("ShapesOp")
def MakeCompound(self, theShapes, theName=None):
"""
Create a compound of the given shapes.
Parameters:
theShapes List of shapes to put in compound.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created compound.
"""
# Example: see GEOM_TestAll.py
anObj = self.ShapesOp.MakeCompound(theShapes)
RaiseIfFailed("MakeCompound", self.ShapesOp)
self._autoPublish(anObj, theName, "compound")
return anObj
# end of l3_advanced
## @}
## @addtogroup l2_measure
## @{
## Gives quantity of faces in the given shape.
# @param theShape Shape to count faces of.
# @return Quantity of faces.
#
# @ref swig_NumberOf "Example"
@ManageTransactions("ShapesOp")
def NumberOfFaces(self, theShape):
"""
Gives quantity of faces in the given shape.
Parameters:
theShape Shape to count faces of.
Returns:
Quantity of faces.
"""
# Example: see GEOM_TestOthers.py
nb_faces = self.ShapesOp.NumberOfFaces(theShape)
RaiseIfFailed("NumberOfFaces", self.ShapesOp)
return nb_faces
## Gives quantity of edges in the given shape.
# @param theShape Shape to count edges of.
# @return Quantity of edges.
#
# @ref swig_NumberOf "Example"
@ManageTransactions("ShapesOp")
def NumberOfEdges(self, theShape):
"""
Gives quantity of edges in the given shape.
Parameters:
theShape Shape to count edges of.
Returns:
Quantity of edges.
"""
# Example: see GEOM_TestOthers.py
nb_edges = self.ShapesOp.NumberOfEdges(theShape)
RaiseIfFailed("NumberOfEdges", self.ShapesOp)
return nb_edges
## Gives quantity of sub-shapes of type theShapeType in the given shape.
# @param theShape Shape to count sub-shapes of.
# @param theShapeType Type of sub-shapes to count (see ShapeType())
# @return Quantity of sub-shapes of given type.
#
# @ref swig_NumberOf "Example"
@ManageTransactions("ShapesOp")
def NumberOfSubShapes(self, theShape, theShapeType):
"""
Gives quantity of sub-shapes of type theShapeType in the given shape.
Parameters:
theShape Shape to count sub-shapes of.
theShapeType Type of sub-shapes to count (see geompy.ShapeType)
Returns:
Quantity of sub-shapes of given type.
"""
# Example: see GEOM_TestOthers.py
nb_ss = self.ShapesOp.NumberOfSubShapes(theShape, theShapeType)
RaiseIfFailed("NumberOfSubShapes", self.ShapesOp)
return nb_ss
## Gives quantity of solids in the given shape.
# @param theShape Shape to count solids in.
# @return Quantity of solids.
#
# @ref swig_NumberOf "Example"
@ManageTransactions("ShapesOp")
def NumberOfSolids(self, theShape):
"""
Gives quantity of solids in the given shape.
Parameters:
theShape Shape to count solids in.
Returns:
Quantity of solids.
"""
# Example: see GEOM_TestOthers.py
nb_solids = self.ShapesOp.NumberOfSubShapes(theShape, self.ShapeType["SOLID"])
RaiseIfFailed("NumberOfSolids", self.ShapesOp)
return nb_solids
# end of l2_measure
## @}
## @addtogroup l3_healing
## @{
## Reverses an orientation the given shape.
# @param theShape Shape to be reversed.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return The reversed copy of theShape.
#
# @ref swig_ChangeOrientation "Example"
@ManageTransactions("ShapesOp")
def ChangeOrientation(self, theShape, theName=None):
"""
Reverses an orientation the given shape.
Parameters:
theShape Shape to be reversed.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
The reversed copy of theShape.
"""
# Example: see GEOM_TestAll.py
anObj = self.ShapesOp.ChangeOrientation(theShape)
RaiseIfFailed("ChangeOrientation", self.ShapesOp)
self._autoPublish(anObj, theName, "reversed")
return anObj
## See ChangeOrientation() method for details.
#
# @ref swig_OrientationChange "Example"
def OrientationChange(self, theShape, theName=None):
"""
See geompy.ChangeOrientation method for details.
"""
# Example: see GEOM_TestOthers.py
# note: auto-publishing is done in self.ChangeOrientation()
anObj = self.ChangeOrientation(theShape, theName)
return anObj
# end of l3_healing
## @}
## @addtogroup l4_obtain
## @{
## Retrieve all free faces from the given shape.
# Free face is a face, which is not shared between two shells of the shape.
# @param theShape Shape to find free faces in.
# @return List of IDs of all free faces, contained in theShape.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("ShapesOp")
def GetFreeFacesIDs(self,theShape):
"""
Retrieve all free faces from the given shape.
Free face is a face, which is not shared between two shells of the shape.
Parameters:
theShape Shape to find free faces in.
Returns:
List of IDs of all free faces, contained in theShape.
"""
# Example: see GEOM_TestOthers.py
anIDs = self.ShapesOp.GetFreeFacesIDs(theShape)
RaiseIfFailed("GetFreeFacesIDs", self.ShapesOp)
return anIDs
## Get all sub-shapes of theShape1 of the given type, shared with theShape2.
# @param theShape1 Shape to find sub-shapes in.
# @param theShape2 Shape to find shared sub-shapes with.
# @param theShapeType Type of sub-shapes to be retrieved.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of sub-shapes of theShape1, shared with theShape2.
#
# @ref swig_GetSharedShapes "Example"
@ManageTransactions("ShapesOp")
def GetSharedShapes(self, theShape1, theShape2, theShapeType, theName=None):
"""
Get all sub-shapes of theShape1 of the given type, shared with theShape2.
Parameters:
theShape1 Shape to find sub-shapes in.
theShape2 Shape to find shared sub-shapes with.
theShapeType Type of sub-shapes to be retrieved.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
List of sub-shapes of theShape1, shared with theShape2.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetSharedShapes(theShape1, theShape2, theShapeType)
RaiseIfFailed("GetSharedShapes", self.ShapesOp)
self._autoPublish(aList, theName, "shared")
return aList
## Get all sub-shapes, shared by all shapes in the list <VAR>theShapes</VAR>.
# @param theShapes Shapes to find common sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of objects, that are sub-shapes of all given shapes.
#
# @ref swig_GetSharedShapes "Example"
@ManageTransactions("ShapesOp")
def GetSharedShapesMulti(self, theShapes, theShapeType, theName=None):
"""
Get all sub-shapes, shared by all shapes in the list theShapes.
Parameters:
theShapes Shapes to find common sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
List of GEOM.GEOM_Object, that are sub-shapes of all given shapes.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetSharedShapesMulti(theShapes, theShapeType)
RaiseIfFailed("GetSharedShapesMulti", self.ShapesOp)
self._autoPublish(aList, theName, "shared")
return aList
## Find in <VAR>theShape</VAR> all sub-shapes of type <VAR>theShapeType</VAR>,
# situated relatively the specified plane by the certain way,
# defined through <VAR>theState</VAR> parameter.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theAx1 Vector (or line, or linear edge), specifying normal
# direction and location of the plane to find shapes on.
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of all found sub-shapes.
#
# @ref swig_GetShapesOnPlane "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnPlane(self, theShape, theShapeType, theAx1, theState, theName=None):
"""
Find in theShape all sub-shapes of type theShapeType,
situated relatively the specified plane by the certain way,
defined through theState parameter.
Parameters:
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theAx1 Vector (or line, or linear edge), specifying normal
direction and location of the plane to find shapes on.
theState The state of the sub-shapes to find (see GEOM::shape_state)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
List of all found sub-shapes.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnPlane(theShape, theShapeType, theAx1, theState)
RaiseIfFailed("GetShapesOnPlane", self.ShapesOp)
self._autoPublish(aList, theName, "shapeOnPlane")
return aList
## Find in <VAR>theShape</VAR> all sub-shapes of type <VAR>theShapeType</VAR>,
# situated relatively the specified plane by the certain way,
# defined through <VAR>theState</VAR> parameter.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theAx1 Vector (or line, or linear edge), specifying normal
# direction and location of the plane to find shapes on.
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
#
# @return List of all found sub-shapes indices.
#
# @ref swig_GetShapesOnPlaneIDs "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnPlaneIDs(self, theShape, theShapeType, theAx1, theState):
"""
Find in theShape all sub-shapes of type theShapeType,
situated relatively the specified plane by the certain way,
defined through theState parameter.
Parameters:
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theAx1 Vector (or line, or linear edge), specifying normal
direction and location of the plane to find shapes on.
theState The state of the sub-shapes to find (see GEOM::shape_state)
Returns:
List of all found sub-shapes indices.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnPlaneIDs(theShape, theShapeType, theAx1, theState)
RaiseIfFailed("GetShapesOnPlaneIDs", self.ShapesOp)
return aList
## Find in <VAR>theShape</VAR> all sub-shapes of type <VAR>theShapeType</VAR>,
# situated relatively the specified plane by the certain way,
# defined through <VAR>theState</VAR> parameter.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theAx1 Vector (or line, or linear edge), specifying normal
# direction of the plane to find shapes on.
# @param thePnt Point specifying location of the plane to find shapes on.
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of all found sub-shapes.
#
# @ref swig_GetShapesOnPlaneWithLocation "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnPlaneWithLocation(self, theShape, theShapeType, theAx1, thePnt, theState, theName=None):
"""
Find in theShape all sub-shapes of type theShapeType,
situated relatively the specified plane by the certain way,
defined through theState parameter.
Parameters:
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theAx1 Vector (or line, or linear edge), specifying normal
direction and location of the plane to find shapes on.
thePnt Point specifying location of the plane to find shapes on.
theState The state of the sub-shapes to find (see GEOM::shape_state)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
List of all found sub-shapes.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnPlaneWithLocation(theShape, theShapeType,
theAx1, thePnt, theState)
RaiseIfFailed("GetShapesOnPlaneWithLocation", self.ShapesOp)
self._autoPublish(aList, theName, "shapeOnPlane")
return aList
## Find in <VAR>theShape</VAR> all sub-shapes of type <VAR>theShapeType</VAR>,
# situated relatively the specified plane by the certain way,
# defined through <VAR>theState</VAR> parameter.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theAx1 Vector (or line, or linear edge), specifying normal
# direction of the plane to find shapes on.
# @param thePnt Point specifying location of the plane to find shapes on.
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
#
# @return List of all found sub-shapes indices.
#
# @ref swig_GetShapesOnPlaneWithLocationIDs "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnPlaneWithLocationIDs(self, theShape, theShapeType, theAx1, thePnt, theState):
"""
Find in theShape all sub-shapes of type theShapeType,
situated relatively the specified plane by the certain way,
defined through theState parameter.
Parameters:
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theAx1 Vector (or line, or linear edge), specifying normal
direction and location of the plane to find shapes on.
thePnt Point specifying location of the plane to find shapes on.
theState The state of the sub-shapes to find (see GEOM::shape_state)
Returns:
List of all found sub-shapes indices.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnPlaneWithLocationIDs(theShape, theShapeType,
theAx1, thePnt, theState)
RaiseIfFailed("GetShapesOnPlaneWithLocationIDs", self.ShapesOp)
return aList
## Find in \a theShape all sub-shapes of type \a theShapeType, situated relatively
# the specified cylinder by the certain way, defined through \a theState parameter.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theAxis Vector (or line, or linear edge), specifying
# axis of the cylinder to find shapes on.
# @param theRadius Radius of the cylinder to find shapes on.
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of all found sub-shapes.
#
# @ref swig_GetShapesOnCylinder "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnCylinder(self, theShape, theShapeType, theAxis, theRadius, theState, theName=None):
"""
Find in theShape all sub-shapes of type theShapeType, situated relatively
the specified cylinder by the certain way, defined through theState parameter.
Parameters:
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theAxis Vector (or line, or linear edge), specifying
axis of the cylinder to find shapes on.
theRadius Radius of the cylinder to find shapes on.
theState The state of the sub-shapes to find (see GEOM::shape_state)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
List of all found sub-shapes.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnCylinder(theShape, theShapeType, theAxis, theRadius, theState)
RaiseIfFailed("GetShapesOnCylinder", self.ShapesOp)
self._autoPublish(aList, theName, "shapeOnCylinder")
return aList
## Find in \a theShape all sub-shapes of type \a theShapeType, situated relatively
# the specified cylinder by the certain way, defined through \a theState parameter.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theAxis Vector (or line, or linear edge), specifying
# axis of the cylinder to find shapes on.
# @param theRadius Radius of the cylinder to find shapes on.
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
#
# @return List of all found sub-shapes indices.
#
# @ref swig_GetShapesOnCylinderIDs "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnCylinderIDs(self, theShape, theShapeType, theAxis, theRadius, theState):
"""
Find in theShape all sub-shapes of type theShapeType, situated relatively
the specified cylinder by the certain way, defined through theState parameter.
Parameters:
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theAxis Vector (or line, or linear edge), specifying
axis of the cylinder to find shapes on.
theRadius Radius of the cylinder to find shapes on.
theState The state of the sub-shapes to find (see GEOM::shape_state)
Returns:
List of all found sub-shapes indices.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnCylinderIDs(theShape, theShapeType, theAxis, theRadius, theState)
RaiseIfFailed("GetShapesOnCylinderIDs", self.ShapesOp)
return aList
## Find in \a theShape all sub-shapes of type \a theShapeType, situated relatively
# the specified cylinder by the certain way, defined through \a theState parameter.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theAxis Vector (or line, or linear edge), specifying
# axis of the cylinder to find shapes on.
# @param thePnt Point specifying location of the bottom of the cylinder.
# @param theRadius Radius of the cylinder to find shapes on.
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of all found sub-shapes.
#
# @ref swig_GetShapesOnCylinderWithLocation "Example"
        @ManageTransactions("ShapesOp")
        def GetShapesOnCylinderWithLocation(self, theShape, theShapeType, theAxis, thePnt, theRadius, theState, theName=None):
            """
            Find in theShape all sub-shapes of type theShapeType, situated relatively
            the specified cylinder by the certain way, defined through theState parameter.
            Parameters:
                theShape Shape to find sub-shapes of.
                theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
                theAxis Vector (or line, or linear edge), specifying
                        axis of the cylinder to find shapes on.
                thePnt Point specifying location of the bottom of the cylinder.
                theRadius Radius of the cylinder to find shapes on.
                theState The state of the sub-shapes to find (see GEOM::shape_state)
                theName Object name; when specified, this parameter is used
                        for result publication in the study. Otherwise, if automatic
                        publication is switched on, default value is used for result name.
            Returns:
                List of all found sub-shapes.
            """
            # Example: see GEOM_TestOthers.py
            aList = self.ShapesOp.GetShapesOnCylinderWithLocation(theShape, theShapeType, theAxis, thePnt, theRadius, theState)
            RaiseIfFailed("GetShapesOnCylinderWithLocation", self.ShapesOp)
            self._autoPublish(aList, theName, "shapeOnCylinder")
            return aList
## Find in \a theShape all sub-shapes of type \a theShapeType, situated relatively
# the specified cylinder by the certain way, defined through \a theState parameter.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theAxis Vector (or line, or linear edge), specifying
# axis of the cylinder to find shapes on.
# @param thePnt Point specifying location of the bottom of the cylinder.
# @param theRadius Radius of the cylinder to find shapes on.
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
#
# @return List of all found sub-shapes indices
#
# @ref swig_GetShapesOnCylinderWithLocationIDs "Example"
        @ManageTransactions("ShapesOp")
        def GetShapesOnCylinderWithLocationIDs(self, theShape, theShapeType, theAxis, thePnt, theRadius, theState):
            """
            Find in theShape all sub-shapes of type theShapeType, situated relatively
            the specified cylinder by the certain way, defined through theState parameter.
            Parameters:
                theShape Shape to find sub-shapes of.
                theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
                theAxis Vector (or line, or linear edge), specifying
                        axis of the cylinder to find shapes on.
                thePnt Point specifying location of the bottom of the cylinder.
                theRadius Radius of the cylinder to find shapes on.
                theState The state of the sub-shapes to find (see GEOM::shape_state)
            Returns:
                List of all found sub-shapes indices.
            """
            # Example: see GEOM_TestOthers.py
            aList = self.ShapesOp.GetShapesOnCylinderWithLocationIDs(theShape, theShapeType, theAxis, thePnt, theRadius, theState)
            RaiseIfFailed("GetShapesOnCylinderWithLocationIDs", self.ShapesOp)
            return aList
## Find in \a theShape all sub-shapes of type \a theShapeType, situated relatively
# the specified sphere by the certain way, defined through \a theState parameter.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theCenter Point, specifying center of the sphere to find shapes on.
# @param theRadius Radius of the sphere to find shapes on.
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of all found sub-shapes.
#
# @ref swig_GetShapesOnSphere "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnSphere(self, theShape, theShapeType, theCenter, theRadius, theState, theName=None):
"""
Find in theShape all sub-shapes of type theShapeType, situated relatively
the specified sphere by the certain way, defined through theState parameter.
Parameters:
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theCenter Point, specifying center of the sphere to find shapes on.
theRadius Radius of the sphere to find shapes on.
theState The state of the sub-shapes to find (see GEOM::shape_state)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
List of all found sub-shapes.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnSphere(theShape, theShapeType, theCenter, theRadius, theState)
RaiseIfFailed("GetShapesOnSphere", self.ShapesOp)
self._autoPublish(aList, theName, "shapeOnSphere")
return aList
## Find in \a theShape all sub-shapes of type \a theShapeType, situated relatively
# the specified sphere by the certain way, defined through \a theState parameter.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theCenter Point, specifying center of the sphere to find shapes on.
# @param theRadius Radius of the sphere to find shapes on.
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
#
# @return List of all found sub-shapes indices.
#
# @ref swig_GetShapesOnSphereIDs "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnSphereIDs(self, theShape, theShapeType, theCenter, theRadius, theState):
"""
Find in theShape all sub-shapes of type theShapeType, situated relatively
the specified sphere by the certain way, defined through theState parameter.
Parameters:
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theCenter Point, specifying center of the sphere to find shapes on.
theRadius Radius of the sphere to find shapes on.
theState The state of the sub-shapes to find (see GEOM::shape_state)
Returns:
List of all found sub-shapes indices.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnSphereIDs(theShape, theShapeType, theCenter, theRadius, theState)
RaiseIfFailed("GetShapesOnSphereIDs", self.ShapesOp)
return aList
## Find in \a theShape all sub-shapes of type \a theShapeType, situated relatively
# the specified quadrangle by the certain way, defined through \a theState parameter.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theTopLeftPoint Point, specifying top left corner of a quadrangle
# @param theTopRigthPoint Point, specifying top right corner of a quadrangle
# @param theBottomLeftPoint Point, specifying bottom left corner of a quadrangle
# @param theBottomRigthPoint Point, specifying bottom right corner of a quadrangle
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of all found sub-shapes.
#
# @ref swig_GetShapesOnQuadrangle "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnQuadrangle(self, theShape, theShapeType,
theTopLeftPoint, theTopRigthPoint,
theBottomLeftPoint, theBottomRigthPoint, theState, theName=None):
"""
Find in theShape all sub-shapes of type theShapeType, situated relatively
the specified quadrangle by the certain way, defined through theState parameter.
Parameters:
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theTopLeftPoint Point, specifying top left corner of a quadrangle
theTopRigthPoint Point, specifying top right corner of a quadrangle
theBottomLeftPoint Point, specifying bottom left corner of a quadrangle
theBottomRigthPoint Point, specifying bottom right corner of a quadrangle
theState The state of the sub-shapes to find (see GEOM::shape_state)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
List of all found sub-shapes.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnQuadrangle(theShape, theShapeType,
theTopLeftPoint, theTopRigthPoint,
theBottomLeftPoint, theBottomRigthPoint, theState)
RaiseIfFailed("GetShapesOnQuadrangle", self.ShapesOp)
self._autoPublish(aList, theName, "shapeOnQuadrangle")
return aList
## Find in \a theShape all sub-shapes of type \a theShapeType, situated relatively
# the specified quadrangle by the certain way, defined through \a theState parameter.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theTopLeftPoint Point, specifying top left corner of a quadrangle
# @param theTopRigthPoint Point, specifying top right corner of a quadrangle
# @param theBottomLeftPoint Point, specifying bottom left corner of a quadrangle
# @param theBottomRigthPoint Point, specifying bottom right corner of a quadrangle
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
#
# @return List of all found sub-shapes indices.
#
# @ref swig_GetShapesOnQuadrangleIDs "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnQuadrangleIDs(self, theShape, theShapeType,
theTopLeftPoint, theTopRigthPoint,
theBottomLeftPoint, theBottomRigthPoint, theState):
"""
Find in theShape all sub-shapes of type theShapeType, situated relatively
the specified quadrangle by the certain way, defined through theState parameter.
Parameters:
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theTopLeftPoint Point, specifying top left corner of a quadrangle
theTopRigthPoint Point, specifying top right corner of a quadrangle
theBottomLeftPoint Point, specifying bottom left corner of a quadrangle
theBottomRigthPoint Point, specifying bottom right corner of a quadrangle
theState The state of the sub-shapes to find (see GEOM::shape_state)
Returns:
List of all found sub-shapes indices.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnQuadrangleIDs(theShape, theShapeType,
theTopLeftPoint, theTopRigthPoint,
theBottomLeftPoint, theBottomRigthPoint, theState)
RaiseIfFailed("GetShapesOnQuadrangleIDs", self.ShapesOp)
return aList
## Find in \a theShape all sub-shapes of type \a theShapeType, situated relatively
# the specified \a theBox by the certain way, defined through \a theState parameter.
# @param theBox Shape for relative comparing.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of all found sub-shapes.
#
# @ref swig_GetShapesOnBox "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnBox(self, theBox, theShape, theShapeType, theState, theName=None):
"""
Find in theShape all sub-shapes of type theShapeType, situated relatively
the specified theBox by the certain way, defined through theState parameter.
Parameters:
theBox Shape for relative comparing.
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theState The state of the sub-shapes to find (see GEOM::shape_state)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
List of all found sub-shapes.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnBox(theBox, theShape, theShapeType, theState)
RaiseIfFailed("GetShapesOnBox", self.ShapesOp)
self._autoPublish(aList, theName, "shapeOnBox")
return aList
## Find in \a theShape all sub-shapes of type \a theShapeType, situated relatively
# the specified \a theBox by the certain way, defined through \a theState parameter.
# @param theBox Shape for relative comparing.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
#
# @return List of all found sub-shapes indices.
#
# @ref swig_GetShapesOnBoxIDs "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnBoxIDs(self, theBox, theShape, theShapeType, theState):
"""
Find in theShape all sub-shapes of type theShapeType, situated relatively
the specified theBox by the certain way, defined through theState parameter.
Parameters:
theBox Shape for relative comparing.
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theState The state of the sub-shapes to find (see GEOM::shape_state)
Returns:
List of all found sub-shapes indices.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnBoxIDs(theBox, theShape, theShapeType, theState)
RaiseIfFailed("GetShapesOnBoxIDs", self.ShapesOp)
return aList
## Find in \a theShape all sub-shapes of type \a theShapeType,
# situated relatively the specified \a theCheckShape by the
# certain way, defined through \a theState parameter.
# @param theCheckShape Shape for relative comparing. It must be a solid.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of all found sub-shapes.
#
# @ref swig_GetShapesOnShape "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnShape(self, theCheckShape, theShape, theShapeType, theState, theName=None):
"""
Find in theShape all sub-shapes of type theShapeType,
situated relatively the specified theCheckShape by the
certain way, defined through theState parameter.
Parameters:
theCheckShape Shape for relative comparing. It must be a solid.
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theState The state of the sub-shapes to find (see GEOM::shape_state)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
List of all found sub-shapes.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnShape(theCheckShape, theShape,
theShapeType, theState)
RaiseIfFailed("GetShapesOnShape", self.ShapesOp)
self._autoPublish(aList, theName, "shapeOnShape")
return aList
## Find in \a theShape all sub-shapes of type \a theShapeType,
# situated relatively the specified \a theCheckShape by the
# certain way, defined through \a theState parameter.
# @param theCheckShape Shape for relative comparing. It must be a solid.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return All found sub-shapes as compound.
#
# @ref swig_GetShapesOnShapeAsCompound "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnShapeAsCompound(self, theCheckShape, theShape, theShapeType, theState, theName=None):
"""
Find in theShape all sub-shapes of type theShapeType,
situated relatively the specified theCheckShape by the
certain way, defined through theState parameter.
Parameters:
theCheckShape Shape for relative comparing. It must be a solid.
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theState The state of the sub-shapes to find (see GEOM::shape_state)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
All found sub-shapes as compound.
"""
# Example: see GEOM_TestOthers.py
anObj = self.ShapesOp.GetShapesOnShapeAsCompound(theCheckShape, theShape,
theShapeType, theState)
RaiseIfFailed("GetShapesOnShapeAsCompound", self.ShapesOp)
self._autoPublish(anObj, theName, "shapeOnShape")
return anObj
## Find in \a theShape all sub-shapes of type \a theShapeType,
# situated relatively the specified \a theCheckShape by the
# certain way, defined through \a theState parameter.
# @param theCheckShape Shape for relative comparing. It must be a solid.
# @param theShape Shape to find sub-shapes of.
# @param theShapeType Type of sub-shapes to be retrieved (see ShapeType())
# @param theState The state of the sub-shapes to find (see GEOM::shape_state)
#
# @return List of all found sub-shapes indices.
#
# @ref swig_GetShapesOnShapeIDs "Example"
@ManageTransactions("ShapesOp")
def GetShapesOnShapeIDs(self, theCheckShape, theShape, theShapeType, theState):
"""
Find in theShape all sub-shapes of type theShapeType,
situated relatively the specified theCheckShape by the
certain way, defined through theState parameter.
Parameters:
theCheckShape Shape for relative comparing. It must be a solid.
theShape Shape to find sub-shapes of.
theShapeType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theState The state of the sub-shapes to find (see GEOM::shape_state)
Returns:
List of all found sub-shapes indices.
"""
# Example: see GEOM_TestOthers.py
aList = self.ShapesOp.GetShapesOnShapeIDs(theCheckShape, theShape,
theShapeType, theState)
RaiseIfFailed("GetShapesOnShapeIDs", self.ShapesOp)
return aList
## Get sub-shape(s) of theShapeWhere, which are
# coincident with \a theShapeWhat or could be a part of it.
# @param theShapeWhere Shape to find sub-shapes of.
# @param theShapeWhat Shape, specifying what to find.
# @param isNewImplementation implementation of GetInPlace functionality
        #  (default = False, old algorithm based on shape properties)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return Group of all found sub-shapes or a single found sub-shape.
#
# @note This function has a restriction on argument shapes.
# If \a theShapeWhere has curved parts with significantly
# outstanding centres (i.e. the mass centre of a part is closer to
# \a theShapeWhat than to the part), such parts will not be found.
# @image html get_in_place_lost_part.png
#
# @ref swig_GetInPlace "Example"
@ManageTransactions("ShapesOp")
def GetInPlace(self, theShapeWhere, theShapeWhat, isNewImplementation = False, theName=None):
"""
Get sub-shape(s) of theShapeWhere, which are
coincident with theShapeWhat or could be a part of it.
Parameters:
theShapeWhere Shape to find sub-shapes of.
theShapeWhat Shape, specifying what to find.
isNewImplementation Implementation of GetInPlace functionality
(default = False, old alghorithm based on shape properties)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
Group of all found sub-shapes or a single found sub-shape.
Note:
This function has a restriction on argument shapes.
If theShapeWhere has curved parts with significantly
outstanding centres (i.e. the mass centre of a part is closer to
theShapeWhat than to the part), such parts will not be found.
"""
# Example: see GEOM_TestOthers.py
anObj = None
if isNewImplementation:
anObj = self.ShapesOp.GetInPlace(theShapeWhere, theShapeWhat)
else:
anObj = self.ShapesOp.GetInPlaceOld(theShapeWhere, theShapeWhat)
pass
RaiseIfFailed("GetInPlace", self.ShapesOp)
self._autoPublish(anObj, theName, "inplace")
return anObj
## Get sub-shape(s) of \a theShapeWhere, which are
# coincident with \a theShapeWhat or could be a part of it.
#
# Implementation of this method is based on a saved history of an operation,
# produced \a theShapeWhere. The \a theShapeWhat must be among this operation's
# arguments (an argument shape or a sub-shape of an argument shape).
# The operation could be the Partition or one of boolean operations,
# performed on simple shapes (not on compounds).
#
# @param theShapeWhere Shape to find sub-shapes of.
# @param theShapeWhat Shape, specifying what to find (must be in the
# building history of the ShapeWhere).
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return Group of all found sub-shapes or a single found sub-shape.
#
# @ref swig_GetInPlace "Example"
@ManageTransactions("ShapesOp")
def GetInPlaceByHistory(self, theShapeWhere, theShapeWhat, theName=None):
"""
Implementation of this method is based on a saved history of an operation,
produced theShapeWhere. The theShapeWhat must be among this operation's
arguments (an argument shape or a sub-shape of an argument shape).
The operation could be the Partition or one of boolean operations,
performed on simple shapes (not on compounds).
Parameters:
theShapeWhere Shape to find sub-shapes of.
theShapeWhat Shape, specifying what to find (must be in the
building history of the ShapeWhere).
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
Group of all found sub-shapes or a single found sub-shape.
"""
# Example: see GEOM_TestOthers.py
anObj = self.ShapesOp.GetInPlaceByHistory(theShapeWhere, theShapeWhat)
RaiseIfFailed("GetInPlaceByHistory", self.ShapesOp)
self._autoPublish(anObj, theName, "inplace")
return anObj
## Get sub-shape of theShapeWhere, which is
# equal to \a theShapeWhat.
# @param theShapeWhere Shape to find sub-shape of.
# @param theShapeWhat Shape, specifying what to find.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object for found sub-shape.
#
# @ref swig_GetSame "Example"
@ManageTransactions("ShapesOp")
def GetSame(self, theShapeWhere, theShapeWhat, theName=None):
"""
Get sub-shape of theShapeWhere, which is
equal to theShapeWhat.
Parameters:
theShapeWhere Shape to find sub-shape of.
theShapeWhat Shape, specifying what to find.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object for found sub-shape.
"""
anObj = self.ShapesOp.GetSame(theShapeWhere, theShapeWhat)
RaiseIfFailed("GetSame", self.ShapesOp)
self._autoPublish(anObj, theName, "sameShape")
return anObj
## Get sub-shape indices of theShapeWhere, which is
# equal to \a theShapeWhat.
# @param theShapeWhere Shape to find sub-shape of.
# @param theShapeWhat Shape, specifying what to find.
# @return List of all found sub-shapes indices.
#
# @ref swig_GetSame "Example"
@ManageTransactions("ShapesOp")
def GetSameIDs(self, theShapeWhere, theShapeWhat):
"""
Get sub-shape indices of theShapeWhere, which is
equal to theShapeWhat.
Parameters:
theShapeWhere Shape to find sub-shape of.
theShapeWhat Shape, specifying what to find.
Returns:
List of all found sub-shapes indices.
"""
anObj = self.ShapesOp.GetSameIDs(theShapeWhere, theShapeWhat)
RaiseIfFailed("GetSameIDs", self.ShapesOp)
return anObj
# end of l4_obtain
## @}
## @addtogroup l4_access
## @{
## Obtain a composite sub-shape of <VAR>aShape</VAR>, composed from sub-shapes
# of aShape, selected by their unique IDs inside <VAR>aShape</VAR>
# @param aShape Shape to get sub-shape of.
# @param ListOfID List of sub-shapes indices.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return Found sub-shape.
#
# @ref swig_all_decompose "Example"
def GetSubShape(self, aShape, ListOfID, theName=None):
"""
Obtain a composite sub-shape of aShape, composed from sub-shapes
of aShape, selected by their unique IDs inside aShape
Parameters:
aShape Shape to get sub-shape of.
ListOfID List of sub-shapes indices.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
Found sub-shape.
"""
# Example: see GEOM_TestAll.py
anObj = self.AddSubShape(aShape,ListOfID)
self._autoPublish(anObj, theName, "subshape")
return anObj
        ## Obtain unique ID of sub-shape <VAR>aSubShape</VAR> inside <VAR>aShape</VAR>.
# @param aShape Shape to get sub-shape of.
# @param aSubShape Sub-shapes of aShape.
# @return ID of found sub-shape.
#
# @ref swig_all_decompose "Example"
        @ManageTransactions("LocalOp")
        def GetSubShapeID(self, aShape, aSubShape):
            """
            Obtain unique ID of sub-shape aSubShape inside aShape.
            Parameters:
                aShape Shape to get sub-shape of.
                aSubShape Sub-shape of aShape.
            Returns:
                ID of found sub-shape.
            """
            # Example: see GEOM_TestAll.py
            anID = self.LocalOp.GetSubShapeIndex(aShape, aSubShape)
            RaiseIfFailed("GetSubShapeIndex", self.LocalOp)
            return anID
## Obtain unique IDs of sub-shapes <VAR>aSubShapes</VAR> inside <VAR>aShape</VAR>
# This function is provided for performance purpose. The complexity is O(n) with n
# the number of subobjects of aShape
# @param aShape Shape to get sub-shape of.
# @param aSubShapes Sub-shapes of aShape.
# @return list of IDs of found sub-shapes.
#
# @ref swig_all_decompose "Example"
@ManageTransactions("ShapesOp")
def GetSubShapesIDs(self, aShape, aSubShapes):
"""
Obtain a list of IDs of sub-shapes aSubShapes inside aShape
This function is provided for performance purpose. The complexity is O(n) with n
the number of subobjects of aShape
Parameters:
aShape Shape to get sub-shape of.
aSubShapes Sub-shapes of aShape.
Returns:
List of IDs of found sub-shape.
"""
# Example: see GEOM_TestAll.py
anIDs = self.ShapesOp.GetSubShapesIndices(aShape, aSubShapes)
RaiseIfFailed("GetSubShapesIndices", self.ShapesOp)
return anIDs
# end of l4_access
## @}
## @addtogroup l4_decompose
## @{
## Get all sub-shapes and groups of \a theShape,
# that were created already by any other methods.
# @param theShape Any shape.
# @param theGroupsOnly If this parameter is TRUE, only groups will be
# returned, else all found sub-shapes and groups.
# @return List of existing sub-objects of \a theShape.
#
# @ref swig_all_decompose "Example"
@ManageTransactions("ShapesOp")
def GetExistingSubObjects(self, theShape, theGroupsOnly = False):
"""
Get all sub-shapes and groups of theShape,
that were created already by any other methods.
Parameters:
theShape Any shape.
theGroupsOnly If this parameter is TRUE, only groups will be
returned, else all found sub-shapes and groups.
Returns:
List of existing sub-objects of theShape.
"""
# Example: see GEOM_TestAll.py
ListObj = self.ShapesOp.GetExistingSubObjects(theShape, theGroupsOnly)
RaiseIfFailed("GetExistingSubObjects", self.ShapesOp)
return ListObj
## Get all groups of \a theShape,
# that were created already by any other methods.
# @param theShape Any shape.
# @return List of existing groups of \a theShape.
#
# @ref swig_all_decompose "Example"
@ManageTransactions("ShapesOp")
def GetGroups(self, theShape):
"""
Get all groups of theShape,
that were created already by any other methods.
Parameters:
theShape Any shape.
Returns:
List of existing groups of theShape.
"""
# Example: see GEOM_TestAll.py
ListObj = self.ShapesOp.GetExistingSubObjects(theShape, True)
RaiseIfFailed("GetExistingSubObjects", self.ShapesOp)
return ListObj
## Explode a shape on sub-shapes of a given type.
# If the shape itself matches the type, it is also returned.
# @param aShape Shape to be exploded.
# @param aType Type of sub-shapes to be retrieved (see ShapeType())
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of sub-shapes of type theShapeType, contained in theShape.
#
# @ref swig_all_decompose "Example"
@ManageTransactions("ShapesOp")
def SubShapeAll(self, aShape, aType, theName=None):
"""
Explode a shape on sub-shapes of a given type.
If the shape itself matches the type, it is also returned.
Parameters:
aShape Shape to be exploded.
aType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
List of sub-shapes of type theShapeType, contained in theShape.
"""
# Example: see GEOM_TestAll.py
ListObj = self.ShapesOp.MakeAllSubShapes(aShape, EnumToLong( aType ), False)
RaiseIfFailed("SubShapeAll", self.ShapesOp)
self._autoPublish(ListObj, theName, "subshape")
return ListObj
## Explode a shape on sub-shapes of a given type.
# @param aShape Shape to be exploded.
# @param aType Type of sub-shapes to be retrieved (see ShapeType())
# @return List of IDs of sub-shapes.
#
# @ref swig_all_decompose "Example"
@ManageTransactions("ShapesOp")
def SubShapeAllIDs(self, aShape, aType):
"""
Explode a shape on sub-shapes of a given type.
Parameters:
aShape Shape to be exploded (see geompy.ShapeType)
aType Type of sub-shapes to be retrieved (see geompy.ShapeType)
Returns:
List of IDs of sub-shapes.
"""
ListObj = self.ShapesOp.GetAllSubShapesIDs(aShape, EnumToLong( aType ), False)
RaiseIfFailed("SubShapeAllIDs", self.ShapesOp)
return ListObj
## Obtain a compound of sub-shapes of <VAR>aShape</VAR>,
# selected by their indices in list of all sub-shapes of type <VAR>aType</VAR>.
# Each index is in range [1, Nb_Sub-Shapes_Of_Given_Type]
# @param aShape Shape to get sub-shape of.
# @param ListOfInd List of sub-shapes indices.
# @param aType Type of sub-shapes to be retrieved (see ShapeType())
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return A compound of sub-shapes of aShape.
#
# @ref swig_all_decompose "Example"
def SubShape(self, aShape, aType, ListOfInd, theName=None):
"""
Obtain a compound of sub-shapes of aShape,
selected by their indices in list of all sub-shapes of type aType.
Each index is in range [1, Nb_Sub-Shapes_Of_Given_Type]
Parameters:
aShape Shape to get sub-shape of.
ListOfID List of sub-shapes indices.
aType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
A compound of sub-shapes of aShape.
"""
# Example: see GEOM_TestAll.py
ListOfIDs = []
AllShapeIDsList = self.SubShapeAllIDs(aShape, EnumToLong( aType ))
for ind in ListOfInd:
ListOfIDs.append(AllShapeIDsList[ind - 1])
# note: auto-publishing is done in self.GetSubShape()
anObj = self.GetSubShape(aShape, ListOfIDs, theName)
return anObj
## Explode a shape on sub-shapes of a given type.
# Sub-shapes will be sorted taking into account their gravity centers,
# to provide stable order of sub-shapes.
# If the shape itself matches the type, it is also returned.
# @param aShape Shape to be exploded.
# @param aType Type of sub-shapes to be retrieved (see ShapeType())
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of sub-shapes of type theShapeType, contained in theShape.
#
# @ref swig_SubShapeAllSorted "Example"
@ManageTransactions("ShapesOp")
def SubShapeAllSortedCentres(self, aShape, aType, theName=None):
"""
Explode a shape on sub-shapes of a given type.
Sub-shapes will be sorted taking into account their gravity centers,
to provide stable order of sub-shapes.
If the shape itself matches the type, it is also returned.
Parameters:
aShape Shape to be exploded.
aType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
List of sub-shapes of type theShapeType, contained in theShape.
"""
# Example: see GEOM_TestAll.py
ListObj = self.ShapesOp.MakeAllSubShapes(aShape, EnumToLong( aType ), True)
RaiseIfFailed("SubShapeAllSortedCentres", self.ShapesOp)
self._autoPublish(ListObj, theName, "subshape")
return ListObj
## Explode a shape on sub-shapes of a given type.
# Sub-shapes will be sorted taking into account their gravity centers,
# to provide stable order of sub-shapes.
# @param aShape Shape to be exploded.
# @param aType Type of sub-shapes to be retrieved (see ShapeType())
# @return List of IDs of sub-shapes.
#
# @ref swig_all_decompose "Example"
@ManageTransactions("ShapesOp")
def SubShapeAllSortedCentresIDs(self, aShape, aType):
"""
Explode a shape on sub-shapes of a given type.
Sub-shapes will be sorted taking into account their gravity centers,
to provide stable order of sub-shapes.
Parameters:
aShape Shape to be exploded.
aType Type of sub-shapes to be retrieved (see geompy.ShapeType)
Returns:
List of IDs of sub-shapes.
"""
ListIDs = self.ShapesOp.GetAllSubShapesIDs(aShape, EnumToLong( aType ), True)
RaiseIfFailed("SubShapeAllIDs", self.ShapesOp)
return ListIDs
## Obtain a compound of sub-shapes of <VAR>aShape</VAR>,
        #  selected by their indices in sorted list of all sub-shapes of type <VAR>aType</VAR>.
# Each index is in range [1, Nb_Sub-Shapes_Of_Given_Type]
# @param aShape Shape to get sub-shape of.
# @param ListOfInd List of sub-shapes indices.
# @param aType Type of sub-shapes to be retrieved (see ShapeType())
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return A compound of sub-shapes of aShape.
#
# @ref swig_all_decompose "Example"
def SubShapeSortedCentres(self, aShape, aType, ListOfInd, theName=None):
"""
Obtain a compound of sub-shapes of aShape,
selected by they indices in sorted list of all sub-shapes of type aType.
Each index is in range [1, Nb_Sub-Shapes_Of_Given_Type]
Parameters:
aShape Shape to get sub-shape of.
ListOfID List of sub-shapes indices.
aType Type of sub-shapes to be retrieved (see geompy.ShapeType)
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
A compound of sub-shapes of aShape.
"""
# Example: see GEOM_TestAll.py
ListOfIDs = []
AllShapeIDsList = self.SubShapeAllSortedCentresIDs(aShape, EnumToLong( aType ))
for ind in ListOfInd:
ListOfIDs.append(AllShapeIDsList[ind - 1])
# note: auto-publishing is done in self.GetSubShape()
anObj = self.GetSubShape(aShape, ListOfIDs, theName)
return anObj
## Extract shapes (excluding the main shape) of given type.
# @param aShape The shape.
# @param aType The shape type (see ShapeType())
# @param isSorted Boolean flag to switch sorting on/off.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of sub-shapes of type aType, contained in aShape.
#
# @ref swig_FilletChamfer "Example"
@ManageTransactions("ShapesOp")
def ExtractShapes(self, aShape, aType, isSorted = False, theName=None):
"""
Extract shapes (excluding the main shape) of given type.
Parameters:
aShape The shape.
aType The shape type (see geompy.ShapeType)
isSorted Boolean flag to switch sorting on/off.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
List of sub-shapes of type aType, contained in aShape.
"""
# Example: see GEOM_TestAll.py
ListObj = self.ShapesOp.ExtractSubShapes(aShape, EnumToLong( aType ), isSorted)
RaiseIfFailed("ExtractSubShapes", self.ShapesOp)
self._autoPublish(ListObj, theName, "subshape")
return ListObj
## Get a set of sub-shapes defined by their unique IDs inside <VAR>aShape</VAR>
# @param aShape Main shape.
# @param anIDs List of unique IDs of sub-shapes inside <VAR>aShape</VAR>.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
# @return List of GEOM.GEOM_Object, corresponding to found sub-shapes.
#
# @ref swig_all_decompose "Example"
@ManageTransactions("ShapesOp")
def SubShapes(self, aShape, anIDs, theName=None):
"""
Get a set of sub-shapes defined by their unique IDs inside theMainShape
Parameters:
aShape Main shape.
anIDs List of unique IDs of sub-shapes inside theMainShape.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
List of GEOM.GEOM_Object, corresponding to found sub-shapes.
"""
# Example: see GEOM_TestAll.py
ListObj = self.ShapesOp.MakeSubShapes(aShape, anIDs)
RaiseIfFailed("SubShapes", self.ShapesOp)
self._autoPublish(ListObj, theName, "subshape")
return ListObj
# end of l4_decompose
## @}
## @addtogroup l4_decompose_d
## @{
## Deprecated method
# It works like SubShapeAllSortedCentres(), but wrongly
# defines centres of faces, shells and solids.
@ManageTransactions("ShapesOp")
def SubShapeAllSorted(self, aShape, aType, theName=None):
"""
Deprecated method
It works like geompy.SubShapeAllSortedCentres, but wrongly
defines centres of faces, shells and solids.
"""
ListObj = self.ShapesOp.MakeExplode(aShape, EnumToLong( aType ), True)
RaiseIfFailed("MakeExplode", self.ShapesOp)
self._autoPublish(ListObj, theName, "subshape")
return ListObj
## Deprecated method
# It works like SubShapeAllSortedCentresIDs(), but wrongly
# defines centres of faces, shells and solids.
@ManageTransactions("ShapesOp")
def SubShapeAllSortedIDs(self, aShape, aType):
"""
Deprecated method
It works like geompy.SubShapeAllSortedCentresIDs, but wrongly
defines centres of faces, shells and solids.
"""
ListIDs = self.ShapesOp.SubShapeAllIDs(aShape, EnumToLong( aType ), True)
RaiseIfFailed("SubShapeAllIDs", self.ShapesOp)
return ListIDs
## Deprecated method
# It works like SubShapeSortedCentres(), but has a bug
# (wrongly defines centres of faces, shells and solids).
def SubShapeSorted(self, aShape, aType, ListOfInd, theName=None):
"""
Deprecated method
It works like geompy.SubShapeSortedCentres, but has a bug
(wrongly defines centres of faces, shells and solids).
"""
ListOfIDs = []
AllShapeIDsList = self.SubShapeAllSortedIDs(aShape, EnumToLong( aType ))
for ind in ListOfInd:
ListOfIDs.append(AllShapeIDsList[ind - 1])
# note: auto-publishing is done in self.GetSubShape()
anObj = self.GetSubShape(aShape, ListOfIDs, theName)
return anObj
# end of l4_decompose_d
## @}
## @addtogroup l3_healing
## @{
## Apply a sequence of Shape Healing operators to the given object.
# @param theShape Shape to be processed.
# @param theOperators List of names of operators ("FixShape", "SplitClosedFaces", etc.).
# @param theParameters List of names of parameters
# ("FixShape.Tolerance3d", "SplitClosedFaces.NbSplitPoints", etc.).
# @param theValues List of values of parameters, in the same order
# as parameters are listed in <VAR>theParameters</VAR> list.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# <b> Operators and Parameters: </b> \n
#
# * \b FixShape - corrects invalid shapes. \n
# - \b FixShape.Tolerance3d - work tolerance for detection of the problems and correction of them. \n
# - \b FixShape.MaxTolerance3d - maximal possible tolerance of the shape after correction. \n
#
# * \b FixFaceSize - removes small faces, such as spots and strips.\n
# - \b FixFaceSize.Tolerance - defines minimum possible face size. \n
# - \b DropSmallEdges - removes edges, which merge with neighbouring edges. \n
# - \b DropSmallEdges.Tolerance3d - defines minimum possible distance between two parallel edges.\n
#
# * \b SplitAngle - splits faces based on conical surfaces, surfaces of revolution and cylindrical
# surfaces in segments using a certain angle. \n
# - \b SplitAngle.Angle - the central angle of the resulting segments (i.e. we obtain two segments
# if Angle=180, four if Angle=90, etc). \n
# - \b SplitAngle.MaxTolerance - maximum possible tolerance among the resulting segments.\n
#
# * \b SplitClosedFaces - splits closed faces in segments.
# The number of segments depends on the number of splitting points.\n
# - \b SplitClosedFaces.NbSplitPoints - the number of splitting points.\n
#
# * \b SplitContinuity - splits shapes to reduce continuities of curves and surfaces.\n
# - \b SplitContinuity.Tolerance3d - 3D tolerance for correction of geometry.\n
# - \b SplitContinuity.SurfaceContinuity - required continuity for surfaces.\n
# - \b SplitContinuity.CurveContinuity - required continuity for curves.\n
# This and the previous parameters can take the following values:\n
# \b Parametric \b Continuity \n
# \b C0 (Positional Continuity): curves are joined (the end positions of curves or surfaces
# are coincidental. The curves or surfaces may still meet at an angle, giving rise to a sharp corner or edge).\n
# \b C1 (Tangential Continuity): first derivatives are equal (the end vectors of curves or surfaces are parallel,
# ruling out sharp edges).\n
# \b C2 (Curvature Continuity): first and second derivatives are equal (the end vectors of curves or surfaces
# are of the same magnitude).\n
# \b CN N-th derivatives are equal (both the direction and the magnitude of the Nth derivatives of curves
# or surfaces (d/du C(u)) are the same at junction. \n
# \b Geometric \b Continuity \n
# \b G1: first derivatives are proportional at junction.\n
# The curve tangents thus have the same direction, but not necessarily the same magnitude.
# i.e., C1'(1) = (a,b,c) and C2'(0) = (k*a, k*b, k*c).\n
# \b G2: first and second derivatives are proportional at junction.
# As the names imply, geometric continuity requires the geometry to be continuous, while parametric
# continuity requires that the underlying parameterization was continuous as well.
# Parametric continuity of order n implies geometric continuity of order n, but not vice-versa.\n
#
# * \b BsplineRestriction - converts curves and surfaces to Bsplines and processes them with the following parameters:\n
# - \b BSplineRestriction.SurfaceMode - approximation of surfaces if restriction is necessary.\n
# - \b BSplineRestriction.Curve3dMode - conversion of any 3D curve to BSpline and approximation.\n
# - \b BSplineRestriction.Curve2dMode - conversion of any 2D curve to BSpline and approximation.\n
# - \b BSplineRestriction.Tolerance3d - defines the possibility of surfaces and 3D curves approximation
# with the specified parameters.\n
# - \b BSplineRestriction.Tolerance2d - defines the possibility of surfaces and 2D curves approximation
# with the specified parameters.\n
# - \b BSplineRestriction.RequiredDegree - required degree of the resulting BSplines.\n
# - \b BSplineRestriction.RequiredNbSegments - required maximum number of segments of resultant BSplines.\n
# - \b BSplineRestriction.Continuity3d - continuity of the resulting surfaces and 3D curves.\n
# - \b BSplineRestriction.Continuity2d - continuity of the resulting 2D curves.\n
#
# * \b ToBezier - converts curves and surfaces of any type to Bezier curves and surfaces.\n
# - \b ToBezier.SurfaceMode - if checked in, allows conversion of surfaces.\n
# - \b ToBezier.Curve3dMode - if checked in, allows conversion of 3D curves.\n
# - \b ToBezier.Curve2dMode - if checked in, allows conversion of 2D curves.\n
# - \b ToBezier.MaxTolerance - defines tolerance for detection and correction of problems.\n
#
# * \b SameParameter - fixes edges of 2D and 3D curves not having the same parameter.\n
# - \b SameParameter.Tolerance3d - defines tolerance for fixing of edges.\n
#
#
# @return New GEOM.GEOM_Object, containing processed shape.
#
# \n @ref tui_shape_processing "Example"
@ManageTransactions("HealOp")
def ProcessShape(self, theShape, theOperators, theParameters, theValues, theName=None):
"""
Apply a sequence of Shape Healing operators to the given object.
Parameters:
theShape Shape to be processed.
theValues List of values of parameters, in the same order
as parameters are listed in theParameters list.
theOperators List of names of operators ("FixShape", "SplitClosedFaces", etc.).
theParameters List of names of parameters
("FixShape.Tolerance3d", "SplitClosedFaces.NbSplitPoints", etc.).
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Operators and Parameters:
* FixShape - corrects invalid shapes.
* FixShape.Tolerance3d - work tolerance for detection of the problems and correction of them.
* FixShape.MaxTolerance3d - maximal possible tolerance of the shape after correction.
* FixFaceSize - removes small faces, such as spots and strips.
* FixFaceSize.Tolerance - defines minimum possible face size.
* DropSmallEdges - removes edges, which merge with neighbouring edges.
* DropSmallEdges.Tolerance3d - defines minimum possible distance between two parallel edges.
* SplitAngle - splits faces based on conical surfaces, surfaces of revolution and cylindrical surfaces
in segments using a certain angle.
* SplitAngle.Angle - the central angle of the resulting segments (i.e. we obtain two segments
if Angle=180, four if Angle=90, etc).
* SplitAngle.MaxTolerance - maximum possible tolerance among the resulting segments.
* SplitClosedFaces - splits closed faces in segments. The number of segments depends on the number of
splitting points.
* SplitClosedFaces.NbSplitPoints - the number of splitting points.
* SplitContinuity - splits shapes to reduce continuities of curves and surfaces.
* SplitContinuity.Tolerance3d - 3D tolerance for correction of geometry.
* SplitContinuity.SurfaceContinuity - required continuity for surfaces.
* SplitContinuity.CurveContinuity - required continuity for curves.
This and the previous parameters can take the following values:
Parametric Continuity:
C0 (Positional Continuity): curves are joined (the end positions of curves or surfaces are
coincidental. The curves or surfaces may still meet at an angle,
giving rise to a sharp corner or edge).
C1 (Tangential Continuity): first derivatives are equal (the end vectors of curves or surfaces
are parallel, ruling out sharp edges).
C2 (Curvature Continuity): first and second derivatives are equal (the end vectors of curves
or surfaces are of the same magnitude).
CN N-th derivatives are equal (both the direction and the magnitude of the Nth derivatives of
curves or surfaces (d/du C(u)) are the same at junction.
Geometric Continuity:
G1: first derivatives are proportional at junction.
The curve tangents thus have the same direction, but not necessarily the same magnitude.
i.e., C1'(1) = (a,b,c) and C2'(0) = (k*a, k*b, k*c).
G2: first and second derivatives are proportional at junction. As the names imply,
geometric continuity requires the geometry to be continuous, while parametric continuity requires
that the underlying parameterization was continuous as well. Parametric continuity of order n implies
geometric continuity of order n, but not vice-versa.
* BsplineRestriction - converts curves and surfaces to Bsplines and processes them with the following parameters:
* BSplineRestriction.SurfaceMode - approximation of surfaces if restriction is necessary.
* BSplineRestriction.Curve3dMode - conversion of any 3D curve to BSpline and approximation.
* BSplineRestriction.Curve2dMode - conversion of any 2D curve to BSpline and approximation.
* BSplineRestriction.Tolerance3d - defines the possibility of surfaces and 3D curves approximation with
the specified parameters.
* BSplineRestriction.Tolerance2d - defines the possibility of surfaces and 2D curves approximation with
the specified parameters.
* BSplineRestriction.RequiredDegree - required degree of the resulting BSplines.
* BSplineRestriction.RequiredNbSegments - required maximum number of segments of resultant BSplines.
* BSplineRestriction.Continuity3d - continuity of the resulting surfaces and 3D curves.
* BSplineRestriction.Continuity2d - continuity of the resulting 2D curves.
* ToBezier - converts curves and surfaces of any type to Bezier curves and surfaces.
* ToBezier.SurfaceMode - if checked in, allows conversion of surfaces.
* ToBezier.Curve3dMode - if checked in, allows conversion of 3D curves.
* ToBezier.Curve2dMode - if checked in, allows conversion of 2D curves.
* ToBezier.MaxTolerance - defines tolerance for detection and correction of problems.
* SameParameter - fixes edges of 2D and 3D curves not having the same parameter.
* SameParameter.Tolerance3d - defines tolerance for fixing of edges.
Returns:
New GEOM.GEOM_Object, containing processed shape.
Note: For more information look through SALOME Geometry User's Guide->
-> Introduction to Geometry-> Repairing Operations-> Shape Processing
"""
# Example: see GEOM_TestHealing.py
theValues,Parameters = ParseList(theValues)
anObj = self.HealOp.ProcessShape(theShape, theOperators, theParameters, theValues)
# To avoid script failure in case of good argument shape
if self.HealOp.GetErrorCode() == "ShHealOper_NotError_msg":
return theShape
RaiseIfFailed("ProcessShape", self.HealOp)
for string in (theOperators + theParameters):
Parameters = ":" + Parameters
pass
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "healed")
return anObj
## Remove faces from the given object (shape).
# @param theObject Shape to be processed.
# @param theFaces Indices of faces to be removed, if EMPTY then the method
# removes ALL faces of the given object.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing processed shape.
#
# @ref tui_suppress_faces "Example"
@ManageTransactions("HealOp")
def SuppressFaces(self, theObject, theFaces, theName=None):
"""
Remove faces from the given object (shape).
Parameters:
theObject Shape to be processed.
theFaces Indices of faces to be removed, if EMPTY then the method
removes ALL faces of the given object.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing processed shape.
"""
# Example: see GEOM_TestHealing.py
anObj = self.HealOp.SuppressFaces(theObject, theFaces)
RaiseIfFailed("SuppressFaces", self.HealOp)
self._autoPublish(anObj, theName, "suppressFaces")
return anObj
## Sewing of some shapes into single shape.
# @param ListShape Shapes to be processed.
# @param theTolerance Required tolerance value.
# @param AllowNonManifold Flag that allows non-manifold sewing.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing processed shape.
#
# @ref tui_sewing "Example"
def MakeSewing(self, ListShape, theTolerance, AllowNonManifold=False, theName=None):
"""
Sewing of some shapes into single shape.
Parameters:
ListShape Shapes to be processed.
theTolerance Required tolerance value.
AllowNonManifold Flag that allows non-manifold sewing.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing processed shape.
"""
# Example: see GEOM_TestHealing.py
comp = self.MakeCompound(ListShape)
# note: auto-publishing is done in self.Sew()
anObj = self.Sew(comp, theTolerance, AllowNonManifold, theName)
return anObj
## Sewing of the given object.
# @param theObject Shape to be processed.
# @param theTolerance Required tolerance value.
# @param AllowNonManifold Flag that allows non-manifold sewing.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing processed shape.
@ManageTransactions("HealOp")
def Sew(self, theObject, theTolerance, AllowNonManifold=False, theName=None):
"""
Sewing of the given object.
Parameters:
theObject Shape to be processed.
theTolerance Required tolerance value.
AllowNonManifold Flag that allows non-manifold sewing.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing processed shape.
"""
# Example: see MakeSewing() above
theTolerance,Parameters = ParseParameters(theTolerance)
if AllowNonManifold:
anObj = self.HealOp.SewAllowNonManifold(theObject, theTolerance)
else:
anObj = self.HealOp.Sew(theObject, theTolerance)
# To avoid script failure in case of good argument shape
if self.HealOp.GetErrorCode() == "ShHealOper_NotError_msg":
return theObject
RaiseIfFailed("Sew", self.HealOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "sewed")
return anObj
## Rebuild the topology of theCompound of solids by removing
# of the faces that are shared by several solids.
# @param theCompound Shape to be processed.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing processed shape.
#
# @ref tui_remove_webs "Example"
@ManageTransactions("HealOp")
def RemoveInternalFaces (self, theCompound, theName=None):
"""
Rebuild the topology of theCompound of solids by removing
of the faces that are shared by several solids.
Parameters:
theCompound Shape to be processed.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing processed shape.
"""
# Example: see GEOM_TestHealing.py
anObj = self.HealOp.RemoveInternalFaces(theCompound)
RaiseIfFailed("RemoveInternalFaces", self.HealOp)
self._autoPublish(anObj, theName, "removeWebs")
return anObj
## Remove internal wires and edges from the given object (face).
# @param theObject Shape to be processed.
# @param theWires Indices of wires to be removed, if EMPTY then the method
# removes ALL internal wires of the given object.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing processed shape.
#
# @ref tui_suppress_internal_wires "Example"
@ManageTransactions("HealOp")
def SuppressInternalWires(self, theObject, theWires, theName=None):
"""
Remove internal wires and edges from the given object (face).
Parameters:
theObject Shape to be processed.
theWires Indices of wires to be removed, if EMPTY then the method
removes ALL internal wires of the given object.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing processed shape.
"""
# Example: see GEOM_TestHealing.py
anObj = self.HealOp.RemoveIntWires(theObject, theWires)
RaiseIfFailed("RemoveIntWires", self.HealOp)
self._autoPublish(anObj, theName, "suppressWires")
return anObj
## Remove internal closed contours (holes) from the given object.
# @param theObject Shape to be processed.
# @param theWires Indices of wires to be removed, if EMPTY then the method
# removes ALL internal holes of the given object
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing processed shape.
#
# @ref tui_suppress_holes "Example"
@ManageTransactions("HealOp")
def SuppressHoles(self, theObject, theWires, theName=None):
"""
Remove internal closed contours (holes) from the given object.
Parameters:
theObject Shape to be processed.
theWires Indices of wires to be removed, if EMPTY then the method
removes ALL internal holes of the given object
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing processed shape.
"""
# Example: see GEOM_TestHealing.py
anObj = self.HealOp.FillHoles(theObject, theWires)
RaiseIfFailed("FillHoles", self.HealOp)
self._autoPublish(anObj, theName, "suppressHoles")
return anObj
## Close an open wire.
# @param theObject Shape to be processed.
# @param theWires Indexes of edge(s) and wire(s) to be closed within <VAR>theObject</VAR>'s shape,
# if [ ], then <VAR>theObject</VAR> itself is a wire.
# @param isCommonVertex If True : closure by creation of a common vertex,
# If False : closure by creation of an edge between ends.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing processed shape.
#
# @ref tui_close_contour "Example"
@ManageTransactions("HealOp")
def CloseContour(self,theObject, theWires, isCommonVertex, theName=None):
"""
Close an open wire.
Parameters:
theObject Shape to be processed.
theWires Indexes of edge(s) and wire(s) to be closed within theObject's shape,
if [ ], then theObject itself is a wire.
isCommonVertex If True : closure by creation of a common vertex,
If False : closure by creation of an edge between ends.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing processed shape.
"""
# Example: see GEOM_TestHealing.py
anObj = self.HealOp.CloseContour(theObject, theWires, isCommonVertex)
RaiseIfFailed("CloseContour", self.HealOp)
self._autoPublish(anObj, theName, "closeContour")
return anObj
## Addition of a point to a given edge object.
# @param theObject Shape to be processed.
# @param theEdgeIndex Index of edge to be divided within theObject's shape,
# if -1, then theObject itself is the edge.
# @param theValue Value of parameter on edge or length parameter,
# depending on \a isByParameter.
# @param isByParameter If TRUE : \a theValue is treated as a curve parameter [0..1], \n
# if FALSE : \a theValue is treated as a length parameter [0..1]
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing processed shape.
#
# @ref tui_add_point_on_edge "Example"
@ManageTransactions("HealOp")
def DivideEdge(self, theObject, theEdgeIndex, theValue, isByParameter, theName=None):
    """
    Add a point on a given edge of an object.

    Parameters:
        theObject Shape to be processed.
        theEdgeIndex Index of the edge to divide within theObject's
                     shape; -1 means theObject itself is the edge.
        theValue Parameter value on the edge or a length parameter,
                 depending on isByParameter.
        isByParameter True  - theValue is a curve parameter in [0..1],
                      False - theValue is a length parameter in [0..1].
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the processed shape.
    """
    # Example: see GEOM_TestHealing.py
    # Resolve notebook variables before passing values to the engine.
    theEdgeIndex, theValue, isByParameter, aParams = ParseParameters(theEdgeIndex, theValue, isByParameter)
    aResult = self.HealOp.DivideEdge(theObject, theEdgeIndex, theValue, isByParameter)
    RaiseIfFailed("DivideEdge", self.HealOp)
    aResult.SetParameters(aParams)
    self._autoPublish(aResult, theName, "divideEdge")
    return aResult
## Suppress the vertices in the wire in case if adjacent edges are C1 continuous.
# @param theWire Wire to minimize the number of C1 continuous edges in.
# @param theVertices A list of vertices to suppress. If the list
# is empty, all vertices in a wire will be assumed.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object with modified wire.
#
# @ref tui_fuse_collinear_edges "Example"
@ManageTransactions("HealOp")
def FuseCollinearEdgesWithinWire(self, theWire, theVertices = [], theName=None):
    """
    Suppress vertices in a wire where the adjacent edges are C1
    continuous, thus minimizing the number of edges.

    Parameters:
        theWire Wire to minimize the number of C1 continuous edges in.
        theVertices Vertices to suppress; an empty list means all
                    vertices of the wire are candidates.
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a default name is used.

    Returns:
        New GEOM.GEOM_Object with the modified wire.
    """
    aResult = self.HealOp.FuseCollinearEdgesWithinWire(theWire, theVertices)
    RaiseIfFailed("FuseCollinearEdgesWithinWire", self.HealOp)
    self._autoPublish(aResult, theName, "fuseEdges")
    return aResult
## Change orientation of the given object. Updates given shape.
# @param theObject Shape to be processed.
# @return Updated <var>theObject</var>
#
# @ref swig_todo "Example"
@ManageTransactions("HealOp")
def ChangeOrientationShell(self,theObject):
    """
    Change orientation of the given object. Updates given shape.

    Parameters:
        theObject Shape to be processed.

    Returns:
        Updated theObject.
    """
    # Bug fix: the original rebound theObject to the engine's result and
    # then ended with "pass", so the function always returned None even
    # though its documentation promised the updated object. Return the
    # result so the documented contract holds (callers that ignored the
    # previous None return are unaffected).
    theObject = self.HealOp.ChangeOrientation(theObject)
    RaiseIfFailed("ChangeOrientation", self.HealOp)
    return theObject
## Change orientation of the given object.
# @param theObject Shape to be processed.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing processed shape.
#
# @ref swig_todo "Example"
@ManageTransactions("HealOp")
def ChangeOrientationShellCopy(self, theObject, theName=None):
    """
    Change orientation of the given object, returning the result as a
    new object (the input is left untouched).

    Parameters:
        theObject Shape to be processed.
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the processed shape.
    """
    aReversed = self.HealOp.ChangeOrientationCopy(theObject)
    RaiseIfFailed("ChangeOrientationCopy", self.HealOp)
    self._autoPublish(aReversed, theName, "reversed")
    return aReversed
## Try to limit tolerance of the given object by value \a theTolerance.
# @param theObject Shape to be processed.
# @param theTolerance Required tolerance value.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing processed shape.
#
# @ref tui_limit_tolerance "Example"
@ManageTransactions("HealOp")
def LimitTolerance(self, theObject, theTolerance = 1e-07, theName=None):
    """
    Try to limit the tolerance of the given object by theTolerance.

    Parameters:
        theObject Shape to be processed.
        theTolerance Required tolerance value (default 1e-07).
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the processed shape.
    """
    aResult = self.HealOp.LimitTolerance(theObject, theTolerance)
    RaiseIfFailed("LimitTolerance", self.HealOp)
    self._autoPublish(aResult, theName, "limitTolerance")
    return aResult
## Get a list of wires (wrapped in GEOM.GEOM_Object-s),
# that constitute a free boundary of the given shape.
# @param theObject Shape to get free boundary of.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return [\a status, \a theClosedWires, \a theOpenWires]
# \n \a status: FALSE, if an error(s) occured during the method execution.
# \n \a theClosedWires: Closed wires on the free boundary of the given shape.
# \n \a theOpenWires: Open wires on the free boundary of the given shape.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("HealOp")
def GetFreeBoundary(self, theObject, theName=None):
    """
    Get the wires (wrapped in GEOM.GEOM_Object-s) that constitute the
    free boundary of the given shape.

    Parameters:
        theObject Shape to get the free boundary of.
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a default name is used.

    Returns:
        [status, theClosedWires, theOpenWires] where
        status         - FALSE if an error occurred during execution,
        theClosedWires - closed wires on the free boundary,
        theOpenWires   - open wires on the free boundary.
    """
    # Example: see GEOM_TestHealing.py
    aBoundary = self.HealOp.GetFreeBoundary(theObject)
    RaiseIfFailed("GetFreeBoundary", self.HealOp)
    # Publish the closed and open wire lists under distinct base names.
    self._autoPublish(aBoundary[1], theName, "closedWire")
    self._autoPublish(aBoundary[2], theName, "openWire")
    return aBoundary
## Replace coincident faces in theShape by one face.
# @param theShape Initial shape.
# @param theTolerance Maximum distance between faces, which can be considered as coincident.
# @param doKeepNonSolids If FALSE, only solids will present in the result,
# otherwise all initial shapes.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing a copy of theShape without coincident faces.
#
# @ref tui_glue_faces "Example"
@ManageTransactions("ShapesOp")
def MakeGlueFaces(self, theShape, theTolerance, doKeepNonSolids=True, theName=None):
    """
    Replace coincident faces in theShape by one face.

    Parameters:
        theShape Initial shape.
        theTolerance Maximum distance between faces, which can be considered as coincident.
        doKeepNonSolids If FALSE, only solids will present in the result,
                        otherwise all initial shapes.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, default value is used for result name.

    Returns:
        New GEOM.GEOM_Object, containing a copy of theShape without coincident faces.
    """
    # Example: see GEOM_Spanner.py
    theTolerance, Parameters = ParseParameters(theTolerance)
    anObj = self.ShapesOp.MakeGlueFaces(theShape, theTolerance, doKeepNonSolids)
    if anObj is None:
        # Fix: use the call form of raise; the Python-2-only
        # "raise RuntimeError, msg" statement is a SyntaxError on Python 3.
        raise RuntimeError("MakeGlueFaces : " + self.ShapesOp.GetErrorCode())
    anObj.SetParameters(Parameters)
    self._autoPublish(anObj, theName, "glueFaces")
    return anObj
## Find coincident faces in theShape for possible gluing.
# @param theShape Initial shape.
# @param theTolerance Maximum distance between faces,
# which can be considered as coincident.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return GEOM.ListOfGO
#
# @ref tui_glue_faces "Example"
@ManageTransactions("ShapesOp")
def GetGlueFaces(self, theShape, theTolerance, theName=None):
    """
    Find coincident faces in theShape that are candidates for gluing.

    Parameters:
        theShape Initial shape.
        theTolerance Maximum distance between faces which can be
                     considered as coincident.
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a default name is used.

    Returns:
        GEOM.ListOfGO
    """
    aFaces = self.ShapesOp.GetGlueFaces(theShape, theTolerance)
    RaiseIfFailed("GetGlueFaces", self.ShapesOp)
    self._autoPublish(aFaces, theName, "facesToGlue")
    return aFaces
## Replace coincident faces in theShape by one face
# in compliance with given list of faces
# @param theShape Initial shape.
# @param theTolerance Maximum distance between faces,
# which can be considered as coincident.
# @param theFaces List of faces for gluing.
# @param doKeepNonSolids If FALSE, only solids will present in the result,
# otherwise all initial shapes.
# @param doGlueAllEdges If TRUE, all coincident edges of <VAR>theShape</VAR>
# will be glued, otherwise only the edges,
# belonging to <VAR>theFaces</VAR>.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing a copy of theShape
# without some faces.
#
# @ref tui_glue_faces "Example"
@ManageTransactions("ShapesOp")
def MakeGlueFacesByList(self, theShape, theTolerance, theFaces,
                        doKeepNonSolids=True, doGlueAllEdges=True, theName=None):
    """
    Replace coincident faces in theShape by one face
    in compliance with given list of faces.

    Parameters:
        theShape Initial shape.
        theTolerance Maximum distance between faces,
                     which can be considered as coincident.
        theFaces List of faces for gluing.
        doKeepNonSolids If FALSE, only solids will present in the result,
                        otherwise all initial shapes.
        doGlueAllEdges If TRUE, all coincident edges of theShape
                       will be glued, otherwise only the edges
                       belonging to theFaces.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, default value is used for result name.

    Returns:
        New GEOM.GEOM_Object, containing a copy of theShape
        without some faces.
    """
    anObj = self.ShapesOp.MakeGlueFacesByList(theShape, theTolerance, theFaces,
                                              doKeepNonSolids, doGlueAllEdges)
    if anObj is None:
        # Fix: use the call form of raise; the Python-2-only
        # "raise RuntimeError, msg" statement is a SyntaxError on Python 3.
        raise RuntimeError("MakeGlueFacesByList : " + self.ShapesOp.GetErrorCode())
    self._autoPublish(anObj, theName, "glueFaces")
    return anObj
## Replace coincident edges in theShape by one edge.
# @param theShape Initial shape.
# @param theTolerance Maximum distance between edges, which can be considered as coincident.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing a copy of theShape without coincident edges.
#
# @ref tui_glue_edges "Example"
@ManageTransactions("ShapesOp")
def MakeGlueEdges(self, theShape, theTolerance, theName=None):
    """
    Replace coincident edges in theShape by one edge.

    Parameters:
        theShape Initial shape.
        theTolerance Maximum distance between edges, which can be considered as coincident.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, default value is used for result name.

    Returns:
        New GEOM.GEOM_Object, containing a copy of theShape without coincident edges.
    """
    theTolerance, Parameters = ParseParameters(theTolerance)
    anObj = self.ShapesOp.MakeGlueEdges(theShape, theTolerance)
    if anObj is None:
        # Fix: use the call form of raise; the Python-2-only
        # "raise RuntimeError, msg" statement is a SyntaxError on Python 3.
        raise RuntimeError("MakeGlueEdges : " + self.ShapesOp.GetErrorCode())
    anObj.SetParameters(Parameters)
    self._autoPublish(anObj, theName, "glueEdges")
    return anObj
## Find coincident edges in theShape for possible gluing.
# @param theShape Initial shape.
# @param theTolerance Maximum distance between edges,
# which can be considered as coincident.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return GEOM.ListOfGO
#
# @ref tui_glue_edges "Example"
@ManageTransactions("ShapesOp")
def GetGlueEdges(self, theShape, theTolerance, theName=None):
    """
    Find coincident edges in theShape that are candidates for gluing.

    Parameters:
        theShape Initial shape.
        theTolerance Maximum distance between edges which can be
                     considered as coincident.
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a default name is used.

    Returns:
        GEOM.ListOfGO
    """
    anEdges = self.ShapesOp.GetGlueEdges(theShape, theTolerance)
    RaiseIfFailed("GetGlueEdges", self.ShapesOp)
    self._autoPublish(anEdges, theName, "edgesToGlue")
    return anEdges
## Replace coincident edges in theShape by one edge
# in compliance with given list of edges.
# @param theShape Initial shape.
# @param theTolerance Maximum distance between edges,
# which can be considered as coincident.
# @param theEdges List of edges for gluing.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing a copy of theShape
# without some edges.
#
# @ref tui_glue_edges "Example"
@ManageTransactions("ShapesOp")
def MakeGlueEdgesByList(self, theShape, theTolerance, theEdges, theName=None):
    """
    Replace coincident edges in theShape by one edge
    in compliance with given list of edges.

    Parameters:
        theShape Initial shape.
        theTolerance Maximum distance between edges,
                     which can be considered as coincident.
        theEdges List of edges for gluing.
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, default value is used for result name.

    Returns:
        New GEOM.GEOM_Object, containing a copy of theShape
        without some edges.
    """
    anObj = self.ShapesOp.MakeGlueEdgesByList(theShape, theTolerance, theEdges)
    if anObj is None:
        # Fix: use the call form of raise; the Python-2-only
        # "raise RuntimeError, msg" statement is a SyntaxError on Python 3.
        raise RuntimeError("MakeGlueEdgesByList : " + self.ShapesOp.GetErrorCode())
    self._autoPublish(anObj, theName, "glueEdges")
    return anObj
# end of l3_healing
## @}
## @addtogroup l3_boolean Boolean Operations
## @{
# -----------------------------------------------------------------------------
# Boolean (Common, Cut, Fuse, Section)
# -----------------------------------------------------------------------------
## Perform one of boolean operations on two given shapes.
# @param theShape1 First argument for boolean operation.
# @param theShape2 Second argument for boolean operation.
# @param theOperation Indicates the operation to be done:\n
# 1 - Common, 2 - Cut, 3 - Fuse, 4 - Section.
# @param checkSelfInte The flag that tells if the arguments should
# be checked for self-intersection prior to the operation.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note This algorithm doesn't find all types of self-intersections.
# It is tuned to detect vertex/vertex, vertex/edge, edge/edge,
# vertex/face and edge/face intersections. Face/face
# intersections detection is switched off as it is a
# time-consuming operation that gives an impact on performance.
# To find all self-intersections please use
# CheckSelfIntersections() method.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_fuse "Example"
@ManageTransactions("BoolOp")
def MakeBoolean(self, theShape1, theShape2, theOperation, checkSelfInte=False, theName=None):
    """
    Perform one of the boolean operations (Common, Cut, Fuse, Section)
    on two given shapes.

    Parameters:
        theShape1 First argument of the boolean operation.
        theShape2 Second argument of the boolean operation.
        theOperation Operation code: 1 - Common, 2 - Cut,
                     3 - Fuse, 4 - Section.
        checkSelfInte If True, check the arguments for self-intersection
                      before performing the operation.
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a name derived from the
                operation is used.

    Note:
        The self-intersection check detects vertex/vertex, vertex/edge,
        edge/edge, vertex/face and edge/face intersections only;
        face/face detection is switched off for performance reasons.
        Use CheckSelfIntersections() for an exhaustive check.

    Returns:
        New GEOM.GEOM_Object, containing the result shape.
    """
    # Example: see GEOM_TestAll.py
    aResult = self.BoolOp.MakeBoolean(theShape1, theShape2, theOperation, checkSelfInte)
    RaiseIfFailed("MakeBoolean", self.BoolOp)
    # Default publication base names, keyed by the operation code.
    aNameByOp = { 1: "common", 2: "cut", 3: "fuse", 4: "section" }
    self._autoPublish(aResult, theName, aNameByOp[theOperation])
    return aResult
## Perform Common boolean operation on two given shapes.
# @param theShape1 First argument for boolean operation.
# @param theShape2 Second argument for boolean operation.
# @param checkSelfInte The flag that tells if the arguments should
# be checked for self-intersection prior to the operation.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note This algorithm doesn't find all types of self-intersections.
# It is tuned to detect vertex/vertex, vertex/edge, edge/edge,
# vertex/face and edge/face intersections. Face/face
# intersections detection is switched off as it is a
# time-consuming operation that gives an impact on performance.
# To find all self-intersections please use
# CheckSelfIntersections() method.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_common "Example 1"
# \n @ref swig_MakeCommon "Example 2"
def MakeCommon(self, theShape1, theShape2, checkSelfInte=False, theName=None):
    """
    Perform the Common boolean operation on two given shapes.

    Parameters:
        theShape1 First argument of the boolean operation.
        theShape2 Second argument of the boolean operation.
        checkSelfInte If True, check the arguments for self-intersection
                      before performing the operation.
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a default name is used.

    Note:
        The self-intersection check detects vertex/vertex, vertex/edge,
        edge/edge, vertex/face and edge/face intersections only;
        face/face detection is switched off for performance reasons.
        Use CheckSelfIntersections() for an exhaustive check.

    Returns:
        New GEOM.GEOM_Object, containing the result shape.
    """
    # Example: see GEOM_TestOthers.py
    # Operation code 1 == Common; publication happens in MakeBoolean().
    return self.MakeBoolean(theShape1, theShape2, 1, checkSelfInte, theName)
## Perform Cut boolean operation on two given shapes.
# @param theShape1 First argument for boolean operation.
# @param theShape2 Second argument for boolean operation.
# @param checkSelfInte The flag that tells if the arguments should
# be checked for self-intersection prior to the operation.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note This algorithm doesn't find all types of self-intersections.
# It is tuned to detect vertex/vertex, vertex/edge, edge/edge,
# vertex/face and edge/face intersections. Face/face
# intersections detection is switched off as it is a
# time-consuming operation that gives an impact on performance.
# To find all self-intersections please use
# CheckSelfIntersections() method.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_cut "Example 1"
# \n @ref swig_MakeCommon "Example 2"
def MakeCut(self, theShape1, theShape2, checkSelfInte=False, theName=None):
    """
    Perform the Cut boolean operation on two given shapes.

    Parameters:
        theShape1 First argument of the boolean operation.
        theShape2 Second argument of the boolean operation.
        checkSelfInte If True, check the arguments for self-intersection
                      before performing the operation.
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a default name is used.

    Note:
        The self-intersection check detects vertex/vertex, vertex/edge,
        edge/edge, vertex/face and edge/face intersections only;
        face/face detection is switched off for performance reasons.
        Use CheckSelfIntersections() for an exhaustive check.

    Returns:
        New GEOM.GEOM_Object, containing the result shape.
    """
    # Example: see GEOM_TestOthers.py
    # Operation code 2 == Cut; publication happens in MakeBoolean().
    return self.MakeBoolean(theShape1, theShape2, 2, checkSelfInte, theName)
## Perform Fuse boolean operation on two given shapes.
# @param theShape1 First argument for boolean operation.
# @param theShape2 Second argument for boolean operation.
# @param checkSelfInte The flag that tells if the arguments should
# be checked for self-intersection prior to the operation.
# @param rmExtraEdges The flag that tells if Remove Extra Edges
# operation should be performed during the operation.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note This algorithm doesn't find all types of self-intersections.
# It is tuned to detect vertex/vertex, vertex/edge, edge/edge,
# vertex/face and edge/face intersections. Face/face
# intersections detection is switched off as it is a
# time-consuming operation that gives an impact on performance.
# To find all self-intersections please use
# CheckSelfIntersections() method.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_fuse "Example 1"
# \n @ref swig_MakeCommon "Example 2"
@ManageTransactions("BoolOp")
def MakeFuse(self, theShape1, theShape2, checkSelfInte=False,
             rmExtraEdges=False, theName=None):
    """
    Perform the Fuse boolean operation on two given shapes.

    Parameters:
        theShape1 First argument of the boolean operation.
        theShape2 Second argument of the boolean operation.
        checkSelfInte If True, check the arguments for self-intersection
                      before performing the operation.
        rmExtraEdges If True, also perform the Remove Extra Edges
                     operation during the fuse.
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a default name is used.

    Note:
        The self-intersection check detects vertex/vertex, vertex/edge,
        edge/edge, vertex/face and edge/face intersections only;
        face/face detection is switched off for performance reasons.
        Use CheckSelfIntersections() for an exhaustive check.

    Returns:
        New GEOM.GEOM_Object, containing the result shape.
    """
    # Example: see GEOM_TestOthers.py
    aFused = self.BoolOp.MakeFuse(theShape1, theShape2,
                                  checkSelfInte, rmExtraEdges)
    RaiseIfFailed("MakeFuse", self.BoolOp)
    self._autoPublish(aFused, theName, "fuse")
    return aFused
## Perform Section boolean operation on two given shapes.
# @param theShape1 First argument for boolean operation.
# @param theShape2 Second argument for boolean operation.
# @param checkSelfInte The flag that tells if the arguments should
# be checked for self-intersection prior to the operation.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note This algorithm doesn't find all types of self-intersections.
# It is tuned to detect vertex/vertex, vertex/edge, edge/edge,
# vertex/face and edge/face intersections. Face/face
# intersections detection is switched off as it is a
# time-consuming operation that gives an impact on performance.
# To find all self-intersections please use
# CheckSelfIntersections() method.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_section "Example 1"
# \n @ref swig_MakeCommon "Example 2"
def MakeSection(self, theShape1, theShape2, checkSelfInte=False, theName=None):
    """
    Perform the Section boolean operation on two given shapes.

    Parameters:
        theShape1 First argument of the boolean operation.
        theShape2 Second argument of the boolean operation.
        checkSelfInte If True, check the arguments for self-intersection
                      before performing the operation.
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a default name is used.

    Note:
        The self-intersection check detects vertex/vertex, vertex/edge,
        edge/edge, vertex/face and edge/face intersections only;
        face/face detection is switched off for performance reasons.
        Use CheckSelfIntersections() for an exhaustive check.

    Returns:
        New GEOM.GEOM_Object, containing the result shape.
    """
    # Example: see GEOM_TestOthers.py
    # Operation code 4 == Section; publication happens in MakeBoolean().
    return self.MakeBoolean(theShape1, theShape2, 4, checkSelfInte, theName)
## Perform Fuse boolean operation on the list of shapes.
# @param theShapesList Shapes to be fused.
# @param checkSelfInte The flag that tells if the arguments should
# be checked for self-intersection prior to the operation.
# @param rmExtraEdges The flag that tells if Remove Extra Edges
# operation should be performed during the operation.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note This algorithm doesn't find all types of self-intersections.
# It is tuned to detect vertex/vertex, vertex/edge, edge/edge,
# vertex/face and edge/face intersections. Face/face
# intersections detection is switched off as it is a
# time-consuming operation that gives an impact on performance.
# To find all self-intersections please use
# CheckSelfIntersections() method.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_fuse "Example 1"
# \n @ref swig_MakeCommon "Example 2"
@ManageTransactions("BoolOp")
def MakeFuseList(self, theShapesList, checkSelfInte=False,
                 rmExtraEdges=False, theName=None):
    """
    Perform the Fuse boolean operation on a list of shapes.

    Parameters:
        theShapesList Shapes to be fused.
        checkSelfInte If True, check the arguments for self-intersection
                      before performing the operation.
        rmExtraEdges If True, also perform the Remove Extra Edges
                     operation during the fuse.
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a default name is used.

    Note:
        The self-intersection check detects vertex/vertex, vertex/edge,
        edge/edge, vertex/face and edge/face intersections only;
        face/face detection is switched off for performance reasons.
        Use CheckSelfIntersections() for an exhaustive check.

    Returns:
        New GEOM.GEOM_Object, containing the result shape.
    """
    # Example: see GEOM_TestOthers.py
    aFused = self.BoolOp.MakeFuseList(theShapesList, checkSelfInte,
                                      rmExtraEdges)
    RaiseIfFailed("MakeFuseList", self.BoolOp)
    self._autoPublish(aFused, theName, "fuse")
    return aFused
## Perform Common boolean operation on the list of shapes.
# @param theShapesList Shapes for Common operation.
# @param checkSelfInte The flag that tells if the arguments should
# be checked for self-intersection prior to the operation.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note This algorithm doesn't find all types of self-intersections.
# It is tuned to detect vertex/vertex, vertex/edge, edge/edge,
# vertex/face and edge/face intersections. Face/face
# intersections detection is switched off as it is a
# time-consuming operation that gives an impact on performance.
# To find all self-intersections please use
# CheckSelfIntersections() method.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_common "Example 1"
# \n @ref swig_MakeCommon "Example 2"
@ManageTransactions("BoolOp")
def MakeCommonList(self, theShapesList, checkSelfInte=False, theName=None):
    """
    Perform the Common boolean operation on a list of shapes.

    Parameters:
        theShapesList Shapes for the Common operation.
        checkSelfInte If True, check the arguments for self-intersection
                      before performing the operation.
        theName Optional publication name; when automatic publication is
                switched on and no name is given, a default name is used.

    Note:
        The self-intersection check detects vertex/vertex, vertex/edge,
        edge/edge, vertex/face and edge/face intersections only;
        face/face detection is switched off for performance reasons.
        Use CheckSelfIntersections() for an exhaustive check.

    Returns:
        New GEOM.GEOM_Object, containing the result shape.
    """
    # Example: see GEOM_TestOthers.py
    aCommon = self.BoolOp.MakeCommonList(theShapesList, checkSelfInte)
    RaiseIfFailed("MakeCommonList", self.BoolOp)
    self._autoPublish(aCommon, theName, "common")
    return aCommon
## Perform Cut boolean operation on one object and the list of tools.
# @param theMainShape The object of the operation.
# @param theShapesList The list of tools of the operation.
# @param checkSelfInte The flag that tells if the arguments should
# be checked for self-intersection prior to the operation.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note This algorithm doesn't find all types of self-intersections.
# It is tuned to detect vertex/vertex, vertex/edge, edge/edge,
# vertex/face and edge/face intersections. Face/face
# intersections detection is switched off as it is a
# time-consuming operation that gives an impact on performance.
# To find all self-intersections please use
# CheckSelfIntersections() method.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_cut "Example 1"
# \n @ref swig_MakeCommon "Example 2"
@ManageTransactions("BoolOp")
def MakeCutList(self, theMainShape, theShapesList, checkSelfInte=False, theName=None):
"""
Perform Cut boolean operation on one object and the list of tools.
Parameters:
theMainShape The object of the operation.
theShapesList The list of tools of the operation.
checkSelfInte The flag that tells if the arguments should
be checked for self-intersection prior to
the operation.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Note:
This algorithm doesn't find all types of self-intersections.
It is tuned to detect vertex/vertex, vertex/edge, edge/edge,
vertex/face and edge/face intersections. Face/face
intersections detection is switched off as it is a
time-consuming operation that gives an impact on performance.
To find all self-intersections please use
CheckSelfIntersections() method.
Returns:
New GEOM.GEOM_Object, containing the result shape.
"""
# Example: see GEOM_TestOthers.py
anObj = self.BoolOp.MakeCutList(theMainShape, theShapesList, checkSelfInte)
RaiseIfFailed("MakeCutList", self.BoolOp)
self._autoPublish(anObj, theName, "cut")
return anObj
# end of l3_boolean
## @}
## @addtogroup l3_basic_op
## @{
## Perform partition operation.
# @param ListShapes Shapes to be intersected.
# @param ListTools Shapes to intersect theShapes.
# @param Limit Type of resulting shapes (see ShapeType()).\n
# If this parameter is set to -1 ("Auto"), most appropriate shape limit
# type will be detected automatically.
# @param KeepNonlimitShapes if this parameter == 0, then only shapes of
# target type (equal to Limit) are kept in the result,
# else standalone shapes of lower dimension
# are kept also (if they exist).
#
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note Each compound from ListShapes and ListTools will be exploded
# in order to avoid possible intersection between shapes from this compound.
#
# After implementation new version of PartitionAlgo (October 2006)
# other parameters are ignored by current functionality. They are kept
# in this function only for support old versions.
# @param ListKeepInside Shapes, outside which the results will be deleted.
# Each shape from theKeepInside must belong to theShapes also.
# @param ListRemoveInside Shapes, inside which the results will be deleted.
# Each shape from theRemoveInside must belong to theShapes also.
# @param RemoveWebs If TRUE, perform Glue 3D algorithm.
    #  @param ListMaterials Material indices for each shape. Makes sense
    #         only if theRemoveWebs is TRUE.
#
# @return New GEOM.GEOM_Object, containing the result shapes.
#
# @ref tui_partition "Example"
@ManageTransactions("BoolOp")
def MakePartition(self, ListShapes, ListTools=[], ListKeepInside=[], ListRemoveInside=[],
Limit=ShapeType["AUTO"], RemoveWebs=0, ListMaterials=[],
KeepNonlimitShapes=0, theName=None):
"""
Perform partition operation.
Parameters:
ListShapes Shapes to be intersected.
ListTools Shapes to intersect theShapes.
Limit Type of resulting shapes (see geompy.ShapeType)
If this parameter is set to -1 ("Auto"), most appropriate shape limit
type will be detected automatically.
KeepNonlimitShapes if this parameter == 0, then only shapes of
target type (equal to Limit) are kept in the result,
else standalone shapes of lower dimension
are kept also (if they exist).
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Note:
Each compound from ListShapes and ListTools will be exploded
in order to avoid possible intersection between shapes from
this compound.
After implementation new version of PartitionAlgo (October 2006) other
parameters are ignored by current functionality. They are kept in this
function only for support old versions.
Ignored parameters:
ListKeepInside Shapes, outside which the results will be deleted.
Each shape from theKeepInside must belong to theShapes also.
ListRemoveInside Shapes, inside which the results will be deleted.
Each shape from theRemoveInside must belong to theShapes also.
RemoveWebs If TRUE, perform Glue 3D algorithm.
ListMaterials Material indices for each shape. Make sence, only if theRemoveWebs is TRUE.
Returns:
New GEOM.GEOM_Object, containing the result shapes.
"""
# Example: see GEOM_TestAll.py
if Limit == self.ShapeType["AUTO"]:
# automatic detection of the most appropriate shape limit type
lim = GEOM.SHAPE
for s in ListShapes: lim = min( lim, s.GetMaxShapeType() )
Limit = EnumToLong(lim)
pass
anObj = self.BoolOp.MakePartition(ListShapes, ListTools,
ListKeepInside, ListRemoveInside,
Limit, RemoveWebs, ListMaterials,
KeepNonlimitShapes);
RaiseIfFailed("MakePartition", self.BoolOp)
self._autoPublish(anObj, theName, "partition")
return anObj
## Perform partition operation.
# This method may be useful if it is needed to make a partition for
# compound contains nonintersected shapes. Performance will be better
# since intersection between shapes from compound is not performed.
#
# Description of all parameters as in previous method MakePartition().
# One additional parameter is provided:
# @param checkSelfInte The flag that tells if the arguments should
# be checked for self-intersection prior to the operation.
#
# @note This algorithm doesn't find all types of self-intersections.
# It is tuned to detect vertex/vertex, vertex/edge, edge/edge,
# vertex/face and edge/face intersections. Face/face
# intersections detection is switched off as it is a
# time-consuming operation that gives an impact on performance.
# To find all self-intersections please use
# CheckSelfIntersections() method.
#
# @note Passed compounds (via ListShapes or via ListTools)
# have to consist of nonintersecting shapes.
#
# @return New GEOM.GEOM_Object, containing the result shapes.
#
# @ref swig_todo "Example"
@ManageTransactions("BoolOp")
def MakePartitionNonSelfIntersectedShape(self, ListShapes, ListTools=[],
ListKeepInside=[], ListRemoveInside=[],
Limit=ShapeType["AUTO"], RemoveWebs=0,
ListMaterials=[], KeepNonlimitShapes=0,
checkSelfInte=False, theName=None):
"""
Perform partition operation.
This method may be useful if it is needed to make a partition for
compound contains nonintersected shapes. Performance will be better
since intersection between shapes from compound is not performed.
Parameters:
Description of all parameters as in method geompy.MakePartition.
One additional parameter is provided:
checkSelfInte The flag that tells if the arguments should
be checked for self-intersection prior to
the operation.
Note:
This algorithm doesn't find all types of self-intersections.
It is tuned to detect vertex/vertex, vertex/edge, edge/edge,
vertex/face and edge/face intersections. Face/face
intersections detection is switched off as it is a
time-consuming operation that gives an impact on performance.
To find all self-intersections please use
CheckSelfIntersections() method.
NOTE:
Passed compounds (via ListShapes or via ListTools)
have to consist of nonintersecting shapes.
Returns:
New GEOM.GEOM_Object, containing the result shapes.
"""
if Limit == self.ShapeType["AUTO"]:
# automatic detection of the most appropriate shape limit type
lim = GEOM.SHAPE
for s in ListShapes: lim = min( lim, s.GetMaxShapeType() )
Limit = EnumToLong(lim)
pass
anObj = self.BoolOp.MakePartitionNonSelfIntersectedShape(ListShapes, ListTools,
ListKeepInside, ListRemoveInside,
Limit, RemoveWebs, ListMaterials,
KeepNonlimitShapes, checkSelfInte);
RaiseIfFailed("MakePartitionNonSelfIntersectedShape", self.BoolOp)
self._autoPublish(anObj, theName, "partition")
return anObj
## See method MakePartition() for more information.
#
# @ref tui_partition "Example 1"
# \n @ref swig_Partition "Example 2"
def Partition(self, ListShapes, ListTools=None, ListKeepInside=None,
              ListRemoveInside=None, Limit=ShapeType["AUTO"], RemoveWebs=0,
              ListMaterials=None, KeepNonlimitShapes=0, theName=None):
    """
    Shortcut for geompy.MakePartition; see that method for the full
    description of parameters and behavior.

    Returns:
        New GEOM.GEOM_Object, containing the result shapes.
    """
    # Example: see GEOM_TestOthers.py
    # note: auto-publishing is done in self.MakePartition()
    # Normalize omitted list arguments (avoids the mutable-default pitfall).
    ListTools        = [] if ListTools        is None else ListTools
    ListKeepInside   = [] if ListKeepInside   is None else ListKeepInside
    ListRemoveInside = [] if ListRemoveInside is None else ListRemoveInside
    ListMaterials    = [] if ListMaterials    is None else ListMaterials
    anObj = self.MakePartition(ListShapes, ListTools,
                               ListKeepInside, ListRemoveInside,
                               Limit, RemoveWebs, ListMaterials,
                               KeepNonlimitShapes, theName)
    return anObj
## Perform partition of the Shape with the Plane
# @param theShape Shape to be intersected.
# @param thePlane Tool shape, to intersect theShape.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_partition "Example"
@ManageTransactions("BoolOp")
def MakeHalfPartition(self, theShape, thePlane, theName=None):
"""
Perform partition of the Shape with the Plane
Parameters:
theShape Shape to be intersected.
thePlane Tool shape, to intersect theShape.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the result shape.
"""
# Example: see GEOM_TestAll.py
anObj = self.BoolOp.MakeHalfPartition(theShape, thePlane)
RaiseIfFailed("MakeHalfPartition", self.BoolOp)
self._autoPublish(anObj, theName, "partition")
return anObj
# end of l3_basic_op
## @}
## @addtogroup l3_transform
## @{
## Translate the given object along the vector, specified
# by its end points.
# @param theObject The object to be translated.
# @param thePoint1 Start point of translation vector.
# @param thePoint2 End point of translation vector.
# @param theCopy Flag used to translate object itself or create a copy.
# @return Translated @a theObject (GEOM.GEOM_Object) if @a theCopy flag is @c False (default) or
# new GEOM.GEOM_Object, containing the translated object if @a theCopy flag is @c True.
@ManageTransactions("TrsfOp")
def TranslateTwoPoints(self, theObject, thePoint1, thePoint2, theCopy=False):
"""
Translate the given object along the vector, specified by its end points.
Parameters:
theObject The object to be translated.
thePoint1 Start point of translation vector.
thePoint2 End point of translation vector.
theCopy Flag used to translate object itself or create a copy.
Returns:
Translated theObject (GEOM.GEOM_Object) if theCopy flag is False (default) or
new GEOM.GEOM_Object, containing the translated object if theCopy flag is True.
"""
if theCopy:
anObj = self.TrsfOp.TranslateTwoPointsCopy(theObject, thePoint1, thePoint2)
else:
anObj = self.TrsfOp.TranslateTwoPoints(theObject, thePoint1, thePoint2)
RaiseIfFailed("TranslateTwoPoints", self.TrsfOp)
return anObj
## Translate the given object along the vector, specified
# by its end points, creating its copy before the translation.
# @param theObject The object to be translated.
# @param thePoint1 Start point of translation vector.
# @param thePoint2 End point of translation vector.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the translated object.
#
# @ref tui_translation "Example 1"
# \n @ref swig_MakeTranslationTwoPoints "Example 2"
@ManageTransactions("TrsfOp")
def MakeTranslationTwoPoints(self, theObject, thePoint1, thePoint2, theName=None):
"""
Translate the given object along the vector, specified
by its end points, creating its copy before the translation.
Parameters:
theObject The object to be translated.
thePoint1 Start point of translation vector.
thePoint2 End point of translation vector.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the translated object.
"""
# Example: see GEOM_TestAll.py
anObj = self.TrsfOp.TranslateTwoPointsCopy(theObject, thePoint1, thePoint2)
RaiseIfFailed("TranslateTwoPointsCopy", self.TrsfOp)
self._autoPublish(anObj, theName, "translated")
return anObj
## Translate the given object along the vector, specified by its components.
# @param theObject The object to be translated.
# @param theDX,theDY,theDZ Components of translation vector.
# @param theCopy Flag used to translate object itself or create a copy.
# @return Translated @a theObject (GEOM.GEOM_Object) if @a theCopy flag is @c False (default) or
# new GEOM.GEOM_Object, containing the translated object if @a theCopy flag is @c True.
#
# @ref tui_translation "Example"
@ManageTransactions("TrsfOp")
def TranslateDXDYDZ(self, theObject, theDX, theDY, theDZ, theCopy=False):
"""
Translate the given object along the vector, specified by its components.
Parameters:
theObject The object to be translated.
theDX,theDY,theDZ Components of translation vector.
theCopy Flag used to translate object itself or create a copy.
Returns:
Translated theObject (GEOM.GEOM_Object) if theCopy flag is False (default) or
new GEOM.GEOM_Object, containing the translated object if theCopy flag is True.
"""
# Example: see GEOM_TestAll.py
theDX, theDY, theDZ, Parameters = ParseParameters(theDX, theDY, theDZ)
if theCopy:
anObj = self.TrsfOp.TranslateDXDYDZCopy(theObject, theDX, theDY, theDZ)
else:
anObj = self.TrsfOp.TranslateDXDYDZ(theObject, theDX, theDY, theDZ)
anObj.SetParameters(Parameters)
RaiseIfFailed("TranslateDXDYDZ", self.TrsfOp)
return anObj
## Translate the given object along the vector, specified
# by its components, creating its copy before the translation.
# @param theObject The object to be translated.
# @param theDX,theDY,theDZ Components of translation vector.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the translated object.
#
# @ref tui_translation "Example"
@ManageTransactions("TrsfOp")
def MakeTranslation(self,theObject, theDX, theDY, theDZ, theName=None):
"""
Translate the given object along the vector, specified
by its components, creating its copy before the translation.
Parameters:
theObject The object to be translated.
theDX,theDY,theDZ Components of translation vector.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the translated object.
"""
# Example: see GEOM_TestAll.py
theDX, theDY, theDZ, Parameters = ParseParameters(theDX, theDY, theDZ)
anObj = self.TrsfOp.TranslateDXDYDZCopy(theObject, theDX, theDY, theDZ)
anObj.SetParameters(Parameters)
RaiseIfFailed("TranslateDXDYDZ", self.TrsfOp)
self._autoPublish(anObj, theName, "translated")
return anObj
## Translate the given object along the given vector.
# @param theObject The object to be translated.
# @param theVector The translation vector.
# @param theCopy Flag used to translate object itself or create a copy.
# @return Translated @a theObject (GEOM.GEOM_Object) if @a theCopy flag is @c False (default) or
# new GEOM.GEOM_Object, containing the translated object if @a theCopy flag is @c True.
@ManageTransactions("TrsfOp")
def TranslateVector(self, theObject, theVector, theCopy=False):
"""
Translate the given object along the given vector.
Parameters:
theObject The object to be translated.
theVector The translation vector.
theCopy Flag used to translate object itself or create a copy.
Returns:
Translated theObject (GEOM.GEOM_Object) if theCopy flag is False (default) or
new GEOM.GEOM_Object, containing the translated object if theCopy flag is True.
"""
if theCopy:
anObj = self.TrsfOp.TranslateVectorCopy(theObject, theVector)
else:
anObj = self.TrsfOp.TranslateVector(theObject, theVector)
RaiseIfFailed("TranslateVector", self.TrsfOp)
return anObj
## Translate the given object along the given vector,
# creating its copy before the translation.
# @param theObject The object to be translated.
# @param theVector The translation vector.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the translated object.
#
# @ref tui_translation "Example"
@ManageTransactions("TrsfOp")
def MakeTranslationVector(self, theObject, theVector, theName=None):
"""
Translate the given object along the given vector,
creating its copy before the translation.
Parameters:
theObject The object to be translated.
theVector The translation vector.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the translated object.
"""
# Example: see GEOM_TestAll.py
anObj = self.TrsfOp.TranslateVectorCopy(theObject, theVector)
RaiseIfFailed("TranslateVectorCopy", self.TrsfOp)
self._autoPublish(anObj, theName, "translated")
return anObj
## Translate the given object along the given vector on given distance.
# @param theObject The object to be translated.
# @param theVector The translation vector.
# @param theDistance The translation distance.
# @param theCopy Flag used to translate object itself or create a copy.
# @return Translated @a theObject (GEOM.GEOM_Object) if @a theCopy flag is @c False (default) or
# new GEOM.GEOM_Object, containing the translated object if @a theCopy flag is @c True.
#
# @ref tui_translation "Example"
@ManageTransactions("TrsfOp")
def TranslateVectorDistance(self, theObject, theVector, theDistance, theCopy=False):
"""
Translate the given object along the given vector on given distance.
Parameters:
theObject The object to be translated.
theVector The translation vector.
theDistance The translation distance.
theCopy Flag used to translate object itself or create a copy.
Returns:
Translated theObject (GEOM.GEOM_Object) if theCopy flag is False (default) or
new GEOM.GEOM_Object, containing the translated object if theCopy flag is True.
"""
# Example: see GEOM_TestAll.py
theDistance,Parameters = ParseParameters(theDistance)
anObj = self.TrsfOp.TranslateVectorDistance(theObject, theVector, theDistance, theCopy)
RaiseIfFailed("TranslateVectorDistance", self.TrsfOp)
anObj.SetParameters(Parameters)
return anObj
## Translate the given object along the given vector on given distance,
# creating its copy before the translation.
# @param theObject The object to be translated.
# @param theVector The translation vector.
# @param theDistance The translation distance.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the translated object.
#
# @ref tui_translation "Example"
@ManageTransactions("TrsfOp")
def MakeTranslationVectorDistance(self, theObject, theVector, theDistance, theName=None):
"""
Translate the given object along the given vector on given distance,
creating its copy before the translation.
Parameters:
theObject The object to be translated.
theVector The translation vector.
theDistance The translation distance.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the translated object.
"""
# Example: see GEOM_TestAll.py
theDistance,Parameters = ParseParameters(theDistance)
anObj = self.TrsfOp.TranslateVectorDistance(theObject, theVector, theDistance, 1)
RaiseIfFailed("TranslateVectorDistance", self.TrsfOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "translated")
return anObj
## Rotate the given object around the given axis on the given angle.
# @param theObject The object to be rotated.
# @param theAxis Rotation axis.
# @param theAngle Rotation angle in radians.
# @param theCopy Flag used to rotate object itself or create a copy.
#
# @return Rotated @a theObject (GEOM.GEOM_Object) if @a theCopy flag is @c False (default) or
# new GEOM.GEOM_Object, containing the rotated object if @a theCopy flag is @c True.
#
# @ref tui_rotation "Example"
@ManageTransactions("TrsfOp")
def Rotate(self, theObject, theAxis, theAngle, theCopy=False):
"""
Rotate the given object around the given axis on the given angle.
Parameters:
theObject The object to be rotated.
theAxis Rotation axis.
theAngle Rotation angle in radians.
theCopy Flag used to rotate object itself or create a copy.
Returns:
Rotated theObject (GEOM.GEOM_Object) if theCopy flag is False (default) or
new GEOM.GEOM_Object, containing the rotated object if theCopy flag is True.
"""
# Example: see GEOM_TestAll.py
flag = False
if isinstance(theAngle,str):
flag = True
theAngle, Parameters = ParseParameters(theAngle)
if flag:
theAngle = theAngle*math.pi/180.0
if theCopy:
anObj = self.TrsfOp.RotateCopy(theObject, theAxis, theAngle)
else:
anObj = self.TrsfOp.Rotate(theObject, theAxis, theAngle)
RaiseIfFailed("Rotate", self.TrsfOp)
anObj.SetParameters(Parameters)
return anObj
## Rotate the given object around the given axis
# on the given angle, creating its copy before the rotation.
# @param theObject The object to be rotated.
# @param theAxis Rotation axis.
# @param theAngle Rotation angle in radians.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the rotated object.
#
# @ref tui_rotation "Example"
@ManageTransactions("TrsfOp")
def MakeRotation(self, theObject, theAxis, theAngle, theName=None):
"""
Rotate the given object around the given axis
on the given angle, creating its copy before the rotatation.
Parameters:
theObject The object to be rotated.
theAxis Rotation axis.
theAngle Rotation angle in radians.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the rotated object.
"""
# Example: see GEOM_TestAll.py
flag = False
if isinstance(theAngle,str):
flag = True
theAngle, Parameters = ParseParameters(theAngle)
if flag:
theAngle = theAngle*math.pi/180.0
anObj = self.TrsfOp.RotateCopy(theObject, theAxis, theAngle)
RaiseIfFailed("RotateCopy", self.TrsfOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "rotated")
return anObj
## Rotate given object around vector perpendicular to plane
# containing three points.
# @param theObject The object to be rotated.
# @param theCentPoint central point the axis is the vector perpendicular to the plane
# containing the three points.
# @param thePoint1,thePoint2 points in a perpendicular plane of the axis.
# @param theCopy Flag used to rotate object itself or create a copy.
# @return Rotated @a theObject (GEOM.GEOM_Object) if @a theCopy flag is @c False (default) or
# new GEOM.GEOM_Object, containing the rotated object if @a theCopy flag is @c True.
@ManageTransactions("TrsfOp")
def RotateThreePoints(self, theObject, theCentPoint, thePoint1, thePoint2, theCopy=False):
"""
Rotate given object around vector perpendicular to plane
containing three points.
Parameters:
theObject The object to be rotated.
theCentPoint central point the axis is the vector perpendicular to the plane
containing the three points.
thePoint1,thePoint2 points in a perpendicular plane of the axis.
theCopy Flag used to rotate object itself or create a copy.
Returns:
Rotated theObject (GEOM.GEOM_Object) if theCopy flag is False (default) or
new GEOM.GEOM_Object, containing the rotated object if theCopy flag is True.
"""
if theCopy:
anObj = self.TrsfOp.RotateThreePointsCopy(theObject, theCentPoint, thePoint1, thePoint2)
else:
anObj = self.TrsfOp.RotateThreePoints(theObject, theCentPoint, thePoint1, thePoint2)
RaiseIfFailed("RotateThreePoints", self.TrsfOp)
return anObj
## Rotate given object around vector perpendicular to plane
    #  containing three points, creating its copy before the rotation.
# @param theObject The object to be rotated.
# @param theCentPoint central point the axis is the vector perpendicular to the plane
# containing the three points.
# @param thePoint1,thePoint2 in a perpendicular plane of the axis.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the rotated object.
#
# @ref tui_rotation "Example"
@ManageTransactions("TrsfOp")
def MakeRotationThreePoints(self, theObject, theCentPoint, thePoint1, thePoint2, theName=None):
"""
Rotate given object around vector perpendicular to plane
containing three points, creating its copy before the rotatation.
Parameters:
theObject The object to be rotated.
theCentPoint central point the axis is the vector perpendicular to the plane
containing the three points.
thePoint1,thePoint2 in a perpendicular plane of the axis.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the rotated object.
"""
# Example: see GEOM_TestAll.py
anObj = self.TrsfOp.RotateThreePointsCopy(theObject, theCentPoint, thePoint1, thePoint2)
RaiseIfFailed("RotateThreePointsCopy", self.TrsfOp)
self._autoPublish(anObj, theName, "rotated")
return anObj
## Scale the given object by the specified factor.
# @param theObject The object to be scaled.
# @param thePoint Center point for scaling.
# Passing None for it means scaling relatively the origin of global CS.
# @param theFactor Scaling factor value.
# @param theCopy Flag used to scale object itself or create a copy.
# @return Scaled @a theObject (GEOM.GEOM_Object) if @a theCopy flag is @c False (default) or
# new GEOM.GEOM_Object, containing the scaled object if @a theCopy flag is @c True.
@ManageTransactions("TrsfOp")
def Scale(self, theObject, thePoint, theFactor, theCopy=False):
"""
Scale the given object by the specified factor.
Parameters:
theObject The object to be scaled.
thePoint Center point for scaling.
Passing None for it means scaling relatively the origin of global CS.
theFactor Scaling factor value.
theCopy Flag used to scale object itself or create a copy.
Returns:
Scaled theObject (GEOM.GEOM_Object) if theCopy flag is False (default) or
new GEOM.GEOM_Object, containing the scaled object if theCopy flag is True.
"""
# Example: see GEOM_TestAll.py
theFactor, Parameters = ParseParameters(theFactor)
if theCopy:
anObj = self.TrsfOp.ScaleShapeCopy(theObject, thePoint, theFactor)
else:
anObj = self.TrsfOp.ScaleShape(theObject, thePoint, theFactor)
RaiseIfFailed("Scale", self.TrsfOp)
anObj.SetParameters(Parameters)
return anObj
## Scale the given object by the factor, creating its copy before the scaling.
# @param theObject The object to be scaled.
# @param thePoint Center point for scaling.
# Passing None for it means scaling relatively the origin of global CS.
# @param theFactor Scaling factor value.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the scaled shape.
#
# @ref tui_scale "Example"
@ManageTransactions("TrsfOp")
def MakeScaleTransform(self, theObject, thePoint, theFactor, theName=None):
"""
Scale the given object by the factor, creating its copy before the scaling.
Parameters:
theObject The object to be scaled.
thePoint Center point for scaling.
Passing None for it means scaling relatively the origin of global CS.
theFactor Scaling factor value.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the scaled shape.
"""
# Example: see GEOM_TestAll.py
theFactor, Parameters = ParseParameters(theFactor)
anObj = self.TrsfOp.ScaleShapeCopy(theObject, thePoint, theFactor)
RaiseIfFailed("ScaleShapeCopy", self.TrsfOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "scaled")
return anObj
## Scale the given object by different factors along coordinate axes.
# @param theObject The object to be scaled.
# @param thePoint Center point for scaling.
# Passing None for it means scaling relatively the origin of global CS.
# @param theFactorX,theFactorY,theFactorZ Scaling factors along each axis.
# @param theCopy Flag used to scale object itself or create a copy.
# @return Scaled @a theObject (GEOM.GEOM_Object) if @a theCopy flag is @c False (default) or
# new GEOM.GEOM_Object, containing the scaled object if @a theCopy flag is @c True.
@ManageTransactions("TrsfOp")
def ScaleAlongAxes(self, theObject, thePoint, theFactorX, theFactorY, theFactorZ, theCopy=False):
"""
Scale the given object by different factors along coordinate axes.
Parameters:
theObject The object to be scaled.
thePoint Center point for scaling.
Passing None for it means scaling relatively the origin of global CS.
theFactorX,theFactorY,theFactorZ Scaling factors along each axis.
theCopy Flag used to scale object itself or create a copy.
Returns:
Scaled theObject (GEOM.GEOM_Object) if theCopy flag is False (default) or
new GEOM.GEOM_Object, containing the scaled object if theCopy flag is True.
"""
# Example: see GEOM_TestAll.py
theFactorX, theFactorY, theFactorZ, Parameters = ParseParameters(theFactorX, theFactorY, theFactorZ)
if theCopy:
anObj = self.TrsfOp.ScaleShapeAlongAxesCopy(theObject, thePoint,
theFactorX, theFactorY, theFactorZ)
else:
anObj = self.TrsfOp.ScaleShapeAlongAxes(theObject, thePoint,
theFactorX, theFactorY, theFactorZ)
RaiseIfFailed("ScaleAlongAxes", self.TrsfOp)
anObj.SetParameters(Parameters)
return anObj
## Scale the given object by different factors along coordinate axes,
# creating its copy before the scaling.
# @param theObject The object to be scaled.
# @param thePoint Center point for scaling.
# Passing None for it means scaling relatively the origin of global CS.
# @param theFactorX,theFactorY,theFactorZ Scaling factors along each axis.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the scaled shape.
#
# @ref swig_scale "Example"
@ManageTransactions("TrsfOp")
def MakeScaleAlongAxes(self, theObject, thePoint, theFactorX, theFactorY, theFactorZ, theName=None):
"""
Scale the given object by different factors along coordinate axes,
creating its copy before the scaling.
Parameters:
theObject The object to be scaled.
thePoint Center point for scaling.
Passing None for it means scaling relatively the origin of global CS.
theFactorX,theFactorY,theFactorZ Scaling factors along each axis.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the scaled shape.
"""
# Example: see GEOM_TestAll.py
theFactorX, theFactorY, theFactorZ, Parameters = ParseParameters(theFactorX, theFactorY, theFactorZ)
anObj = self.TrsfOp.ScaleShapeAlongAxesCopy(theObject, thePoint,
theFactorX, theFactorY, theFactorZ)
RaiseIfFailed("MakeScaleAlongAxes", self.TrsfOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "scaled")
return anObj
## Mirror an object relatively the given plane.
# @param theObject The object to be mirrored.
# @param thePlane Plane of symmetry.
# @param theCopy Flag used to mirror object itself or create a copy.
# @return Mirrored @a theObject (GEOM.GEOM_Object) if @a theCopy flag is @c False (default) or
# new GEOM.GEOM_Object, containing the mirrored object if @a theCopy flag is @c True.
@ManageTransactions("TrsfOp")
def MirrorByPlane(self, theObject, thePlane, theCopy=False):
"""
Mirror an object relatively the given plane.
Parameters:
theObject The object to be mirrored.
thePlane Plane of symmetry.
theCopy Flag used to mirror object itself or create a copy.
Returns:
Mirrored theObject (GEOM.GEOM_Object) if theCopy flag is False (default) or
new GEOM.GEOM_Object, containing the mirrored object if theCopy flag is True.
"""
if theCopy:
anObj = self.TrsfOp.MirrorPlaneCopy(theObject, thePlane)
else:
anObj = self.TrsfOp.MirrorPlane(theObject, thePlane)
RaiseIfFailed("MirrorByPlane", self.TrsfOp)
return anObj
## Create an object, symmetrical
# to the given one relatively the given plane.
# @param theObject The object to be mirrored.
# @param thePlane Plane of symmetry.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the mirrored shape.
#
# @ref tui_mirror "Example"
@ManageTransactions("TrsfOp")
def MakeMirrorByPlane(self, theObject, thePlane, theName=None):
"""
Create an object, symmetrical to the given one relatively the given plane.
Parameters:
theObject The object to be mirrored.
thePlane Plane of symmetry.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the mirrored shape.
"""
# Example: see GEOM_TestAll.py
anObj = self.TrsfOp.MirrorPlaneCopy(theObject, thePlane)
RaiseIfFailed("MirrorPlaneCopy", self.TrsfOp)
self._autoPublish(anObj, theName, "mirrored")
return anObj
## Mirror an object relatively the given axis.
# @param theObject The object to be mirrored.
# @param theAxis Axis of symmetry.
# @param theCopy Flag used to mirror object itself or create a copy.
# @return Mirrored @a theObject (GEOM.GEOM_Object) if @a theCopy flag is @c False (default) or
# new GEOM.GEOM_Object, containing the mirrored object if @a theCopy flag is @c True.
@ManageTransactions("TrsfOp")
def MirrorByAxis(self, theObject, theAxis, theCopy=False):
"""
Mirror an object relatively the given axis.
Parameters:
theObject The object to be mirrored.
theAxis Axis of symmetry.
theCopy Flag used to mirror object itself or create a copy.
Returns:
Mirrored theObject (GEOM.GEOM_Object) if theCopy flag is False (default) or
new GEOM.GEOM_Object, containing the mirrored object if theCopy flag is True.
"""
if theCopy:
anObj = self.TrsfOp.MirrorAxisCopy(theObject, theAxis)
else:
anObj = self.TrsfOp.MirrorAxis(theObject, theAxis)
RaiseIfFailed("MirrorByAxis", self.TrsfOp)
return anObj
## Create an object, symmetrical
# to the given one relatively the given axis.
# @param theObject The object to be mirrored.
# @param theAxis Axis of symmetry.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the mirrored shape.
#
# @ref tui_mirror "Example"
@ManageTransactions("TrsfOp")
def MakeMirrorByAxis(self, theObject, theAxis, theName=None):
"""
Create an object, symmetrical to the given one relatively the given axis.
Parameters:
theObject The object to be mirrored.
theAxis Axis of symmetry.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the mirrored shape.
"""
# Example: see GEOM_TestAll.py
anObj = self.TrsfOp.MirrorAxisCopy(theObject, theAxis)
RaiseIfFailed("MirrorAxisCopy", self.TrsfOp)
self._autoPublish(anObj, theName, "mirrored")
return anObj
## Mirror an object relatively the given point.
# @param theObject The object to be mirrored.
# @param thePoint Point of symmetry.
# @param theCopy Flag used to mirror object itself or create a copy.
# @return Mirrored @a theObject (GEOM.GEOM_Object) if @a theCopy flag is @c False (default) or
# new GEOM.GEOM_Object, containing the mirrored object if @a theCopy flag is @c True.
@ManageTransactions("TrsfOp")
def MirrorByPoint(self, theObject, thePoint, theCopy=False):
"""
Mirror an object relatively the given point.
Parameters:
theObject The object to be mirrored.
thePoint Point of symmetry.
theCopy Flag used to mirror object itself or create a copy.
Returns:
Mirrored theObject (GEOM.GEOM_Object) if theCopy flag is False (default) or
new GEOM.GEOM_Object, containing the mirrored object if theCopy flag is True.
"""
# Example: see GEOM_TestAll.py
if theCopy:
anObj = self.TrsfOp.MirrorPointCopy(theObject, thePoint)
else:
anObj = self.TrsfOp.MirrorPoint(theObject, thePoint)
RaiseIfFailed("MirrorByPoint", self.TrsfOp)
return anObj
## Create an object, symmetrical
# to the given one relatively the given point.
# @param theObject The object to be mirrored.
# @param thePoint Point of symmetry.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the mirrored shape.
#
# @ref tui_mirror "Example"
@ManageTransactions("TrsfOp")
def MakeMirrorByPoint(self, theObject, thePoint, theName=None):
"""
Create an object, symmetrical
to the given one relatively the given point.
Parameters:
theObject The object to be mirrored.
thePoint Point of symmetry.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the mirrored shape.
"""
# Example: see GEOM_TestAll.py
anObj = self.TrsfOp.MirrorPointCopy(theObject, thePoint)
RaiseIfFailed("MirrorPointCopy", self.TrsfOp)
self._autoPublish(anObj, theName, "mirrored")
return anObj
## Modify the location of the given object.
# @param theObject The object to be displaced.
# @param theStartLCS Coordinate system to perform displacement from it.\n
# If \a theStartLCS is NULL, displacement
# will be performed from global CS.\n
# If \a theObject itself is used as \a theStartLCS,
# its location will be changed to \a theEndLCS.
# @param theEndLCS Coordinate system to perform displacement to it.
# @param theCopy Flag used to displace object itself or create a copy.
# @return Displaced @a theObject (GEOM.GEOM_Object) if @a theCopy flag is @c False (default) or
# new GEOM.GEOM_Object, containing the displaced object if @a theCopy flag is @c True.
@ManageTransactions("TrsfOp")
def Position(self, theObject, theStartLCS, theEndLCS, theCopy=False):
"""
Modify the Location of the given object by LCS, creating its copy before the setting.
Parameters:
theObject The object to be displaced.
theStartLCS Coordinate system to perform displacement from it.
If theStartLCS is NULL, displacement
will be performed from global CS.
If theObject itself is used as theStartLCS,
its location will be changed to theEndLCS.
theEndLCS Coordinate system to perform displacement to it.
theCopy Flag used to displace object itself or create a copy.
Returns:
Displaced theObject (GEOM.GEOM_Object) if theCopy flag is False (default) or
new GEOM.GEOM_Object, containing the displaced object if theCopy flag is True.
"""
# Example: see GEOM_TestAll.py
if theCopy:
anObj = self.TrsfOp.PositionShapeCopy(theObject, theStartLCS, theEndLCS)
else:
anObj = self.TrsfOp.PositionShape(theObject, theStartLCS, theEndLCS)
RaiseIfFailed("Displace", self.TrsfOp)
return anObj
## Modify the Location of the given object by LCS,
# creating its copy before the setting.
# @param theObject The object to be displaced.
# @param theStartLCS Coordinate system to perform displacement from it.\n
# If \a theStartLCS is NULL, displacement
# will be performed from global CS.\n
# If \a theObject itself is used as \a theStartLCS,
# its location will be changed to \a theEndLCS.
# @param theEndLCS Coordinate system to perform displacement to it.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the displaced shape.
#
# @ref tui_modify_location "Example"
@ManageTransactions("TrsfOp")
def MakePosition(self, theObject, theStartLCS, theEndLCS, theName=None):
"""
Modify the Location of the given object by LCS, creating its copy before the setting.
Parameters:
theObject The object to be displaced.
theStartLCS Coordinate system to perform displacement from it.
If theStartLCS is NULL, displacement
will be performed from global CS.
If theObject itself is used as theStartLCS,
its location will be changed to theEndLCS.
theEndLCS Coordinate system to perform displacement to it.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the displaced shape.
Example of usage:
# create local coordinate systems
cs1 = geompy.MakeMarker( 0, 0, 0, 1,0,0, 0,1,0)
cs2 = geompy.MakeMarker(30,40,40, 1,0,0, 0,1,0)
# modify the location of the given object
position = geompy.MakePosition(cylinder, cs1, cs2)
"""
# Example: see GEOM_TestAll.py
anObj = self.TrsfOp.PositionShapeCopy(theObject, theStartLCS, theEndLCS)
RaiseIfFailed("PositionShapeCopy", self.TrsfOp)
self._autoPublish(anObj, theName, "displaced")
return anObj
## Modify the Location of the given object by Path.
# @param theObject The object to be displaced.
# @param thePath Wire or Edge along that the object will be translated.
# @param theDistance progress of Path (0 = start location, 1 = end of path location).
# @param theCopy is to create a copy objects if true.
# @param theReverse 0 - for usual direction, 1 - to reverse path direction.
# @return Displaced @a theObject (GEOM.GEOM_Object) if @a theCopy is @c False or
# new GEOM.GEOM_Object, containing the displaced shape if @a theCopy is @c True.
#
# @ref tui_modify_location "Example"
@ManageTransactions("TrsfOp")
def PositionAlongPath(self,theObject, thePath, theDistance, theCopy, theReverse):
"""
Modify the Location of the given object by Path.
Parameters:
theObject The object to be displaced.
thePath Wire or Edge along that the object will be translated.
theDistance progress of Path (0 = start location, 1 = end of path location).
theCopy is to create a copy objects if true.
theReverse 0 - for usual direction, 1 - to reverse path direction.
Returns:
Displaced theObject (GEOM.GEOM_Object) if theCopy is False or
new GEOM.GEOM_Object, containing the displaced shape if theCopy is True.
Example of usage:
position = geompy.PositionAlongPath(cylinder, circle, 0.75, 1, 1)
"""
# Example: see GEOM_TestAll.py
anObj = self.TrsfOp.PositionAlongPath(theObject, thePath, theDistance, theCopy, theReverse)
RaiseIfFailed("PositionAlongPath", self.TrsfOp)
return anObj
## Modify the Location of the given object by Path, creating its copy before the operation.
# @param theObject The object to be displaced.
# @param thePath Wire or Edge along that the object will be translated.
# @param theDistance progress of Path (0 = start location, 1 = end of path location).
# @param theReverse 0 - for usual direction, 1 - to reverse path direction.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the displaced shape.
@ManageTransactions("TrsfOp")
def MakePositionAlongPath(self, theObject, thePath, theDistance, theReverse, theName=None):
"""
Modify the Location of the given object by Path, creating its copy before the operation.
Parameters:
theObject The object to be displaced.
thePath Wire or Edge along that the object will be translated.
theDistance progress of Path (0 = start location, 1 = end of path location).
theReverse 0 - for usual direction, 1 - to reverse path direction.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the displaced shape.
"""
# Example: see GEOM_TestAll.py
anObj = self.TrsfOp.PositionAlongPath(theObject, thePath, theDistance, 1, theReverse)
RaiseIfFailed("PositionAlongPath", self.TrsfOp)
self._autoPublish(anObj, theName, "displaced")
return anObj
## Offset given shape.
# @param theObject The base object for the offset.
# @param theOffset Offset value.
# @param theCopy Flag used to offset object itself or create a copy.
# @return Modified @a theObject (GEOM.GEOM_Object) if @a theCopy flag is @c False (default) or
# new GEOM.GEOM_Object, containing the result of offset operation if @a theCopy flag is @c True.
@ManageTransactions("TrsfOp")
def Offset(self, theObject, theOffset, theCopy=False):
"""
Offset given shape.
Parameters:
theObject The base object for the offset.
theOffset Offset value.
theCopy Flag used to offset object itself or create a copy.
Returns:
Modified theObject (GEOM.GEOM_Object) if theCopy flag is False (default) or
new GEOM.GEOM_Object, containing the result of offset operation if theCopy flag is True.
"""
theOffset, Parameters = ParseParameters(theOffset)
if theCopy:
anObj = self.TrsfOp.OffsetShapeCopy(theObject, theOffset)
else:
anObj = self.TrsfOp.OffsetShape(theObject, theOffset)
RaiseIfFailed("Offset", self.TrsfOp)
anObj.SetParameters(Parameters)
return anObj
## Create new object as offset of the given one.
# @param theObject The base object for the offset.
# @param theOffset Offset value.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the offset object.
#
# @ref tui_offset "Example"
@ManageTransactions("TrsfOp")
def MakeOffset(self, theObject, theOffset, theName=None):
"""
Create new object as offset of the given one.
Parameters:
theObject The base object for the offset.
theOffset Offset value.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the offset object.
Example of usage:
box = geompy.MakeBox(20, 20, 20, 200, 200, 200)
# create a new object as offset of the given object
offset = geompy.MakeOffset(box, 70.)
"""
# Example: see GEOM_TestAll.py
theOffset, Parameters = ParseParameters(theOffset)
anObj = self.TrsfOp.OffsetShapeCopy(theObject, theOffset)
RaiseIfFailed("OffsetShapeCopy", self.TrsfOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "offset")
return anObj
## Create new object as projection of the given one on a 2D surface.
# @param theSource The source object for the projection. It can be a point, edge or wire.
# @param theTarget The target object. It can be planar or cylindrical face.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the projection.
#
# @ref tui_projection "Example"
@ManageTransactions("TrsfOp")
def MakeProjection(self, theSource, theTarget, theName=None):
"""
Create new object as projection of the given one on a 2D surface.
Parameters:
theSource The source object for the projection. It can be a point, edge or wire.
theTarget The target object. It can be planar or cylindrical face.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the projection.
"""
# Example: see GEOM_TestAll.py
anObj = self.TrsfOp.ProjectShapeCopy(theSource, theTarget)
RaiseIfFailed("ProjectShapeCopy", self.TrsfOp)
self._autoPublish(anObj, theName, "projection")
return anObj
        ## Create a projection of the given point on a wire or an edge.
        #  If there are no solutions or there are 2 or more solutions It throws an
        #  exception.
        #  @param thePoint the point to be projected.
        #  @param theWire the wire. The edge is accepted as well.
        #  @param theName Object name; when specified, this parameter is used
        #         for result publication in the study. Otherwise, if automatic
        #         publication is switched on, default value is used for result name.
        #
        #  @return [\a u, \a PointOnEdge, \a EdgeInWireIndex]
        #  \n \a u: The parameter of projection point on edge.
        #  \n \a PointOnEdge: The projection point.
        #  \n \a EdgeInWireIndex: The index of an edge in a wire.
        #
        #  @ref tui_projection "Example"
        @ManageTransactions("TrsfOp")
        def MakeProjectionOnWire(self, thePoint, theWire, theName=None):
            """
            Create a projection of the given point on a wire or an edge.
            If there are no solutions or there are 2 or more solutions It throws an
            exception.
            Parameters:
                thePoint the point to be projected.
                theWire the wire. The edge is accepted as well.
                theName Object name; when specified, this parameter is used
                        for result publication in the study. Otherwise, if automatic
                        publication is switched on, default value is used for result name.
            Returns:
                [u, PointOnEdge, EdgeInWireIndex]
                u: The parameter of projection point on edge.
                PointOnEdge: The projection point.
                EdgeInWireIndex: The index of an edge in a wire.
            """
            # Example: see GEOM_TestAll.py
            anObj = self.TrsfOp.ProjectPointOnWire(thePoint, theWire)
            RaiseIfFailed("ProjectPointOnWire", self.TrsfOp)
            # only the projection point (element [1] of the triple) is published
            self._autoPublish(anObj[1], theName, "projection")
            return anObj
# -----------------------------------------------------------------------------
# Patterns
# -----------------------------------------------------------------------------
## Translate the given object along the given vector a given number times
# @param theObject The object to be translated.
# @param theVector Direction of the translation. DX if None.
# @param theStep Distance to translate on.
# @param theNbTimes Quantity of translations to be done.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing compound of all
# the shapes, obtained after each translation.
#
# @ref tui_multi_translation "Example"
@ManageTransactions("TrsfOp")
def MakeMultiTranslation1D(self, theObject, theVector, theStep, theNbTimes, theName=None):
"""
Translate the given object along the given vector a given number times
Parameters:
theObject The object to be translated.
theVector Direction of the translation. DX if None.
theStep Distance to translate on.
theNbTimes Quantity of translations to be done.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing compound of all
the shapes, obtained after each translation.
Example of usage:
r1d = geompy.MakeMultiTranslation1D(prism, vect, 20, 4)
"""
# Example: see GEOM_TestAll.py
theStep, theNbTimes, Parameters = ParseParameters(theStep, theNbTimes)
anObj = self.TrsfOp.MultiTranslate1D(theObject, theVector, theStep, theNbTimes)
RaiseIfFailed("MultiTranslate1D", self.TrsfOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "multitranslation")
return anObj
        ## Consequently apply two specified translations to theObject specified number of times.
        #  @param theObject The object to be translated.
        #  @param theVector1 Direction of the first translation. DX if None.
        #  @param theStep1 Step of the first translation.
        #  @param theNbTimes1 Quantity of translations to be done along theVector1.
        #  @param theVector2 Direction of the second translation. DY if None.
        #  @param theStep2 Step of the second translation.
        #  @param theNbTimes2 Quantity of translations to be done along theVector2.
        #  @param theName Object name; when specified, this parameter is used
        #         for result publication in the study. Otherwise, if automatic
        #         publication is switched on, default value is used for result name.
        #
        #  @return New GEOM.GEOM_Object, containing compound of all
        #          the shapes, obtained after each translation.
        #
        #  @ref tui_multi_translation "Example"
        @ManageTransactions("TrsfOp")
        def MakeMultiTranslation2D(self, theObject, theVector1, theStep1, theNbTimes1,
                                   theVector2, theStep2, theNbTimes2, theName=None):
            """
            Consequently apply two specified translations to theObject specified number of times.
            Parameters:
                theObject The object to be translated.
                theVector1 Direction of the first translation. DX if None.
                theStep1 Step of the first translation.
                theNbTimes1 Quantity of translations to be done along theVector1.
                theVector2 Direction of the second translation. DY if None.
                theStep2 Step of the second translation.
                theNbTimes2 Quantity of translations to be done along theVector2.
                theName Object name; when specified, this parameter is used
                        for result publication in the study. Otherwise, if automatic
                        publication is switched on, default value is used for result name.
            Returns:
                New GEOM.GEOM_Object, containing compound of all
                the shapes, obtained after each translation.
            Example of usage:
                tr2d = geompy.MakeMultiTranslation2D(prism, vect1, 20, 4, vect2, 80, 3)
            """
            # Example: see GEOM_TestAll.py
            theStep1,theNbTimes1,theStep2,theNbTimes2, Parameters = ParseParameters(theStep1,theNbTimes1,theStep2,theNbTimes2)
            anObj = self.TrsfOp.MultiTranslate2D(theObject, theVector1, theStep1, theNbTimes1,
                                                 theVector2, theStep2, theNbTimes2)
            RaiseIfFailed("MultiTranslate2D", self.TrsfOp)
            anObj.SetParameters(Parameters)
            self._autoPublish(anObj, theName, "multitranslation")
            return anObj
## Rotate the given object around the given axis a given number times.
# Rotation angle will be 2*PI/theNbTimes.
# @param theObject The object to be rotated.
# @param theAxis The rotation axis. DZ if None.
# @param theNbTimes Quantity of rotations to be done.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing compound of all the
# shapes, obtained after each rotation.
#
# @ref tui_multi_rotation "Example"
@ManageTransactions("TrsfOp")
def MultiRotate1DNbTimes (self, theObject, theAxis, theNbTimes, theName=None):
"""
Rotate the given object around the given axis a given number times.
Rotation angle will be 2*PI/theNbTimes.
Parameters:
theObject The object to be rotated.
theAxis The rotation axis. DZ if None.
theNbTimes Quantity of rotations to be done.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing compound of all the
shapes, obtained after each rotation.
Example of usage:
rot1d = geompy.MultiRotate1DNbTimes(prism, vect, 4)
"""
# Example: see GEOM_TestAll.py
theNbTimes, Parameters = ParseParameters(theNbTimes)
anObj = self.TrsfOp.MultiRotate1D(theObject, theAxis, theNbTimes)
RaiseIfFailed("MultiRotate1DNbTimes", self.TrsfOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "multirotation")
return anObj
## Rotate the given object around the given axis
# a given number times on the given angle.
# @param theObject The object to be rotated.
# @param theAxis The rotation axis. DZ if None.
# @param theAngleStep Rotation angle in radians.
# @param theNbTimes Quantity of rotations to be done.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing compound of all the
# shapes, obtained after each rotation.
#
# @ref tui_multi_rotation "Example"
@ManageTransactions("TrsfOp")
def MultiRotate1DByStep(self, theObject, theAxis, theAngleStep, theNbTimes, theName=None):
"""
Rotate the given object around the given axis
a given number times on the given angle.
Parameters:
theObject The object to be rotated.
theAxis The rotation axis. DZ if None.
theAngleStep Rotation angle in radians.
theNbTimes Quantity of rotations to be done.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing compound of all the
shapes, obtained after each rotation.
Example of usage:
rot1d = geompy.MultiRotate1DByStep(prism, vect, math.pi/4, 4)
"""
# Example: see GEOM_TestAll.py
theAngleStep, theNbTimes, Parameters = ParseParameters(theAngleStep, theNbTimes)
anObj = self.TrsfOp.MultiRotate1DByStep(theObject, theAxis, theAngleStep, theNbTimes)
RaiseIfFailed("MultiRotate1DByStep", self.TrsfOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "multirotation")
return anObj
        ## Rotate the given object around the given axis a given
        #  number times and multi-translate each rotation result.
        #  Rotation angle will be 2*PI/theNbTimes1.
        #  Translation direction passes through center of gravity
        #  of rotated shape and its projection on the rotation axis.
        #  @param theObject The object to be rotated.
        #  @param theAxis Rotation axis. DZ if None.
        #  @param theNbTimes1 Quantity of rotations to be done.
        #  @param theRadialStep Translation distance.
        #  @param theNbTimes2 Quantity of translations to be done.
        #  @param theName Object name; when specified, this parameter is used
        #         for result publication in the study. Otherwise, if automatic
        #         publication is switched on, default value is used for result name.
        #
        #  @return New GEOM.GEOM_Object, containing compound of all the
        #          shapes, obtained after each transformation.
        #
        #  @ref tui_multi_rotation "Example"
        @ManageTransactions("TrsfOp")
        def MultiRotate2DNbTimes(self, theObject, theAxis, theNbTimes1, theRadialStep, theNbTimes2, theName=None):
            """
            Rotate the given object around the given axis a given number
            times and multi-translate each rotation result.
            Rotation angle will be 2*PI/theNbTimes1.
            Translation direction passes through center of gravity
            of rotated shape and its projection on the rotation axis.
            Parameters:
                theObject The object to be rotated.
                theAxis Rotation axis. DZ if None.
                theNbTimes1 Quantity of rotations to be done.
                theRadialStep Translation distance.
                theNbTimes2 Quantity of translations to be done.
                theName Object name; when specified, this parameter is used
                        for result publication in the study. Otherwise, if automatic
                        publication is switched on, default value is used for result name.
            Returns:
                New GEOM.GEOM_Object, containing compound of all the
                shapes, obtained after each transformation.
            Example of usage:
                rot2d = geompy.MultiRotate2DNbTimes(prism, vect, 4, 50, 5)
            """
            # Example: see GEOM_TestAll.py
            theNbTimes1, theRadialStep, theNbTimes2, Parameters = ParseParameters(theNbTimes1, theRadialStep, theNbTimes2)
            anObj = self.TrsfOp.MultiRotate2DNbTimes(theObject, theAxis, theNbTimes1, theRadialStep, theNbTimes2)
            RaiseIfFailed("MultiRotate2DNbTimes", self.TrsfOp)
            anObj.SetParameters(Parameters)
            self._autoPublish(anObj, theName, "multirotation")
            return anObj
        ## Rotate the given object around the
        #  given axis on the given angle a given number
        #  times and multi-translate each rotation result.
        #  Translation direction passes through center of gravity
        #  of rotated shape and its projection on the rotation axis.
        #  @param theObject The object to be rotated.
        #  @param theAxis Rotation axis. DZ if None.
        #  @param theAngleStep Rotation angle in radians.
        #  @param theNbTimes1 Quantity of rotations to be done.
        #  @param theRadialStep Translation distance.
        #  @param theNbTimes2 Quantity of translations to be done.
        #  @param theName Object name; when specified, this parameter is used
        #         for result publication in the study. Otherwise, if automatic
        #         publication is switched on, default value is used for result name.
        #
        #  @return New GEOM.GEOM_Object, containing compound of all the
        #          shapes, obtained after each transformation.
        #
        #  @ref tui_multi_rotation "Example"
        @ManageTransactions("TrsfOp")
        def MultiRotate2DByStep (self, theObject, theAxis, theAngleStep, theNbTimes1, theRadialStep, theNbTimes2, theName=None):
            """
            Rotate the given object around the
            given axis on the given angle a given number
            times and multi-translate each rotation result.
            Translation direction passes through center of gravity
            of rotated shape and its projection on the rotation axis.
            Parameters:
                theObject The object to be rotated.
                theAxis Rotation axis. DZ if None.
                theAngleStep Rotation angle in radians.
                theNbTimes1 Quantity of rotations to be done.
                theRadialStep Translation distance.
                theNbTimes2 Quantity of translations to be done.
                theName Object name; when specified, this parameter is used
                        for result publication in the study. Otherwise, if automatic
                        publication is switched on, default value is used for result name.
            Returns:
                New GEOM.GEOM_Object, containing compound of all the
                shapes, obtained after each transformation.
            Example of usage:
                rot2d = geompy.MultiRotate2DByStep(prism, vect, math.pi/3, 4, 50, 5)
            """
            # Example: see GEOM_TestAll.py
            theAngleStep, theNbTimes1, theRadialStep, theNbTimes2, Parameters = ParseParameters(theAngleStep, theNbTimes1, theRadialStep, theNbTimes2)
            anObj = self.TrsfOp.MultiRotate2DByStep(theObject, theAxis, theAngleStep, theNbTimes1, theRadialStep, theNbTimes2)
            RaiseIfFailed("MultiRotate2DByStep", self.TrsfOp)
            anObj.SetParameters(Parameters)
            self._autoPublish(anObj, theName, "multirotation")
            return anObj
        ## The same, as MultiRotate1DNbTimes(), but axis is given by direction and point
        #
        #  @ref swig_MakeMultiRotation "Example"
        def MakeMultiRotation1DNbTimes(self, aShape, aDir, aPoint, aNbTimes, theName=None):
            """
            The same, as geompy.MultiRotate1DNbTimes, but axis is given by direction and point
            Example of usage:
                pz = geompy.MakeVertex(0, 0, 100)
                vy = geompy.MakeVectorDXDYDZ(0, 100, 0)
                MultiRot1D = geompy.MakeMultiRotation1DNbTimes(prism, vy, pz, 6)
            """
            # Example: see GEOM_TestOthers.py
            # build the rotation axis from the given point and direction
            aVec = self.MakeLine(aPoint,aDir)
            # note: auto-publishing is done in self.MultiRotate1DNbTimes()
            anObj = self.MultiRotate1DNbTimes(aShape, aVec, aNbTimes, theName)
            return anObj
## The same, as MultiRotate1DByStep(), but axis is given by direction and point
#
# @ref swig_MakeMultiRotation "Example"
def MakeMultiRotation1DByStep(self, aShape, aDir, aPoint, anAngle, aNbTimes, theName=None):
"""
The same, as geompy.MultiRotate1D, but axis is given by direction and point
Example of usage:
pz = geompy.MakeVertex(0, 0, 100)
vy = geompy.MakeVectorDXDYDZ(0, 100, 0)
MultiRot1D = geompy.MakeMultiRotation1DByStep(prism, vy, pz, math.pi/3, 6)
"""
# Example: see GEOM_TestOthers.py
aVec = self.MakeLine(aPoint,aDir)
# note: auto-publishing is done in self.MultiRotate1D()
anObj = self.MultiRotate1DByStep(aShape, aVec, anAngle, aNbTimes, theName)
return anObj
## The same, as MultiRotate2DNbTimes(), but axis is given by direction and point
#
# @ref swig_MakeMultiRotation "Example"
def MakeMultiRotation2DNbTimes(self, aShape, aDir, aPoint, nbtimes1, aStep, nbtimes2, theName=None):
"""
The same, as MultiRotate2DNbTimes(), but axis is given by direction and point
Example of usage:
pz = geompy.MakeVertex(0, 0, 100)
vy = geompy.MakeVectorDXDYDZ(0, 100, 0)
MultiRot2D = geompy.MakeMultiRotation2DNbTimes(f12, vy, pz, 6, 30, 3)
"""
# Example: see GEOM_TestOthers.py
aVec = self.MakeLine(aPoint,aDir)
# note: auto-publishing is done in self.MultiRotate2DNbTimes()
anObj = self.MultiRotate2DNbTimes(aShape, aVec, nbtimes1, aStep, nbtimes2, theName)
return anObj
## The same, as MultiRotate2DByStep(), but axis is given by direction and point
#
# @ref swig_MakeMultiRotation "Example"
def MakeMultiRotation2DByStep(self, aShape, aDir, aPoint, anAngle, nbtimes1, aStep, nbtimes2, theName=None):
"""
The same, as MultiRotate2DByStep(), but axis is given by direction and point
Example of usage:
pz = geompy.MakeVertex(0, 0, 100)
vy = geompy.MakeVectorDXDYDZ(0, 100, 0)
MultiRot2D = geompy.MakeMultiRotation2DByStep(f12, vy, pz, math.pi/4, 6, 30, 3)
"""
# Example: see GEOM_TestOthers.py
aVec = self.MakeLine(aPoint,aDir)
# note: auto-publishing is done in self.MultiRotate2D()
anObj = self.MultiRotate2DByStep(aShape, aVec, anAngle, nbtimes1, aStep, nbtimes2, theName)
return anObj
# end of l3_transform
## @}
## @addtogroup l3_transform_d
## @{
## Deprecated method. Use MultiRotate1DNbTimes instead.
def MultiRotate1D(self, theObject, theAxis, theNbTimes, theName=None):
"""
Deprecated method. Use MultiRotate1DNbTimes instead.
"""
print "The method MultiRotate1D is DEPRECATED. Use MultiRotate1DNbTimes instead."
return self.MultiRotate1DNbTimes(theObject, theAxis, theNbTimes, theName)
## The same, as MultiRotate2DByStep(), but theAngle is in degrees.
# This method is DEPRECATED. Use MultiRotate2DByStep() instead.
@ManageTransactions("TrsfOp")
def MultiRotate2D(self, theObject, theAxis, theAngle, theNbTimes1, theStep, theNbTimes2, theName=None):
"""
The same, as MultiRotate2DByStep(), but theAngle is in degrees.
This method is DEPRECATED. Use MultiRotate2DByStep() instead.
Example of usage:
rot2d = geompy.MultiRotate2D(prism, vect, 60, 4, 50, 5)
"""
print "The method MultiRotate2D is DEPRECATED. Use MultiRotate2DByStep instead."
theAngle, theNbTimes1, theStep, theNbTimes2, Parameters = ParseParameters(theAngle, theNbTimes1, theStep, theNbTimes2)
anObj = self.TrsfOp.MultiRotate2D(theObject, theAxis, theAngle, theNbTimes1, theStep, theNbTimes2)
RaiseIfFailed("MultiRotate2D", self.TrsfOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "multirotation")
return anObj
## The same, as MultiRotate1D(), but axis is given by direction and point
# This method is DEPRECATED. Use MakeMultiRotation1DNbTimes instead.
def MakeMultiRotation1D(self, aShape, aDir, aPoint, aNbTimes, theName=None):
"""
The same, as geompy.MultiRotate1D, but axis is given by direction and point.
This method is DEPRECATED. Use MakeMultiRotation1DNbTimes instead.
Example of usage:
pz = geompy.MakeVertex(0, 0, 100)
vy = geompy.MakeVectorDXDYDZ(0, 100, 0)
MultiRot1D = geompy.MakeMultiRotation1D(prism, vy, pz, 6)
"""
print "The method MakeMultiRotation1D is DEPRECATED. Use MakeMultiRotation1DNbTimes instead."
aVec = self.MakeLine(aPoint,aDir)
# note: auto-publishing is done in self.MultiRotate1D()
anObj = self.MultiRotate1D(aShape, aVec, aNbTimes, theName)
return anObj
## The same, as MultiRotate2D(), but axis is given by direction and point
# This method is DEPRECATED. Use MakeMultiRotation2DByStep instead.
def MakeMultiRotation2D(self, aShape, aDir, aPoint, anAngle, nbtimes1, aStep, nbtimes2, theName=None):
"""
The same, as MultiRotate2D(), but axis is given by direction and point
This method is DEPRECATED. Use MakeMultiRotation2DByStep instead.
Example of usage:
pz = geompy.MakeVertex(0, 0, 100)
vy = geompy.MakeVectorDXDYDZ(0, 100, 0)
MultiRot2D = geompy.MakeMultiRotation2D(f12, vy, pz, 45, 6, 30, 3)
"""
print "The method MakeMultiRotation2D is DEPRECATED. Use MakeMultiRotation2DByStep instead."
aVec = self.MakeLine(aPoint,aDir)
# note: auto-publishing is done in self.MultiRotate2D()
anObj = self.MultiRotate2D(aShape, aVec, anAngle, nbtimes1, aStep, nbtimes2, theName)
return anObj
# end of l3_transform_d
## @}
## @addtogroup l3_local
## @{
## Perform a fillet on all edges of the given shape.
# @param theShape Shape, to perform fillet on.
# @param theR Fillet radius.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_fillet "Example 1"
# \n @ref swig_MakeFilletAll "Example 2"
@ManageTransactions("LocalOp")
def MakeFilletAll(self, theShape, theR, theName=None):
"""
Perform a fillet on all edges of the given shape.
Parameters:
theShape Shape, to perform fillet on.
theR Fillet radius.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the result shape.
Example of usage:
filletall = geompy.MakeFilletAll(prism, 10.)
"""
# Example: see GEOM_TestOthers.py
theR,Parameters = ParseParameters(theR)
anObj = self.LocalOp.MakeFilletAll(theShape, theR)
RaiseIfFailed("MakeFilletAll", self.LocalOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "fillet")
return anObj
## Perform a fillet on the specified edges/faces of the given shape
# @param theShape Shape, to perform fillet on.
# @param theR Fillet radius.
# @param theShapeType Type of shapes in <VAR>theListShapes</VAR> (see ShapeType())
# @param theListShapes Global indices of edges/faces to perform fillet on.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note Global index of sub-shape can be obtained, using method GetSubShapeID().
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_fillet "Example"
@ManageTransactions("LocalOp")
def MakeFillet(self, theShape, theR, theShapeType, theListShapes, theName=None):
"""
Perform a fillet on the specified edges/faces of the given shape
Parameters:
theShape Shape, to perform fillet on.
theR Fillet radius.
theShapeType Type of shapes in theListShapes (see geompy.ShapeTypes)
theListShapes Global indices of edges/faces to perform fillet on.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Note:
Global index of sub-shape can be obtained, using method geompy.GetSubShapeID
Returns:
New GEOM.GEOM_Object, containing the result shape.
Example of usage:
# get the list of IDs (IDList) for the fillet
prism_edges = geompy.SubShapeAllSortedCentres(prism, geompy.ShapeType["EDGE"])
IDlist_e = []
IDlist_e.append(geompy.GetSubShapeID(prism, prism_edges[0]))
IDlist_e.append(geompy.GetSubShapeID(prism, prism_edges[1]))
IDlist_e.append(geompy.GetSubShapeID(prism, prism_edges[2]))
# make a fillet on the specified edges of the given shape
fillet = geompy.MakeFillet(prism, 10., geompy.ShapeType["EDGE"], IDlist_e)
"""
# Example: see GEOM_TestAll.py
theR,Parameters = ParseParameters(theR)
anObj = None
if theShapeType == self.ShapeType["EDGE"]:
anObj = self.LocalOp.MakeFilletEdges(theShape, theR, theListShapes)
RaiseIfFailed("MakeFilletEdges", self.LocalOp)
else:
anObj = self.LocalOp.MakeFilletFaces(theShape, theR, theListShapes)
RaiseIfFailed("MakeFilletFaces", self.LocalOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "fillet")
return anObj
## The same that MakeFillet() but with two Fillet Radius R1 and R2
@ManageTransactions("LocalOp")
def MakeFilletR1R2(self, theShape, theR1, theR2, theShapeType, theListShapes, theName=None):
"""
The same that geompy.MakeFillet but with two Fillet Radius R1 and R2
Example of usage:
# get the list of IDs (IDList) for the fillet
prism_edges = geompy.SubShapeAllSortedCentres(prism, geompy.ShapeType["EDGE"])
IDlist_e = []
IDlist_e.append(geompy.GetSubShapeID(prism, prism_edges[0]))
IDlist_e.append(geompy.GetSubShapeID(prism, prism_edges[1]))
IDlist_e.append(geompy.GetSubShapeID(prism, prism_edges[2]))
# make a fillet on the specified edges of the given shape
fillet = geompy.MakeFillet(prism, 10., 15., geompy.ShapeType["EDGE"], IDlist_e)
"""
theR1,theR2,Parameters = ParseParameters(theR1,theR2)
anObj = None
if theShapeType == self.ShapeType["EDGE"]:
anObj = self.LocalOp.MakeFilletEdgesR1R2(theShape, theR1, theR2, theListShapes)
RaiseIfFailed("MakeFilletEdgesR1R2", self.LocalOp)
else:
anObj = self.LocalOp.MakeFilletFacesR1R2(theShape, theR1, theR2, theListShapes)
RaiseIfFailed("MakeFilletFacesR1R2", self.LocalOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "fillet")
return anObj
## Perform a fillet on the specified edges of the given shape
# @param theShape Wire Shape to perform fillet on.
# @param theR Fillet radius.
# @param theListOfVertexes Global indices of vertexes to perform fillet on.
# \note Global index of sub-shape can be obtained, using method GetSubShapeID()
# \note The list of vertices could be empty,
    #  in this case fillet will be done at all vertices in wire
# @param doIgnoreSecantVertices If FALSE, fillet radius is always limited
# by the length of the edges, nearest to the fillet vertex.
# But sometimes the next edge is C1 continuous with the one, nearest to
# the fillet point, and such two (or more) edges can be united to allow
# bigger radius. Set this flag to TRUE to allow collinear edges union,
# thus ignoring the secant vertex (vertices).
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_fillet2d "Example"
@ManageTransactions("LocalOp")
def MakeFillet1D(self, theShape, theR, theListOfVertexes, doIgnoreSecantVertices = True, theName=None):
"""
Perform a fillet on the specified edges of the given shape
Parameters:
theShape Wire Shape to perform fillet on.
theR Fillet radius.
theListOfVertexes Global indices of vertexes to perform fillet on.
doIgnoreSecantVertices If FALSE, fillet radius is always limited
by the length of the edges, nearest to the fillet vertex.
But sometimes the next edge is C1 continuous with the one, nearest to
the fillet point, and such two (or more) edges can be united to allow
bigger radius. Set this flag to TRUE to allow collinear edges union,
thus ignoring the secant vertex (vertices).
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Note:
Global index of sub-shape can be obtained, using method geompy.GetSubShapeID
The list of vertices could be empty,in this case fillet will done done at all vertices in wire
Returns:
New GEOM.GEOM_Object, containing the result shape.
Example of usage:
# create wire
Wire_1 = geompy.MakeWire([Edge_12, Edge_7, Edge_11, Edge_6, Edge_1,Edge_4])
# make fillet at given wire vertices with giver radius
Fillet_1D_1 = geompy.MakeFillet1D(Wire_1, 55, [3, 4, 6, 8, 10])
"""
# Example: see GEOM_TestAll.py
theR,doIgnoreSecantVertices,Parameters = ParseParameters(theR,doIgnoreSecantVertices)
anObj = self.LocalOp.MakeFillet1D(theShape, theR, theListOfVertexes, doIgnoreSecantVertices)
RaiseIfFailed("MakeFillet1D", self.LocalOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "fillet")
return anObj
## Perform a fillet at the specified vertices of the given face/shell.
# @param theShape Face or Shell shape to perform fillet on.
# @param theR Fillet radius.
# @param theListOfVertexes Global indices of vertexes to perform fillet on.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note Global index of sub-shape can be obtained, using method GetSubShapeID().
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_fillet2d "Example"
@ManageTransactions("LocalOp")
def MakeFillet2D(self, theShape, theR, theListOfVertexes, theName=None):
"""
Perform a fillet at the specified vertices of the given face/shell.
Parameters:
theShape Face or Shell shape to perform fillet on.
theR Fillet radius.
theListOfVertexes Global indices of vertexes to perform fillet on.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Note:
Global index of sub-shape can be obtained, using method geompy.GetSubShapeID
Returns:
New GEOM.GEOM_Object, containing the result shape.
Example of usage:
face = geompy.MakeFaceHW(100, 100, 1)
fillet2d = geompy.MakeFillet2D(face, 30, [7, 9])
"""
# Example: see GEOM_TestAll.py
theR,Parameters = ParseParameters(theR)
anObj = self.LocalOp.MakeFillet2D(theShape, theR, theListOfVertexes)
RaiseIfFailed("MakeFillet2D", self.LocalOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "fillet")
return anObj
## Perform a symmetric chamfer on all edges of the given shape.
# @param theShape Shape, to perform chamfer on.
# @param theD Chamfer size along each face.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_chamfer "Example 1"
# \n @ref swig_MakeChamferAll "Example 2"
@ManageTransactions("LocalOp")
def MakeChamferAll(self, theShape, theD, theName=None):
"""
Perform a symmetric chamfer on all edges of the given shape.
Parameters:
theShape Shape, to perform chamfer on.
theD Chamfer size along each face.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the result shape.
Example of usage:
chamfer_all = geompy.MakeChamferAll(prism, 10.)
"""
# Example: see GEOM_TestOthers.py
theD,Parameters = ParseParameters(theD)
anObj = self.LocalOp.MakeChamferAll(theShape, theD)
RaiseIfFailed("MakeChamferAll", self.LocalOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "chamfer")
return anObj
## Perform a chamfer on edges, common to the specified faces,
# with distance D1 on the Face1
# @param theShape Shape, to perform chamfer on.
# @param theD1 Chamfer size along \a theFace1.
# @param theD2 Chamfer size along \a theFace2.
# @param theFace1,theFace2 Global indices of two faces of \a theShape.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note Global index of sub-shape can be obtained, using method GetSubShapeID().
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_chamfer "Example"
@ManageTransactions("LocalOp")
def MakeChamferEdge(self, theShape, theD1, theD2, theFace1, theFace2, theName=None):
"""
Perform a chamfer on edges, common to the specified faces,
with distance D1 on the Face1
Parameters:
theShape Shape, to perform chamfer on.
theD1 Chamfer size along theFace1.
theD2 Chamfer size along theFace2.
theFace1,theFace2 Global indices of two faces of theShape.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Note:
Global index of sub-shape can be obtained, using method geompy.GetSubShapeID
Returns:
New GEOM.GEOM_Object, containing the result shape.
Example of usage:
prism_faces = geompy.SubShapeAllSortedCentres(prism, geompy.ShapeType["FACE"])
f_ind_1 = geompy.GetSubShapeID(prism, prism_faces[0])
f_ind_2 = geompy.GetSubShapeID(prism, prism_faces[1])
chamfer_e = geompy.MakeChamferEdge(prism, 10., 10., f_ind_1, f_ind_2)
"""
# Example: see GEOM_TestAll.py
theD1,theD2,Parameters = ParseParameters(theD1,theD2)
anObj = self.LocalOp.MakeChamferEdge(theShape, theD1, theD2, theFace1, theFace2)
RaiseIfFailed("MakeChamferEdge", self.LocalOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "chamfer")
return anObj
## Perform a chamfer on edges
# @param theShape Shape, to perform chamfer on.
# @param theD Chamfer length
# @param theAngle Angle of chamfer (angle in radians or a name of variable which defines angle in degrees)
# @param theFace1,theFace2 Global indices of two faces of \a theShape.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note Global index of sub-shape can be obtained, using method GetSubShapeID().
#
# @return New GEOM.GEOM_Object, containing the result shape.
@ManageTransactions("LocalOp")
def MakeChamferEdgeAD(self, theShape, theD, theAngle, theFace1, theFace2, theName=None):
"""
Perform a chamfer on edges
Parameters:
theShape Shape, to perform chamfer on.
theD1 Chamfer size along theFace1.
theAngle Angle of chamfer (angle in radians or a name of variable which defines angle in degrees).
theFace1,theFace2 Global indices of two faces of theShape.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Note:
Global index of sub-shape can be obtained, using method geompy.GetSubShapeID
Returns:
New GEOM.GEOM_Object, containing the result shape.
Example of usage:
prism_faces = geompy.SubShapeAllSortedCentres(prism, geompy.ShapeType["FACE"])
f_ind_1 = geompy.GetSubShapeID(prism, prism_faces[0])
f_ind_2 = geompy.GetSubShapeID(prism, prism_faces[1])
ang = 30
chamfer_e = geompy.MakeChamferEdge(prism, 10., ang, f_ind_1, f_ind_2)
"""
flag = False
if isinstance(theAngle,str):
flag = True
theD,theAngle,Parameters = ParseParameters(theD,theAngle)
if flag:
theAngle = theAngle*math.pi/180.0
anObj = self.LocalOp.MakeChamferEdgeAD(theShape, theD, theAngle, theFace1, theFace2)
RaiseIfFailed("MakeChamferEdgeAD", self.LocalOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "chamfer")
return anObj
## Perform a chamfer on all edges of the specified faces,
# with distance D1 on the first specified face (if several for one edge)
# @param theShape Shape, to perform chamfer on.
# @param theD1 Chamfer size along face from \a theFaces. If both faces,
# connected to the edge, are in \a theFaces, \a theD1
# will be get along face, which is nearer to \a theFaces beginning.
# @param theD2 Chamfer size along another of two faces, connected to the edge.
# @param theFaces Sequence of global indices of faces of \a theShape.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note Global index of sub-shape can be obtained, using method GetSubShapeID().
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_chamfer "Example"
@ManageTransactions("LocalOp")
def MakeChamferFaces(self, theShape, theD1, theD2, theFaces, theName=None):
"""
Perform a chamfer on all edges of the specified faces,
with distance D1 on the first specified face (if several for one edge)
Parameters:
theShape Shape, to perform chamfer on.
theD1 Chamfer size along face from theFaces. If both faces,
connected to the edge, are in theFaces, theD1
will be get along face, which is nearer to theFaces beginning.
theD2 Chamfer size along another of two faces, connected to the edge.
theFaces Sequence of global indices of faces of theShape.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Note: Global index of sub-shape can be obtained, using method geompy.GetSubShapeID().
Returns:
New GEOM.GEOM_Object, containing the result shape.
"""
# Example: see GEOM_TestAll.py
theD1,theD2,Parameters = ParseParameters(theD1,theD2)
anObj = self.LocalOp.MakeChamferFaces(theShape, theD1, theD2, theFaces)
RaiseIfFailed("MakeChamferFaces", self.LocalOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "chamfer")
return anObj
    ## The same as MakeChamferFaces(), but theD is the chamfer length and
    #  theAngle is the angle of chamfer (angle in radians or a name of variable which defines angle in degrees)
#
# @ref swig_FilletChamfer "Example"
@ManageTransactions("LocalOp")
def MakeChamferFacesAD(self, theShape, theD, theAngle, theFaces, theName=None):
"""
The Same that geompy.MakeChamferFaces but with params theD is chamfer lenght and
theAngle is Angle of chamfer (angle in radians or a name of variable which defines angle in degrees)
"""
flag = False
if isinstance(theAngle,str):
flag = True
theD,theAngle,Parameters = ParseParameters(theD,theAngle)
if flag:
theAngle = theAngle*math.pi/180.0
anObj = self.LocalOp.MakeChamferFacesAD(theShape, theD, theAngle, theFaces)
RaiseIfFailed("MakeChamferFacesAD", self.LocalOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "chamfer")
return anObj
## Perform a chamfer on edges,
# with distance D1 on the first specified face (if several for one edge)
# @param theShape Shape, to perform chamfer on.
# @param theD1,theD2 Chamfer size
# @param theEdges Sequence of edges of \a theShape.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref swig_FilletChamfer "Example"
@ManageTransactions("LocalOp")
def MakeChamferEdges(self, theShape, theD1, theD2, theEdges, theName=None):
"""
Perform a chamfer on edges,
with distance D1 on the first specified face (if several for one edge)
Parameters:
theShape Shape, to perform chamfer on.
theD1,theD2 Chamfer size
theEdges Sequence of edges of theShape.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the result shape.
"""
theD1,theD2,Parameters = ParseParameters(theD1,theD2)
anObj = self.LocalOp.MakeChamferEdges(theShape, theD1, theD2, theEdges)
RaiseIfFailed("MakeChamferEdges", self.LocalOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "chamfer")
return anObj
    ## The same as MakeChamferEdges(), but theD is the chamfer length and
    #  theAngle is the angle of chamfer (angle in radians or a name of variable which defines angle in degrees)
@ManageTransactions("LocalOp")
def MakeChamferEdgesAD(self, theShape, theD, theAngle, theEdges, theName=None):
"""
The Same that geompy.MakeChamferEdges but with params theD is chamfer lenght and
theAngle is Angle of chamfer (angle in radians or a name of variable which defines angle in degrees)
"""
flag = False
if isinstance(theAngle,str):
flag = True
theD,theAngle,Parameters = ParseParameters(theD,theAngle)
if flag:
theAngle = theAngle*math.pi/180.0
anObj = self.LocalOp.MakeChamferEdgesAD(theShape, theD, theAngle, theEdges)
RaiseIfFailed("MakeChamferEdgesAD", self.LocalOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "chamfer")
return anObj
## @sa MakeChamferEdge(), MakeChamferFaces()
#
# @ref swig_MakeChamfer "Example"
def MakeChamfer(self, aShape, d1, d2, aShapeType, ListShape, theName=None):
"""
See geompy.MakeChamferEdge() and geompy.MakeChamferFaces() functions for more information.
"""
# Example: see GEOM_TestOthers.py
anObj = None
# note: auto-publishing is done in self.MakeChamferEdge() or self.MakeChamferFaces()
if aShapeType == self.ShapeType["EDGE"]:
anObj = self.MakeChamferEdge(aShape,d1,d2,ListShape[0],ListShape[1],theName)
else:
anObj = self.MakeChamferFaces(aShape,d1,d2,ListShape,theName)
return anObj
## Remove material from a solid by extrusion of the base shape on the given distance.
# @param theInit Shape to remove material from. It must be a solid or
# a compound made of a single solid.
# @param theBase Closed edge or wire defining the base shape to be extruded.
# @param theH Prism dimension along the normal to theBase
# @param theAngle Draft angle in degrees.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the initial shape with removed material
#
# @ref tui_creation_prism "Example"
@ManageTransactions("PrimOp")
def MakeExtrudedCut(self, theInit, theBase, theH, theAngle, theName=None):
"""
Add material to a solid by extrusion of the base shape on the given distance.
Parameters:
theInit Shape to remove material from. It must be a solid or a compound made of a single solid.
theBase Closed edge or wire defining the base shape to be extruded.
theH Prism dimension along the normal to theBase
theAngle Draft angle in degrees.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the initial shape with removed material.
"""
# Example: see GEOM_TestAll.py
#theH,Parameters = ParseParameters(theH)
anObj = self.PrimOp.MakeDraftPrism(theInit, theBase, theH, theAngle, False)
RaiseIfFailed("MakeExtrudedBoss", self.PrimOp)
#anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "extrudedCut")
return anObj
## Add material to a solid by extrusion of the base shape on the given distance.
# @param theInit Shape to add material to. It must be a solid or
# a compound made of a single solid.
# @param theBase Closed edge or wire defining the base shape to be extruded.
# @param theH Prism dimension along the normal to theBase
# @param theAngle Draft angle in degrees.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the initial shape with added material
#
# @ref tui_creation_prism "Example"
@ManageTransactions("PrimOp")
def MakeExtrudedBoss(self, theInit, theBase, theH, theAngle, theName=None):
"""
Add material to a solid by extrusion of the base shape on the given distance.
Parameters:
theInit Shape to add material to. It must be a solid or a compound made of a single solid.
theBase Closed edge or wire defining the base shape to be extruded.
theH Prism dimension along the normal to theBase
theAngle Draft angle in degrees.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the initial shape with added material.
"""
# Example: see GEOM_TestAll.py
#theH,Parameters = ParseParameters(theH)
anObj = self.PrimOp.MakeDraftPrism(theInit, theBase, theH, theAngle, True)
RaiseIfFailed("MakeExtrudedBoss", self.PrimOp)
#anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "extrudedBoss")
return anObj
# end of l3_local
## @}
## @addtogroup l3_basic_op
## @{
    ## Perform an Archimede operation on the given shape with given parameters.
    #  The object presenting the resulting face is returned.
    #  @param theShape Shape to be put in water.
    #  @param theWeight Weight of the shape.
    #  @param theWaterDensity Density of the water.
    #  @param theMeshDeflection Deflection of the mesh, used to compute the section.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing a section of \a theShape
# by a plane, corresponding to water level.
#
# @ref tui_archimede "Example"
@ManageTransactions("LocalOp")
def Archimede(self, theShape, theWeight, theWaterDensity, theMeshDeflection, theName=None):
"""
Perform an Archimde operation on the given shape with given parameters.
The object presenting the resulting face is returned.
Parameters:
theShape Shape to be put in water.
theWeight Weight og the shape.
theWaterDensity Density of the water.
theMeshDeflection Deflection of the mesh, using to compute the section.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing a section of theShape
by a plane, corresponding to water level.
"""
# Example: see GEOM_TestAll.py
theWeight,theWaterDensity,theMeshDeflection,Parameters = ParseParameters(
theWeight,theWaterDensity,theMeshDeflection)
anObj = self.LocalOp.MakeArchimede(theShape, theWeight, theWaterDensity, theMeshDeflection)
RaiseIfFailed("MakeArchimede", self.LocalOp)
anObj.SetParameters(Parameters)
self._autoPublish(anObj, theName, "archimede")
return anObj
# end of l3_basic_op
## @}
## @addtogroup l2_measure
## @{
## Get point coordinates
# @return [x, y, z]
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def PointCoordinates(self,Point):
"""
Get point coordinates
Returns:
[x, y, z]
"""
# Example: see GEOM_TestMeasures.py
aTuple = self.MeasuOp.PointCoordinates(Point)
RaiseIfFailed("PointCoordinates", self.MeasuOp)
return aTuple
## Get vector coordinates
# @return [x, y, z]
#
# @ref tui_measurement_tools_page "Example"
def VectorCoordinates(self,Vector):
"""
Get vector coordinates
Returns:
[x, y, z]
"""
p1=self.GetFirstVertex(Vector)
p2=self.GetLastVertex(Vector)
X1=self.PointCoordinates(p1)
X2=self.PointCoordinates(p2)
return (X2[0]-X1[0],X2[1]-X1[1],X2[2]-X1[2])
## Compute cross product
# @return vector w=u^v
#
# @ref tui_measurement_tools_page "Example"
def CrossProduct(self, Vector1, Vector2):
        """
        Build a new vector equal to the cross product of two vectors.

        Parameters:
            Vector1,Vector2 vectors to multiply.
        Returns:
            New vector object w = u ^ v.
        """
        ux, uy, uz = self.VectorCoordinates(Vector1)
        vx, vy, vz = self.VectorCoordinates(Vector2)
        return self.MakeVectorDXDYDZ(uy * vz - uz * vy,
                                     uz * vx - ux * vz,
                                     ux * vy - uy * vx)
## Compute dot product
# @return dot product p=u.v
#
# @ref tui_measurement_tools_page "Example"
def DotProduct(self, Vector1, Vector2):
        """
        Compute the dot (scalar) product of two vectors.
        Parameters:
            Vector1,Vector2 vectors to multiply.
        Returns: dot product p=u.v
        """
        u=self.VectorCoordinates(Vector1)
        v=self.VectorCoordinates(Vector2)
        p=u[0]*v[0]+u[1]*v[1]+u[2]*v[2]
        return p
## Get summarized length of all wires,
# area of surface and volume of the given shape.
# @param theShape Shape to define properties of.
# @return [theLength, theSurfArea, theVolume]\n
# theLength: Summarized length of all wires of the given shape.\n
# theSurfArea: Area of surface of the given shape.\n
# theVolume: Volume of the given shape.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def BasicProperties(self,theShape):
        """
        Compute the basic properties of a shape: total wire length,
        surface area and volume.

        Parameters:
            theShape Shape to define properties of.
        Returns:
            [theLength, theSurfArea, theVolume]
            theLength: summarized length of all wires of the given shape.
            theSurfArea: area of the surface of the given shape.
            theVolume: volume of the given shape.
        """
        # Example: see GEOM_TestMeasures.py
        properties = self.MeasuOp.GetBasicProperties(theShape)
        RaiseIfFailed("GetBasicProperties", self.MeasuOp)
        return properties
## Get parameters of bounding box of the given shape
# @param theShape Shape to obtain bounding box of.
# @param precise TRUE for precise computation; FALSE for fast one.
# @return [Xmin,Xmax, Ymin,Ymax, Zmin,Zmax]
# Xmin,Xmax: Limits of shape along OX axis.
# Ymin,Ymax: Limits of shape along OY axis.
# Zmin,Zmax: Limits of shape along OZ axis.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def BoundingBox (self, theShape, precise=False):
        """
        Compute the axis-aligned bounding box of the given shape.

        Parameters:
            theShape Shape to obtain bounding box of.
            precise TRUE for precise computation; FALSE for fast one.
        Returns:
            [Xmin,Xmax, Ymin,Ymax, Zmin,Zmax] - limits of the shape
            along the OX, OY and OZ axes respectively.
        """
        # Example: see GEOM_TestMeasures.py
        limits = self.MeasuOp.GetBoundingBox(theShape, precise)
        RaiseIfFailed("GetBoundingBox", self.MeasuOp)
        return limits
## Get bounding box of the given shape
# @param theShape Shape to obtain bounding box of.
# @param precise TRUE for precise computation; FALSE for fast one.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created box.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def MakeBoundingBox (self, theShape, precise=False, theName=None):
        """
        Create a box shape enclosing the given shape.

        Parameters:
            theShape Shape to obtain bounding box of.
            precise TRUE for precise computation; FALSE for fast one.
            theName Object name; when specified, this parameter is used
                    for result publication in the study. Otherwise, if automatic
                    publication is switched on, default value is used for result name.
        Returns:
            New GEOM.GEOM_Object, containing the created box.
        """
        # Example: see GEOM_TestMeasures.py
        box = self.MeasuOp.MakeBoundingBox(theShape, precise)
        RaiseIfFailed("MakeBoundingBox", self.MeasuOp)
        self._autoPublish(box, theName, "bndbox")
        return box
## Get inertia matrix and moments of inertia of theShape.
# @param theShape Shape to calculate inertia of.
# @return [I11,I12,I13, I21,I22,I23, I31,I32,I33, Ix,Iy,Iz]
# I(1-3)(1-3): Components of the inertia matrix of the given shape.
# Ix,Iy,Iz: Moments of inertia of the given shape.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def Inertia(self,theShape):
        """
        Compute the inertia matrix and moments of inertia of theShape.

        Parameters:
            theShape Shape to calculate inertia of.
        Returns:
            [I11,I12,I13, I21,I22,I23, I31,I32,I33, Ix,Iy,Iz]
            I(1-3)(1-3): components of the inertia matrix of the given shape.
            Ix,Iy,Iz: moments of inertia of the given shape.
        """
        # Example: see GEOM_TestMeasures.py
        inertia = self.MeasuOp.GetInertia(theShape)
        RaiseIfFailed("GetInertia", self.MeasuOp)
        return inertia
## Get if coords are included in the shape (ST_IN or ST_ON)
# @param theShape Shape
# @param coords list of points coordinates [x1, y1, z1, x2, y2, z2, ...]
# @param tolerance to be used (default is 1.0e-7)
# @return list_of_boolean = [res1, res2, ...]
@ManageTransactions("MeasuOp")
def AreCoordsInside(self, theShape, coords, tolerance=1.e-7):
        """
        Tell, for every given point, whether it lies inside or on the
        boundary of theShape (ST_IN or ST_ON).

        Parameters:
            theShape Shape
            coords list of points coordinates [x1, y1, z1, x2, y2, z2, ...]
            tolerance to be used (default is 1.0e-7)
        Returns:
            list_of_boolean = [res1, res2, ...]
        """
        classification = self.MeasuOp.AreCoordsInside(theShape, coords, tolerance)
        return classification
## Get minimal distance between the given shapes.
# @param theShape1,theShape2 Shapes to find minimal distance between.
# @return Value of the minimal distance between the given shapes.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def MinDistance(self, theShape1, theShape2):
        """
        Compute the minimal distance between two shapes.

        Parameters:
            theShape1,theShape2 Shapes to find minimal distance between.
        Returns:
            Value of the minimal distance between the given shapes.
        """
        # Example: see GEOM_TestMeasures.py
        # The engine returns a tuple whose first element is the distance
        # itself; only that value is exposed by this method.
        result = self.MeasuOp.GetMinDistance(theShape1, theShape2)
        RaiseIfFailed("GetMinDistance", self.MeasuOp)
        return result[0]
## Get minimal distance between the given shapes.
# @param theShape1,theShape2 Shapes to find minimal distance between.
# @return Value of the minimal distance between the given shapes, in form of list
# [Distance, DX, DY, DZ].
#
# @ref swig_all_measure "Example"
@ManageTransactions("MeasuOp")
def MinDistanceComponents(self, theShape1, theShape2):
        """
        Compute the minimal distance between two shapes together with the
        components of the vector between the two closest points.

        Parameters:
            theShape1,theShape2 Shapes to find minimal distance between.
        Returns:
            Value of the minimal distance between the given shapes,
            in form of list [Distance, DX, DY, DZ].
        """
        # Example: see GEOM_TestMeasures.py
        raw = self.MeasuOp.GetMinDistance(theShape1, theShape2)
        RaiseIfFailed("GetMinDistance", self.MeasuOp)
        # Elements 1-3 and 4-6 hold the coordinates of the two closest
        # points; their difference gives the distance vector components.
        return [raw[0], raw[4] - raw[1], raw[5] - raw[2], raw[6] - raw[3]]
## Get closest points of the given shapes.
# @param theShape1,theShape2 Shapes to find closest points of.
# @return The number of found solutions (-1 in case of infinite number of
# solutions) and a list of (X, Y, Z) coordinates for all couples of points.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def ClosestPoints (self, theShape1, theShape2):
        """
        Find the closest points of two shapes.

        Parameters:
            theShape1,theShape2 Shapes to find closest points of.
        Returns:
            The number of found solutions (-1 in case of infinite number of
            solutions) and a list of (X, Y, Z) coordinates for all couples of points.
        """
        # Example: see GEOM_TestMeasures.py
        solutions = self.MeasuOp.ClosestPoints(theShape1, theShape2)
        RaiseIfFailed("ClosestPoints", self.MeasuOp)
        return solutions
## Get angle between the given shapes in degrees.
# @param theShape1,theShape2 Lines or linear edges to find angle between.
# @note If both arguments are vectors, the angle is computed in accordance
# with their orientations, otherwise the minimum angle is computed.
# @return Value of the angle between the given shapes in degrees.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def GetAngle(self, theShape1, theShape2):
        """
        Get the angle between the given shapes, in degrees.

        Parameters:
            theShape1,theShape2 Lines or linear edges to find angle between.
        Note:
            If both arguments are vectors, the angle is computed in accordance
            with their orientations, otherwise the minimum angle is computed.
        Returns:
            Value of the angle between the given shapes in degrees.
        """
        # Example: see GEOM_TestMeasures.py
        degrees = self.MeasuOp.GetAngle(theShape1, theShape2)
        RaiseIfFailed("GetAngle", self.MeasuOp)
        return degrees
## Get angle between the given shapes in radians.
# @param theShape1,theShape2 Lines or linear edges to find angle between.
# @note If both arguments are vectors, the angle is computed in accordance
# with their orientations, otherwise the minimum angle is computed.
# @return Value of the angle between the given shapes in radians.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def GetAngleRadians(self, theShape1, theShape2):
        """
        Get the angle between the given shapes, in radians.

        Parameters:
            theShape1,theShape2 Lines or linear edges to find angle between.
        Note:
            If both arguments are vectors, the angle is computed in accordance
            with their orientations, otherwise the minimum angle is computed.
        Returns:
            Value of the angle between the given shapes in radians.
        """
        # Example: see GEOM_TestMeasures.py
        # The engine reports degrees; convert to radians before returning.
        degrees = self.MeasuOp.GetAngle(theShape1, theShape2)
        RaiseIfFailed("GetAngle", self.MeasuOp)
        return degrees * math.pi / 180.
## Get angle between the given vectors in degrees.
# @param theShape1,theShape2 Vectors to find angle between.
# @param theFlag If True, the normal vector is defined by the two vectors cross,
# if False, the opposite vector to the normal vector is used.
# @return Value of the angle between the given vectors in degrees.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def GetAngleVectors(self, theShape1, theShape2, theFlag = True):
        """
        Get the angle between the given vectors, in degrees.

        Parameters:
            theShape1,theShape2 Vectors to find angle between.
            theFlag If True, the normal vector is defined by the two vectors cross,
                    if False, the opposite vector to the normal vector is used.
        Returns:
            Value of the angle between the given vectors in degrees.
        """
        anAngle = self.MeasuOp.GetAngleBtwVectors(theShape1, theShape2)
        # Check the operation status before deriving a value from the result;
        # the original code computed 360 - angle on a possibly-failed result.
        RaiseIfFailed("GetAngleVectors", self.MeasuOp)
        if not theFlag:
            # Opposite normal orientation: return the explementary angle.
            anAngle = 360. - anAngle
        return anAngle
## The same as GetAngleVectors, but the result is in radians.
def GetAngleRadiansVectors(self, theShape1, theShape2, theFlag = True):
        """
        Same as GetAngleVectors, but the angle is returned in radians.

        Parameters:
            theShape1,theShape2 Vectors to find angle between.
            theFlag If True, the normal vector is defined by the two vectors cross,
                    if False, the opposite vector to the normal vector is used.
        Returns:
            Value of the angle between the given vectors in radians.
        """
        degrees = self.GetAngleVectors(theShape1, theShape2, theFlag)
        return degrees * math.pi / 180.
## @name Curve Curvature Measurement
# Methods for receiving radius of curvature of curves
# in the given point
## @{
## Measure curvature of a curve at a point, set by parameter.
# @param theCurve a curve.
# @param theParam parameter.
# @return radius of curvature of \a theCurve.
#
# @ref swig_todo "Example"
@ManageTransactions("MeasuOp")
def CurveCurvatureByParam(self, theCurve, theParam):
        """
        Measure the curvature of a curve at the point given by a parameter value.

        Parameters:
            theCurve a curve.
            theParam parameter.
        Returns:
            radius of curvature of theCurve.
        """
        # Example: see GEOM_TestMeasures.py
        radius = self.MeasuOp.CurveCurvatureByParam(theCurve, theParam)
        RaiseIfFailed("CurveCurvatureByParam", self.MeasuOp)
        return radius
## Measure curvature of a curve at a point.
# @param theCurve a curve.
# @param thePoint given point.
# @return radius of curvature of \a theCurve.
#
# @ref swig_todo "Example"
@ManageTransactions("MeasuOp")
def CurveCurvatureByPoint(self, theCurve, thePoint):
        """
        Measure the curvature of a curve at the given point.

        Parameters:
            theCurve a curve.
            thePoint given point.
        Returns:
            radius of curvature of theCurve.
        """
        radius = self.MeasuOp.CurveCurvatureByPoint(theCurve, thePoint)
        RaiseIfFailed("CurveCurvatureByPoint", self.MeasuOp)
        return radius
## @}
## @name Surface Curvature Measurement
# Methods for receiving max and min radius of curvature of surfaces
# in the given point
## @{
## Measure max radius of curvature of surface.
# @param theSurf the given surface.
# @param theUParam Value of U-parameter on the referenced surface.
# @param theVParam Value of V-parameter on the referenced surface.
# @return max radius of curvature of theSurf.
#
## @ref swig_todo "Example"
@ManageTransactions("MeasuOp")
def MaxSurfaceCurvatureByParam(self, theSurf, theUParam, theVParam):
        """
        Measure the maximal radius of curvature of a surface at the point
        given by its (U, V) parameters.

        Parameters:
            theSurf the given surface.
            theUParam Value of U-parameter on the referenced surface.
            theVParam Value of V-parameter on the referenced surface.
        Returns:
            max radius of curvature of theSurf.
        """
        # Example: see GEOM_TestMeasures.py
        radius = self.MeasuOp.MaxSurfaceCurvatureByParam(theSurf, theUParam, theVParam)
        RaiseIfFailed("MaxSurfaceCurvatureByParam", self.MeasuOp)
        return radius
## Measure max radius of curvature of surface in the given point
# @param theSurf the given surface.
# @param thePoint given point.
# @return max radius of curvature of theSurf.
#
## @ref swig_todo "Example"
@ManageTransactions("MeasuOp")
def MaxSurfaceCurvatureByPoint(self, theSurf, thePoint):
        """
        Measure the maximal radius of curvature of a surface at the given point.

        Parameters:
            theSurf the given surface.
            thePoint given point.
        Returns:
            max radius of curvature of theSurf.
        """
        radius = self.MeasuOp.MaxSurfaceCurvatureByPoint(theSurf, thePoint)
        RaiseIfFailed("MaxSurfaceCurvatureByPoint", self.MeasuOp)
        return radius
## Measure min radius of curvature of surface.
# @param theSurf the given surface.
# @param theUParam Value of U-parameter on the referenced surface.
# @param theVParam Value of V-parameter on the referenced surface.
# @return min radius of curvature of theSurf.
#
## @ref swig_todo "Example"
@ManageTransactions("MeasuOp")
def MinSurfaceCurvatureByParam(self, theSurf, theUParam, theVParam):
        """
        Measure the minimal radius of curvature of a surface at the point
        given by its (U, V) parameters.

        Parameters:
            theSurf the given surface.
            theUParam Value of U-parameter on the referenced surface.
            theVParam Value of V-parameter on the referenced surface.
        Returns:
            Min radius of curvature of theSurf.
        """
        radius = self.MeasuOp.MinSurfaceCurvatureByParam(theSurf, theUParam, theVParam)
        RaiseIfFailed("MinSurfaceCurvatureByParam", self.MeasuOp)
        return radius
## Measure min radius of curvature of surface in the given point
# @param theSurf the given surface.
# @param thePoint given point.
# @return min radius of curvature of theSurf.
#
## @ref swig_todo "Example"
@ManageTransactions("MeasuOp")
def MinSurfaceCurvatureByPoint(self, theSurf, thePoint):
        """
        Measure the minimal radius of curvature of a surface at the given point.

        Parameters:
            theSurf the given surface.
            thePoint given point.
        Returns:
            Min radius of curvature of theSurf.
        """
        radius = self.MeasuOp.MinSurfaceCurvatureByPoint(theSurf, thePoint)
        RaiseIfFailed("MinSurfaceCurvatureByPoint", self.MeasuOp)
        return radius
## @}
## Get min and max tolerances of sub-shapes of theShape
# @param theShape Shape, to get tolerances of.
# @return [FaceMin,FaceMax, EdgeMin,EdgeMax, VertMin,VertMax]\n
# FaceMin,FaceMax: Min and max tolerances of the faces.\n
# EdgeMin,EdgeMax: Min and max tolerances of the edges.\n
# VertMin,VertMax: Min and max tolerances of the vertices.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def Tolerance(self,theShape):
        """
        Get the minimal and maximal tolerances of the sub-shapes of theShape.

        Parameters:
            theShape Shape, to get tolerances of.
        Returns:
            [FaceMin,FaceMax, EdgeMin,EdgeMax, VertMin,VertMax]
            min and max tolerances of the faces, edges and vertices respectively.
        """
        # Example: see GEOM_TestMeasures.py
        tolerances = self.MeasuOp.GetTolerance(theShape)
        RaiseIfFailed("GetTolerance", self.MeasuOp)
        return tolerances
## Obtain description of the given shape (number of sub-shapes of each type)
# @param theShape Shape to be described.
# @return Description of the given shape.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def WhatIs(self,theShape):
        """
        Obtain a textual description of the given shape (number of
        sub-shapes of each type).

        Parameters:
            theShape Shape to be described.
        Returns:
            Description of the given shape.
        """
        # Example: see GEOM_TestMeasures.py
        description = self.MeasuOp.WhatIs(theShape)
        RaiseIfFailed("WhatIs", self.MeasuOp)
        return description
## Obtain quantity of shapes of the given type in \a theShape.
# If \a theShape is of type \a theType, it is also counted.
# @param theShape Shape to be described.
# @param theType the given ShapeType().
# @return Quantity of shapes of type \a theType in \a theShape.
#
# @ref tui_measurement_tools_page "Example"
def NbShapes (self, theShape, theType):
        """
        Count the sub-shapes of the given type in theShape.
        If theShape itself is of type theType, it is also counted.

        Parameters:
            theShape Shape to be described.
            theType the given geompy.ShapeType
        Returns:
            Quantity of shapes of type theType in theShape.
        """
        # Example: see GEOM_TestMeasures.py
        return len(self.SubShapeAllIDs(theShape, theType))
## Obtain quantity of shapes of each type in \a theShape.
# The \a theShape is also counted.
# @param theShape Shape to be described.
# @return Dictionary of ShapeType() with bound quantities of shapes.
#
# @ref tui_measurement_tools_page "Example"
def ShapeInfo (self, theShape):
        """
        Count the sub-shapes of each type in theShape.
        theShape itself is also counted.

        Parameters:
            theShape Shape to be described.
        Returns:
            Dictionary of geompy.ShapeType with bound quantities of shapes.
        """
        # Example: see GEOM_TestMeasures.py
        counts = {}
        for type_name, type_code in self.ShapeType.items():
            # "AUTO" and "SHAPE" are pseudo-types, not countable kinds.
            if type_name in ("AUTO", "SHAPE"):
                continue
            counts[type_name] = len(self.SubShapeAllIDs(theShape, type_code))
        return counts
def GetCreationInformation(self, theShape):
        """
        Return a human-readable description of how theShape was created:
        the operation name followed by one "name = value" entry per parameter.

        Parameters:
            theShape Shape to query.
        Returns:
            Multi-line string describing the creation operation and its parameters.
        """
        info = theShape.GetCreationInformation()
        # Fall back to a placeholder when no operation name was recorded.
        lines = ["Operation: " + (info.operationName or "no info available")]
        for parameter in info.params:
            lines.append(" %s = %s" % (parameter.name, parameter.value))
        return " \n".join(lines)
## Get a point, situated at the centre of mass of theShape.
# @param theShape Shape to define centre of mass of.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created point.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def MakeCDG(self, theShape, theName=None):
        """
        Create a point situated at the centre of mass of theShape.

        Parameters:
            theShape Shape to define centre of mass of.
            theName Object name; when specified, this parameter is used
                    for result publication in the study. Otherwise, if automatic
                    publication is switched on, default value is used for result name.
        Returns:
            New GEOM.GEOM_Object, containing the created point.
        """
        # Example: see GEOM_TestMeasures.py
        centre = self.MeasuOp.GetCentreOfMass(theShape)
        RaiseIfFailed("GetCentreOfMass", self.MeasuOp)
        self._autoPublish(centre, theName, "centerOfMass")
        return centre
## Get a vertex sub-shape by index depended with orientation.
# @param theShape Shape to find sub-shape.
# @param theIndex Index to find vertex by this index (starting from zero)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created vertex.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def GetVertexByIndex(self, theShape, theIndex, theName=None):
        """
        Get a vertex sub-shape by index, taking the shape orientation
        into account.

        Parameters:
            theShape Shape to find sub-shape of.
            theIndex Index to find vertex by this index (starting from zero)
            theName Object name; when specified, this parameter is used
                    for result publication in the study. Otherwise, if automatic
                    publication is switched on, default value is used for result name.
        Returns:
            New GEOM.GEOM_Object, containing the created vertex.
        """
        # Example: see GEOM_TestMeasures.py
        vertex = self.MeasuOp.GetVertexByIndex(theShape, theIndex)
        RaiseIfFailed("GetVertexByIndex", self.MeasuOp)
        self._autoPublish(vertex, theName, "vertex")
        return vertex
## Get the first vertex of wire/edge depended orientation.
# @param theShape Shape to find first vertex.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created vertex.
#
# @ref tui_measurement_tools_page "Example"
def GetFirstVertex(self, theShape, theName=None):
        """
        Get the first vertex of a wire/edge, taking orientation into account.

        Parameters:
            theShape Shape to find first vertex of.
            theName Object name; when specified, this parameter is used
                    for result publication in the study. Otherwise, if automatic
                    publication is switched on, default value is used for result name.
        Returns:
            New GEOM.GEOM_Object, containing the created vertex.
        """
        # Example: see GEOM_TestMeasures.py
        # Auto-publishing is handled inside GetVertexByIndex().
        return self.GetVertexByIndex(theShape, 0, theName)
## Get the last vertex of wire/edge depended orientation.
# @param theShape Shape to find last vertex.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created vertex.
#
# @ref tui_measurement_tools_page "Example"
def GetLastVertex(self, theShape, theName=None):
        """
        Get the last vertex of a wire/edge, taking orientation into account.

        Parameters:
            theShape Shape to find last vertex of.
            theName Object name; when specified, this parameter is used
                    for result publication in the study. Otherwise, if automatic
                    publication is switched on, default value is used for result name.
        Returns:
            New GEOM.GEOM_Object, containing the created vertex.
        """
        # Example: see GEOM_TestMeasures.py
        # The last vertex has index count-1; auto-publishing is handled
        # inside GetVertexByIndex().
        vertex_count = self.NumberOfSubShapes(theShape, self.ShapeType["VERTEX"])
        return self.GetVertexByIndex(theShape, vertex_count - 1, theName)
## Get a normal to the given face. If the point is not given,
#  the normal is calculated at the center of mass.
#  @param theFace Face to define normal of.
#  @param theOptionalPoint Point to compute the normal at.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created vector.
#
# @ref swig_todo "Example"
@ManageTransactions("MeasuOp")
def GetNormal(self, theFace, theOptionalPoint = None, theName=None):
        """
        Build a vector normal to the given face. When no point is given,
        the normal is computed at the centre of mass of the face.

        Parameters:
            theFace Face to define normal of.
            theOptionalPoint Point to compute the normal at (may be None).
            theName Object name; when specified, this parameter is used
                    for result publication in the study. Otherwise, if automatic
                    publication is switched on, default value is used for result name.
        Returns:
            New GEOM.GEOM_Object, containing the created vector.
        """
        # Example: see GEOM_TestMeasures.py
        normal = self.MeasuOp.GetNormal(theFace, theOptionalPoint)
        RaiseIfFailed("GetNormal", self.MeasuOp)
        self._autoPublish(normal, theName, "normal")
        return normal
## Print shape errors obtained from CheckShape.
# @param theShape Shape that was checked.
# @param theShapeErrors the shape errors obtained by CheckShape.
# @param theReturnStatus If 0 the description of problem is printed.
# If 1 the description of problem is returned.
# @return If theReturnStatus is equal to 1 the description is returned.
# Otherwise doesn't return anything.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def PrintShapeErrors(self, theShape, theShapeErrors, theReturnStatus = 0):
        """
        Print shape errors obtained from CheckShape.

        Parameters:
            theShape Shape that was checked.
            theShapeErrors the shape errors obtained by CheckShape.
            theReturnStatus If 0 the description of problem is printed.
                            If 1 the description of problem is returned.
        Returns:
            If theReturnStatus is equal to 1 the description is returned.
            Otherwise doesn't return anything.
        """
        # Example: see GEOM_TestMeasures.py
        Descr = self.MeasuOp.PrintShapeErrors(theShape, theShapeErrors)
        if theReturnStatus == 1:
            return Descr
        # Any other status: emit the description to stdout and return None
        # (Python 2 print statement).
        print Descr
        pass
## Check a topology of the given shape.
# @param theShape Shape to check validity of.
# @param theIsCheckGeom If FALSE, only the shape's topology will be checked, \n
# if TRUE, the shape's geometry will be checked also.
# @param theReturnStatus If 0 and if theShape is invalid, a description
# of problem is printed.
# If 1 isValid flag and the description of
# problem is returned.
# If 2 isValid flag and the list of error data
# is returned.
# @return TRUE, if the shape "seems to be valid".
# If theShape is invalid, prints a description of problem.
# If theReturnStatus is equal to 1 the description is returned
# along with IsValid flag.
# If theReturnStatus is equal to 2 the list of error data is
# returned along with IsValid flag.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def CheckShape(self,theShape, theIsCheckGeom = 0, theReturnStatus = 0):
        """
        Check a topology of the given shape.

        Parameters:
            theShape Shape to check validity of.
            theIsCheckGeom If FALSE, only the shape's topology will be checked,
                           if TRUE, the shape's geometry will be checked also.
            theReturnStatus If 0 and if theShape is invalid, a description
                            of problem is printed.
                            If 1 IsValid flag and the description of
                            problem is returned.
                            If 2 IsValid flag and the list of error data
                            is returned.
        Returns:
            TRUE, if the shape "seems to be valid".
            If theShape is invalid, prints a description of problem.
            If theReturnStatus is equal to 1 the description is returned
            along with IsValid flag.
            If theReturnStatus is equal to 2 the list of error data is
            returned along with IsValid flag.
        """
        # Example: see GEOM_TestMeasures.py
        # Choose the engine check: topology only, or topology + geometry.
        if theIsCheckGeom:
            (IsValid, ShapeErrors) = self.MeasuOp.CheckShapeWithGeometry(theShape)
            RaiseIfFailed("CheckShapeWithGeometry", self.MeasuOp)
        else:
            (IsValid, ShapeErrors) = self.MeasuOp.CheckShape(theShape)
            RaiseIfFailed("CheckShape", self.MeasuOp)
        # On failure, report according to theReturnStatus; a valid shape
        # always falls through to the plain IsValid return below.
        if IsValid == 0:
            if theReturnStatus == 0:
                # Status 0: print the human-readable error report (py2 print).
                Descr = self.MeasuOp.PrintShapeErrors(theShape, ShapeErrors)
                print Descr
            if theReturnStatus == 1:
                # Status 1: return the flag with the textual description.
                Descr = self.MeasuOp.PrintShapeErrors(theShape, ShapeErrors)
                return (IsValid, Descr)
            elif theReturnStatus == 2:
                # Status 2: return the flag with the raw error data list.
                return (IsValid, ShapeErrors)
        return IsValid
## Detect self-intersections in the given shape.
# @param theShape Shape to check.
# @return TRUE, if the shape contains no self-intersections.
#
# @ref tui_measurement_tools_page "Example"
@ManageTransactions("MeasuOp")
def CheckSelfIntersections(self, theShape):
        """
        Detect self-intersections in the given shape.

        Parameters:
            theShape Shape to check.
        Returns:
            TRUE, if the shape contains no self-intersections.
        """
        # Example: see GEOM_TestMeasures.py
        # The engine also reports the intersecting sub-shape pairs, but only
        # the validity flag is exposed by this method.
        (is_valid, pairs) = self.MeasuOp.CheckSelfIntersections(theShape)
        RaiseIfFailed("CheckSelfIntersections", self.MeasuOp)
        return is_valid
## Get position (LCS) of theShape.
#
# Origin of the LCS is situated at the shape's center of mass.
# Axes of the LCS are obtained from shape's location or,
# if the shape is a planar face, from position of its plane.
#
# @param theShape Shape to calculate position of.
# @return [Ox,Oy,Oz, Zx,Zy,Zz, Xx,Xy,Xz].
# Ox,Oy,Oz: Coordinates of shape's LCS origin.
# Zx,Zy,Zz: Coordinates of shape's LCS normal(main) direction.
# Xx,Xy,Xz: Coordinates of shape's LCS X direction.
#
# @ref swig_todo "Example"
@ManageTransactions("MeasuOp")
def GetPosition(self,theShape):
        """
        Get the position (local coordinate system, LCS) of theShape.
        The origin of the LCS is situated at the shape's centre of mass;
        its axes are obtained from the shape's location or, if the shape
        is a planar face, from the position of its plane.

        Parameters:
            theShape Shape to calculate position of.
        Returns:
            [Ox,Oy,Oz, Zx,Zy,Zz, Xx,Xy,Xz]:
            coordinates of the LCS origin, of its normal (main) direction
            and of its X direction.
        """
        # Example: see GEOM_TestMeasures.py
        lcs = self.MeasuOp.GetPosition(theShape)
        RaiseIfFailed("GetPosition", self.MeasuOp)
        return lcs
## Get kind of theShape.
#
# @param theShape Shape to get a kind of.
# @return Returns a kind of shape in terms of <VAR>GEOM.GEOM_IKindOfShape.shape_kind</VAR> enumeration
# and a list of parameters, describing the shape.
# @note Concrete meaning of each value, returned via \a theIntegers
# or \a theDoubles list depends on the kind() of the shape.
#
# @ref swig_todo "Example"
@ManageTransactions("MeasuOp")
def KindOfShape(self,theShape):
        """
        Get kind of theShape.

        Parameters:
            theShape Shape to get a kind of.
        Returns:
            A kind of shape in terms of GEOM_IKindOfShape.shape_kind
            enumeration, followed by parameters describing the shape.
        Note:
            Concrete meaning of each value, returned via theIntegers
            or theDoubles list depends on the geompy.kind of the shape
        """
        # Example: see GEOM_TestMeasures.py
        raw = self.MeasuOp.KindOfShape(theShape)
        RaiseIfFailed("KindOfShape", self.MeasuOp)
        kind = raw[0]
        integers = raw[1]
        doubles = raw[2]
        # Currently every kind follows the same packing rule: the kind value,
        # then all doubles, then all integers. Should a kind ever need a
        # custom parameter order, regroup it here, e.g.:
        #if kind == kind.SOME_KIND:
        #    # SOME_KIND int int double int double double
        #    return [kind, integers[0], integers[1], doubles[0], integers[2], doubles[1], doubles[2]]
        return [kind] + doubles + integers
## Returns the string that describes if the shell is good for solid.
# This is a support method for MakeSolid.
#
# @param theShell the shell to be checked.
# @return Returns a string that describes the shell validity for
# solid construction.
@ManageTransactions("MeasuOp")
def _IsGoodForSolid(self, theShell):
        """
        Return the string that describes whether the shell is suitable
        for solid construction. Support method for MakeSolid.

        Parameters:
            theShell the shell to be checked.
        Returns:
            A string describing the shell validity for solid construction.
        """
        verdict = self.MeasuOp.IsGoodForSolid(theShell)
        return verdict
# end of l2_measure
## @}
## @addtogroup l2_import_export
## @{
## Import a shape from the BREP, IGES, STEP or other file
# (depends on given format) with given name.
#
# Note: this function is deprecated, it is kept for backward compatibility only
# Use Import<FormatName> instead, where <FormatName> is a name of desirable format to import.
#
# @param theFileName The file, containing the shape.
# @param theFormatName Specify format for the file reading.
# Available formats can be obtained with InsertOp.ImportTranslators() method.
# If format 'IGES_SCALE' is used instead of 'IGES' or
# format 'STEP_SCALE' is used instead of 'STEP',
# length unit will be set to 'meter' and result model will be scaled.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the imported shape.
# If material names are imported it returns the list of
# objects. The first one is the imported object followed by
# material groups.
# @note Auto publishing is allowed for the shape itself. Imported
# material groups are not automatically published.
#
# @ref swig_Import_Export "Example"
@ManageTransactions("InsertOp")
def ImportFile(self, theFileName, theFormatName, theName=None):
        """
        Import a shape from the BREP, IGES, STEP or other file
        (depends on given format) with given name.

        Note: this function is deprecated, it is kept for backward compatibility only
        Use Import<FormatName> instead, where <FormatName> is a name of desirable format to import.

        Parameters:
            theFileName The file, containing the shape.
            theFormatName Specify format for the file reading.
                Available formats can be obtained with geompy.InsertOp.ImportTranslators() method.
                If format 'IGES_SCALE' is used instead of 'IGES' or
                format 'STEP_SCALE' is used instead of 'STEP',
                length unit will be set to 'meter' and result model will be scaled.
            theName Object name; when specified, this parameter is used
                    for result publication in the study. Otherwise, if automatic
                    publication is switched on, default value is used for result name.
        Returns:
            New GEOM.GEOM_Object, containing the imported shape.
            If material names are imported it returns the list of
            objects. The first one is the imported object followed by
            material groups.
        Note:
            Auto publishing is allowed for the shape itself. Imported
            material groups are not automatically published.
        """
        # Example: see GEOM_TestOthers.py
        # Deprecation banner, emitted at every call (Python 2 print statement).
        print """
        WARNING: Function ImportFile is deprecated, use Import<FormatName> instead,
        where <FormatName> is a name of desirable format for importing.
        """
        aListObj = self.InsertOp.ImportFile(theFileName, theFormatName)
        RaiseIfFailed("ImportFile", self.InsertOp)
        aNbObj = len(aListObj)
        # Only the primary imported shape is auto-published; any trailing
        # material groups are returned unpublished (see docstring note).
        if aNbObj > 0:
            self._autoPublish(aListObj[0], theName, "imported")
        # For a single result return the object itself, not a one-element list.
        if aNbObj == 1:
            return aListObj[0]
        return aListObj
## Deprecated analog of ImportFile()
def Import(self, theFileName, theFormatName, theName=None):
    """
    Deprecated analog of geompy.ImportFile, kept for backward compatibility only.

    Parameters:
        theFileName The file, containing the shape.
        theFormatName Specify format for the file reading.
        theName Object name; when specified, this parameter is used
                for result publication in the study.

    Returns:
        Whatever self.ImportFile() returns for the same arguments.
    """
    # note: auto-publishing is done in self.ImportFile()
    return self.ImportFile(theFileName, theFormatName, theName)
## Read a shape from the binary stream, containing its bounding representation (BRep).
# @note This method will not be dumped to the python script by DumpStudy functionality.
# @note GEOM.GEOM_Object.GetShapeStream() method can be used to obtain the shape's BRep stream.
# @param theStream The BRep binary stream.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM_Object, containing the shape, read from theStream.
#
# @ref swig_Import_Export "Example"
@ManageTransactions("InsertOp")
def RestoreShape (self, theStream, theName=None):
"""
Read a shape from the binary stream, containing its bounding representation (BRep).
Note:
shape.GetShapeStream() method can be used to obtain the shape's BRep stream.
Parameters:
theStream The BRep binary stream.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM_Object, containing the shape, read from theStream.
"""
# Example: see GEOM_TestOthers.py
anObj = self.InsertOp.RestoreShape(theStream)
RaiseIfFailed("RestoreShape", self.InsertOp)
self._autoPublish(anObj, theName, "restored")
return anObj
## Export the given shape into a file with given name.
#
# Note: this function is deprecated, it is kept for backward compatibility only
# Use Export<FormatName> instead, where <FormatName> is a name of desirable format to export.
#
# @param theObject Shape to be stored in the file.
# @param theFileName Name of the file to store the given shape in.
# @param theFormatName Specify format for the shape storage.
# Available formats can be obtained with
# geompy.InsertOp.ExportTranslators()[0] method.
#
# @ref swig_Import_Export "Example"
@ManageTransactions("InsertOp")
def Export(self, theObject, theFileName, theFormatName):
"""
Export the given shape into a file with given name.
Note: this function is deprecated, it is kept for backward compatibility only
Use Export<FormatName> instead, where <FormatName> is a name of desirable format to export.
Parameters:
theObject Shape to be stored in the file.
theFileName Name of the file to store the given shape in.
theFormatName Specify format for the shape storage.
Available formats can be obtained with
geompy.InsertOp.ExportTranslators()[0] method.
"""
# Example: see GEOM_TestOthers.py
print """
WARNING: Function Export is deprecated, use Export<FormatName> instead,
where <FormatName> is a name of desirable format for exporting.
"""
self.InsertOp.Export(theObject, theFileName, theFormatName)
if self.InsertOp.IsDone() == 0:
raise RuntimeError, "Export : " + self.InsertOp.GetErrorCode()
pass
pass
# end of l2_import_export
## @}
## @addtogroup l3_blocks
## @{
## Create a quadrangle face from four edges. Order of Edges is not
# important. It is not necessary that edges share the same vertex.
# @param E1,E2,E3,E4 Edges for the face bound.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created face.
#
# @ref tui_building_by_blocks_page "Example"
@ManageTransactions("BlocksOp")
def MakeQuad(self, E1, E2, E3, E4, theName=None):
"""
Create a quadrangle face from four edges. Order of Edges is not
important. It is not necessary that edges share the same vertex.
Parameters:
E1,E2,E3,E4 Edges for the face bound.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created face.
Example of usage:
qface1 = geompy.MakeQuad(edge1, edge2, edge3, edge4)
"""
# Example: see GEOM_Spanner.py
anObj = self.BlocksOp.MakeQuad(E1, E2, E3, E4)
RaiseIfFailed("MakeQuad", self.BlocksOp)
self._autoPublish(anObj, theName, "quad")
return anObj
## Create a quadrangle face on two edges.
# The missing edges will be built by creating the shortest ones.
# @param E1,E2 Two opposite edges for the face.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created face.
#
# @ref tui_building_by_blocks_page "Example"
@ManageTransactions("BlocksOp")
def MakeQuad2Edges(self, E1, E2, theName=None):
"""
Create a quadrangle face on two edges.
The missing edges will be built by creating the shortest ones.
Parameters:
E1,E2 Two opposite edges for the face.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created face.
Example of usage:
# create vertices
p1 = geompy.MakeVertex( 0., 0., 0.)
p2 = geompy.MakeVertex(150., 30., 0.)
p3 = geompy.MakeVertex( 0., 120., 50.)
p4 = geompy.MakeVertex( 0., 40., 70.)
# create edges
edge1 = geompy.MakeEdge(p1, p2)
edge2 = geompy.MakeEdge(p3, p4)
# create a quadrangle face from two edges
qface2 = geompy.MakeQuad2Edges(edge1, edge2)
"""
# Example: see GEOM_Spanner.py
anObj = self.BlocksOp.MakeQuad2Edges(E1, E2)
RaiseIfFailed("MakeQuad2Edges", self.BlocksOp)
self._autoPublish(anObj, theName, "quad")
return anObj
## Create a quadrangle face with specified corners.
# The missing edges will be built by creating the shortest ones.
# @param V1,V2,V3,V4 Corner vertices for the face.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created face.
#
# @ref tui_building_by_blocks_page "Example 1"
# \n @ref swig_MakeQuad4Vertices "Example 2"
@ManageTransactions("BlocksOp")
def MakeQuad4Vertices(self, V1, V2, V3, V4, theName=None):
"""
Create a quadrangle face with specified corners.
The missing edges will be built by creating the shortest ones.
Parameters:
V1,V2,V3,V4 Corner vertices for the face.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created face.
Example of usage:
# create vertices
p1 = geompy.MakeVertex( 0., 0., 0.)
p2 = geompy.MakeVertex(150., 30., 0.)
p3 = geompy.MakeVertex( 0., 120., 50.)
p4 = geompy.MakeVertex( 0., 40., 70.)
# create a quadrangle from four points in its corners
qface3 = geompy.MakeQuad4Vertices(p1, p2, p3, p4)
"""
# Example: see GEOM_Spanner.py
anObj = self.BlocksOp.MakeQuad4Vertices(V1, V2, V3, V4)
RaiseIfFailed("MakeQuad4Vertices", self.BlocksOp)
self._autoPublish(anObj, theName, "quad")
return anObj
## Create a hexahedral solid, bounded by the six given faces. Order of
# faces is not important. It is not necessary that Faces share the same edge.
# @param F1,F2,F3,F4,F5,F6 Faces for the hexahedral solid.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created solid.
#
# @ref tui_building_by_blocks_page "Example 1"
# \n @ref swig_MakeHexa "Example 2"
@ManageTransactions("BlocksOp")
def MakeHexa(self, F1, F2, F3, F4, F5, F6, theName=None):
"""
Create a hexahedral solid, bounded by the six given faces. Order of
faces is not important. It is not necessary that Faces share the same edge.
Parameters:
F1,F2,F3,F4,F5,F6 Faces for the hexahedral solid.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created solid.
Example of usage:
solid = geompy.MakeHexa(qface1, qface2, qface3, qface4, qface5, qface6)
"""
# Example: see GEOM_Spanner.py
anObj = self.BlocksOp.MakeHexa(F1, F2, F3, F4, F5, F6)
RaiseIfFailed("MakeHexa", self.BlocksOp)
self._autoPublish(anObj, theName, "hexa")
return anObj
## Create a hexahedral solid between two given faces.
# The missing faces will be built by creating the smallest ones.
# @param F1,F2 Two opposite faces for the hexahedral solid.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the created solid.
#
# @ref tui_building_by_blocks_page "Example 1"
# \n @ref swig_MakeHexa2Faces "Example 2"
@ManageTransactions("BlocksOp")
def MakeHexa2Faces(self, F1, F2, theName=None):
"""
Create a hexahedral solid between two given faces.
The missing faces will be built by creating the smallest ones.
Parameters:
F1,F2 Two opposite faces for the hexahedral solid.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the created solid.
Example of usage:
solid1 = geompy.MakeHexa2Faces(qface1, qface2)
"""
# Example: see GEOM_Spanner.py
anObj = self.BlocksOp.MakeHexa2Faces(F1, F2)
RaiseIfFailed("MakeHexa2Faces", self.BlocksOp)
self._autoPublish(anObj, theName, "hexa")
return anObj
# end of l3_blocks
## @}
## @addtogroup l3_blocks_op
## @{
## Get a vertex, found in the given shape by its coordinates.
# @param theShape Block or a compound of blocks.
# @param theX,theY,theZ Coordinates of the sought vertex.
# @param theEpsilon Maximum allowed distance between the resulting
# vertex and point with the given coordinates.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the found vertex.
#
# @ref swig_GetPoint "Example"
@ManageTransactions("BlocksOp")
def GetPoint(self, theShape, theX, theY, theZ, theEpsilon, theName=None):
"""
Get a vertex, found in the given shape by its coordinates.
Parameters:
theShape Block or a compound of blocks.
theX,theY,theZ Coordinates of the sought vertex.
theEpsilon Maximum allowed distance between the resulting
vertex and point with the given coordinates.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the found vertex.
Example of usage:
pnt = geompy.GetPoint(shape, -50, 50, 50, 0.01)
"""
# Example: see GEOM_TestOthers.py
anObj = self.BlocksOp.GetPoint(theShape, theX, theY, theZ, theEpsilon)
RaiseIfFailed("GetPoint", self.BlocksOp)
self._autoPublish(anObj, theName, "vertex")
return anObj
## Find a vertex of the given shape, which has minimal distance to the given point.
# @param theShape Any shape.
# @param thePoint Point, close to the desired vertex.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the found vertex.
#
# @ref swig_GetVertexNearPoint "Example"
@ManageTransactions("BlocksOp")
def GetVertexNearPoint(self, theShape, thePoint, theName=None):
"""
Find a vertex of the given shape, which has minimal distance to the given point.
Parameters:
theShape Any shape.
thePoint Point, close to the desired vertex.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the found vertex.
Example of usage:
pmidle = geompy.MakeVertex(50, 0, 50)
edge1 = geompy.GetEdgeNearPoint(blocksComp, pmidle)
"""
# Example: see GEOM_TestOthers.py
anObj = self.BlocksOp.GetVertexNearPoint(theShape, thePoint)
RaiseIfFailed("GetVertexNearPoint", self.BlocksOp)
self._autoPublish(anObj, theName, "vertex")
return anObj
## Get an edge, found in the given shape by two given vertices.
# @param theShape Block or a compound of blocks.
# @param thePoint1,thePoint2 Points, close to the ends of the desired edge.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the found edge.
#
# @ref swig_GetEdge "Example"
@ManageTransactions("BlocksOp")
def GetEdge(self, theShape, thePoint1, thePoint2, theName=None):
"""
Get an edge, found in the given shape by two given vertices.
Parameters:
theShape Block or a compound of blocks.
thePoint1,thePoint2 Points, close to the ends of the desired edge.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the found edge.
"""
# Example: see GEOM_Spanner.py
anObj = self.BlocksOp.GetEdge(theShape, thePoint1, thePoint2)
RaiseIfFailed("GetEdge", self.BlocksOp)
self._autoPublish(anObj, theName, "edge")
return anObj
## Find an edge of the given shape, which has minimal distance to the given point.
# @param theShape Block or a compound of blocks.
# @param thePoint Point, close to the desired edge.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the found edge.
#
# @ref swig_GetEdgeNearPoint "Example"
@ManageTransactions("BlocksOp")
def GetEdgeNearPoint(self, theShape, thePoint, theName=None):
"""
Find an edge of the given shape, which has minimal distance to the given point.
Parameters:
theShape Block or a compound of blocks.
thePoint Point, close to the desired edge.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the found edge.
"""
# Example: see GEOM_TestOthers.py
anObj = self.BlocksOp.GetEdgeNearPoint(theShape, thePoint)
RaiseIfFailed("GetEdgeNearPoint", self.BlocksOp)
self._autoPublish(anObj, theName, "edge")
return anObj
## Returns a face, found in the given shape by four given corner vertices.
# @param theShape Block or a compound of blocks.
# @param thePoint1,thePoint2,thePoint3,thePoint4 Points, close to the corners of the desired face.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the found face.
#
# @ref swig_todo "Example"
@ManageTransactions("BlocksOp")
def GetFaceByPoints(self, theShape, thePoint1, thePoint2, thePoint3, thePoint4, theName=None):
"""
Returns a face, found in the given shape by four given corner vertices.
Parameters:
theShape Block or a compound of blocks.
thePoint1,thePoint2,thePoint3,thePoint4 Points, close to the corners of the desired face.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the found face.
"""
# Example: see GEOM_Spanner.py
anObj = self.BlocksOp.GetFaceByPoints(theShape, thePoint1, thePoint2, thePoint3, thePoint4)
RaiseIfFailed("GetFaceByPoints", self.BlocksOp)
self._autoPublish(anObj, theName, "face")
return anObj
## Get a face of block, found in the given shape by two given edges.
# @param theShape Block or a compound of blocks.
# @param theEdge1,theEdge2 Edges, close to the edges of the desired face.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the found face.
#
# @ref swig_todo "Example"
@ManageTransactions("BlocksOp")
def GetFaceByEdges(self, theShape, theEdge1, theEdge2, theName=None):
"""
Get a face of block, found in the given shape by two given edges.
Parameters:
theShape Block or a compound of blocks.
theEdge1,theEdge2 Edges, close to the edges of the desired face.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the found face.
"""
# Example: see GEOM_Spanner.py
anObj = self.BlocksOp.GetFaceByEdges(theShape, theEdge1, theEdge2)
RaiseIfFailed("GetFaceByEdges", self.BlocksOp)
self._autoPublish(anObj, theName, "face")
return anObj
## Find a face, opposite to the given one in the given block.
# @param theBlock Must be a hexahedral solid.
# @param theFace Face of \a theBlock, opposite to the desired face.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the found face.
#
# @ref swig_GetOppositeFace "Example"
@ManageTransactions("BlocksOp")
def GetOppositeFace(self, theBlock, theFace, theName=None):
"""
Find a face, opposite to the given one in the given block.
Parameters:
theBlock Must be a hexahedral solid.
theFace Face of theBlock, opposite to the desired face.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the found face.
"""
# Example: see GEOM_Spanner.py
anObj = self.BlocksOp.GetOppositeFace(theBlock, theFace)
RaiseIfFailed("GetOppositeFace", self.BlocksOp)
self._autoPublish(anObj, theName, "face")
return anObj
## Find a face of the given shape, which has minimal distance to the given point.
# @param theShape Block or a compound of blocks.
# @param thePoint Point, close to the desired face.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the found face.
#
# @ref swig_GetFaceNearPoint "Example"
@ManageTransactions("BlocksOp")
def GetFaceNearPoint(self, theShape, thePoint, theName=None):
"""
Find a face of the given shape, which has minimal distance to the given point.
Parameters:
theShape Block or a compound of blocks.
thePoint Point, close to the desired face.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the found face.
"""
# Example: see GEOM_Spanner.py
anObj = self.BlocksOp.GetFaceNearPoint(theShape, thePoint)
RaiseIfFailed("GetFaceNearPoint", self.BlocksOp)
self._autoPublish(anObj, theName, "face")
return anObj
## Find a face of block, whose outside normale has minimal angle with the given vector.
# @param theBlock Block or a compound of blocks.
# @param theVector Vector, close to the normale of the desired face.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the found face.
#
# @ref swig_todo "Example"
@ManageTransactions("BlocksOp")
def GetFaceByNormale(self, theBlock, theVector, theName=None):
"""
Find a face of block, whose outside normale has minimal angle with the given vector.
Parameters:
theBlock Block or a compound of blocks.
theVector Vector, close to the normale of the desired face.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM.GEOM_Object, containing the found face.
"""
# Example: see GEOM_Spanner.py
anObj = self.BlocksOp.GetFaceByNormale(theBlock, theVector)
RaiseIfFailed("GetFaceByNormale", self.BlocksOp)
self._autoPublish(anObj, theName, "face")
return anObj
## Find all sub-shapes of type \a theShapeType of the given shape,
# which have minimal distance to the given point.
# @param theShape Any shape.
# @param thePoint Point, close to the desired shape.
# @param theShapeType Defines what kind of sub-shapes is searched (see GEOM::shape_type)
# @param theTolerance The tolerance for distances comparison. All shapes
# with distances to the given point in interval
# [minimal_distance, minimal_distance + theTolerance] will be gathered.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM_Object, containing a group of all found shapes.
#
# @ref swig_GetShapesNearPoint "Example"
@ManageTransactions("BlocksOp")
def GetShapesNearPoint(self, theShape, thePoint, theShapeType, theTolerance = 1e-07, theName=None):
"""
Find all sub-shapes of type theShapeType of the given shape,
which have minimal distance to the given point.
Parameters:
theShape Any shape.
thePoint Point, close to the desired shape.
theShapeType Defines what kind of sub-shapes is searched (see GEOM::shape_type)
theTolerance The tolerance for distances comparison. All shapes
with distances to the given point in interval
[minimal_distance, minimal_distance + theTolerance] will be gathered.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
New GEOM_Object, containing a group of all found shapes.
"""
# Example: see GEOM_TestOthers.py
anObj = self.BlocksOp.GetShapesNearPoint(theShape, thePoint, theShapeType, theTolerance)
RaiseIfFailed("GetShapesNearPoint", self.BlocksOp)
self._autoPublish(anObj, theName, "group")
return anObj
# end of l3_blocks_op
## @}
## @addtogroup l4_blocks_measure
## @{
## Check, if the compound of blocks is given.
# To be considered as a compound of blocks, the
# given shape must satisfy the following conditions:
# - Each element of the compound should be a Block (6 faces and 12 edges).
# - A connection between two Blocks should be an entire quadrangle face or an entire edge.
# - The compound should be connected.
# - The glue between two quadrangle faces should be applied.
# @param theCompound The compound to check.
# @return TRUE, if the given shape is a compound of blocks.
# If theCompound is not valid, prints all discovered errors.
#
# @ref tui_measurement_tools_page "Example 1"
# \n @ref swig_CheckCompoundOfBlocks "Example 2"
@ManageTransactions("BlocksOp")
def CheckCompoundOfBlocks(self,theCompound):
"""
Check, if the compound of blocks is given.
To be considered as a compound of blocks, the
given shape must satisfy the following conditions:
- Each element of the compound should be a Block (6 faces and 12 edges).
- A connection between two Blocks should be an entire quadrangle face or an entire edge.
- The compound should be connexe.
- The glue between two quadrangle faces should be applied.
Parameters:
theCompound The compound to check.
Returns:
TRUE, if the given shape is a compound of blocks.
If theCompound is not valid, prints all discovered errors.
"""
# Example: see GEOM_Spanner.py
(IsValid, BCErrors) = self.BlocksOp.CheckCompoundOfBlocks(theCompound)
RaiseIfFailed("CheckCompoundOfBlocks", self.BlocksOp)
if IsValid == 0:
Descr = self.BlocksOp.PrintBCErrors(theCompound, BCErrors)
print Descr
return IsValid
## Retrieve all non blocks solids and faces from \a theShape.
# @param theShape The shape to explore.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return A tuple of two GEOM_Objects. The first object is a group of all
# non block solids (= not 6 faces, or with 6 faces, but with the
# presence of non-quadrangular faces). The second object is a
# group of all non quadrangular faces.
#
# @ref tui_measurement_tools_page "Example 1"
# \n @ref swig_GetNonBlocks "Example 2"
@ManageTransactions("BlocksOp")
def GetNonBlocks (self, theShape, theName=None):
"""
Retrieve all non blocks solids and faces from theShape.
Parameters:
theShape The shape to explore.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
A tuple of two GEOM_Objects. The first object is a group of all
non block solids (= not 6 faces, or with 6 faces, but with the
presence of non-quadrangular faces). The second object is a
group of all non quadrangular faces.
Usage:
(res_sols, res_faces) = geompy.GetNonBlocks(myShape1)
"""
# Example: see GEOM_Spanner.py
aTuple = self.BlocksOp.GetNonBlocks(theShape)
RaiseIfFailed("GetNonBlocks", self.BlocksOp)
self._autoPublish(aTuple, theName, ("groupNonHexas", "groupNonQuads"))
return aTuple
## Remove all seam and degenerated edges from \a theShape.
# Unite faces and edges, sharing one surface. It means that
# this faces must have references to one C++ surface object (handle).
# @param theShape The compound or single solid to remove irregular edges from.
# @param doUnionFaces If True, then unite faces. If False (the default value),
# do not unite faces.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return Improved shape.
#
# @ref swig_RemoveExtraEdges "Example"
@ManageTransactions("BlocksOp")
def RemoveExtraEdges(self, theShape, doUnionFaces=False, theName=None):
"""
Remove all seam and degenerated edges from theShape.
Unite faces and edges, sharing one surface. It means that
this faces must have references to one C++ surface object (handle).
Parameters:
theShape The compound or single solid to remove irregular edges from.
doUnionFaces If True, then unite faces. If False (the default value),
do not unite faces.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
Improved shape.
"""
# Example: see GEOM_TestOthers.py
nbFacesOptimum = -1 # -1 means do not unite faces
if doUnionFaces is True: nbFacesOptimum = 0 # 0 means unite faces
anObj = self.BlocksOp.RemoveExtraEdges(theShape, nbFacesOptimum)
RaiseIfFailed("RemoveExtraEdges", self.BlocksOp)
self._autoPublish(anObj, theName, "removeExtraEdges")
return anObj
## Performs union faces of \a theShape
# Unite faces sharing one surface. It means that
# these faces must have references to one C++ surface object (handle).
# @param theShape The compound or single solid that contains faces
# to perform union.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return Improved shape.
#
# @ref swig_UnionFaces "Example"
@ManageTransactions("BlocksOp")
def UnionFaces(self, theShape, theName=None):
"""
Performs union faces of theShape.
Unite faces sharing one surface. It means that
these faces must have references to one C++ surface object (handle).
Parameters:
theShape The compound or single solid that contains faces
to perform union.
theName Object name; when specified, this parameter is used
for result publication in the study. Otherwise, if automatic
publication is switched on, default value is used for result name.
Returns:
Improved shape.
"""
# Example: see GEOM_TestOthers.py
anObj = self.BlocksOp.UnionFaces(theShape)
RaiseIfFailed("UnionFaces", self.BlocksOp)
self._autoPublish(anObj, theName, "unionFaces")
return anObj
## Check, if the given shape is a blocks compound.
# Fix all detected errors.
# \note Single block can be also fixed by this method.
# @param theShape The compound to check and improve.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return Improved compound.
#
# @ref swig_CheckAndImprove "Example"
@ManageTransactions("BlocksOp")
def CheckAndImprove(self, theShape, theName=None):
    """
    Check whether the given shape is a blocks compound and fix all
    detected errors.

    Note:
        A single block can also be fixed by this method.

    Parameters:
        theShape The compound to check and improve.
        theName  Object name; when specified, it is used for result
                 publication in the study. Otherwise, if automatic
                 publication is switched on, a default name is used.

    Returns:
        Improved compound.
    """
    # See GEOM_TestOthers.py for a usage example.
    aResult = self.BlocksOp.CheckAndImprove(theShape)
    RaiseIfFailed("CheckAndImprove", self.BlocksOp)
    self._autoPublish(aResult, theName, "improved")
    return aResult
# end of l4_blocks_measure
## @}
## @addtogroup l3_blocks_op
## @{
## Get all the blocks, contained in the given compound.
# @param theCompound The compound to explode.
# @param theMinNbFaces If solid has lower number of faces, it is not a block.
# @param theMaxNbFaces If solid has higher number of faces, it is not a block.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note If theMaxNbFaces = 0, the maximum number of faces is not restricted.
#
# @return List of GEOM.GEOM_Object, containing the retrieved blocks.
#
# @ref tui_explode_on_blocks "Example 1"
# \n @ref swig_MakeBlockExplode "Example 2"
@ManageTransactions("BlocksOp")
def MakeBlockExplode(self, theCompound, theMinNbFaces, theMaxNbFaces, theName=None):
    """
    Get all the blocks contained in the given compound.

    Parameters:
        theCompound   The compound to explode.
        theMinNbFaces A solid with fewer faces is not considered a block.
        theMaxNbFaces A solid with more faces is not considered a block.
        theName       Object name; when specified, it is used for result
                      publication in the study. Otherwise, if automatic
                      publication is switched on, a default name is used.

    Note:
        If theMaxNbFaces = 0, the maximum number of faces is not restricted.

    Returns:
        List of GEOM.GEOM_Object, containing the retrieved blocks.
    """
    # See GEOM_TestOthers.py for a usage example.
    theMinNbFaces, theMaxNbFaces, Parameters = ParseParameters(theMinNbFaces, theMaxNbFaces)
    aBlocks = self.BlocksOp.ExplodeCompoundOfBlocks(theCompound, theMinNbFaces, theMaxNbFaces)
    RaiseIfFailed("ExplodeCompoundOfBlocks", self.BlocksOp)
    # Propagate the parametric definition onto every retrieved block.
    for aBlock in aBlocks:
        aBlock.SetParameters(Parameters)
    self._autoPublish(aBlocks, theName, "block")
    return aBlocks
## Find block, containing the given point inside its volume or on boundary.
# @param theCompound Compound, to find block in.
# @param thePoint Point, close to the desired block. If the point lays on
# boundary between some blocks, we return block with nearest center.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the found block.
#
# @ref swig_todo "Example"
@ManageTransactions("BlocksOp")
def GetBlockNearPoint(self, theCompound, thePoint, theName=None):
    """
    Find the block containing the given point inside its volume or on
    its boundary.

    Parameters:
        theCompound Compound to find the block in.
        thePoint    Point close to the desired block. If the point lies on
                    a boundary between blocks, the block with the nearest
                    center is returned.
        theName     Object name; when specified, it is used for result
                    publication in the study. Otherwise, if automatic
                    publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the found block.
    """
    # See GEOM_Spanner.py for a usage example.
    aBlock = self.BlocksOp.GetBlockNearPoint(theCompound, thePoint)
    RaiseIfFailed("GetBlockNearPoint", self.BlocksOp)
    self._autoPublish(aBlock, theName, "block")
    return aBlock
## Find block, containing all the elements, passed as the parts, or maximum quantity of them.
# @param theCompound Compound, to find block in.
# @param theParts List of faces and/or edges and/or vertices to be parts of the found block.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the found block.
#
# @ref swig_GetBlockByParts "Example"
@ManageTransactions("BlocksOp")
def GetBlockByParts(self, theCompound, theParts, theName=None):
    """
    Find the block containing all the elements passed as parts, or the
    maximum quantity of them.

    Parameters:
        theCompound Compound to find the block in.
        theParts    List of faces and/or edges and/or vertices that should
                    be parts of the found block.
        theName     Object name; when specified, it is used for result
                    publication in the study. Otherwise, if automatic
                    publication is switched on, a default name is used.

    Returns:
        New GEOM_Object, containing the found block.
    """
    # See GEOM_TestOthers.py for a usage example.
    aBlock = self.BlocksOp.GetBlockByParts(theCompound, theParts)
    RaiseIfFailed("GetBlockByParts", self.BlocksOp)
    self._autoPublish(aBlock, theName, "block")
    return aBlock
## Return all blocks, containing all the elements, passed as the parts.
# @param theCompound Compound, to find blocks in.
# @param theParts List of faces and/or edges and/or vertices to be parts of the found blocks.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of GEOM.GEOM_Object, containing the found blocks.
#
# @ref swig_todo "Example"
@ManageTransactions("BlocksOp")
def GetBlocksByParts(self, theCompound, theParts, theName=None):
    """
    Return all blocks containing all the elements passed as parts.

    Parameters:
        theCompound Compound to find the blocks in.
        theParts    List of faces and/or edges and/or vertices that should
                    be parts of the found blocks.
        theName     Object name; when specified, it is used for result
                    publication in the study. Otherwise, if automatic
                    publication is switched on, a default name is used.

    Returns:
        List of GEOM.GEOM_Object, containing the found blocks.
    """
    # See GEOM_Spanner.py for a usage example.
    aBlocks = self.BlocksOp.GetBlocksByParts(theCompound, theParts)
    RaiseIfFailed("GetBlocksByParts", self.BlocksOp)
    self._autoPublish(aBlocks, theName, "block")
    return aBlocks
## Multi-transformate block and glue the result.
# Transformation is defined so, as to superpose direction faces.
# @param Block Hexahedral solid to be multi-transformed.
# @param DirFace1 ID of First direction face.
# @param DirFace2 ID of Second direction face.
# @param NbTimes Quantity of transformations to be done.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @note Unique ID of sub-shape can be obtained, using method GetSubShapeID().
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_multi_transformation "Example"
@ManageTransactions("BlocksOp")
def MakeMultiTransformation1D(self, Block, DirFace1, DirFace2, NbTimes, theName=None):
    """
    Multi-transformate block and glue the result.
    The transformation is defined so as to superpose the direction faces.

    Parameters:
        Block    Hexahedral solid to be multi-transformed.
        DirFace1 ID of the first direction face.
        DirFace2 ID of the second direction face.
        NbTimes  Quantity of transformations to be done.
        theName  Object name; when specified, it is used for result
                 publication in the study. Otherwise, if automatic
                 publication is switched on, a default name is used.

    Note:
        A unique ID of a sub-shape can be obtained using GetSubShapeID().

    Returns:
        New GEOM.GEOM_Object, containing the result shape.
    """
    # See GEOM_Spanner.py for a usage example.
    DirFace1, DirFace2, NbTimes, Parameters = ParseParameters(DirFace1, DirFace2, NbTimes)
    aResult = self.BlocksOp.MakeMultiTransformation1D(Block, DirFace1, DirFace2, NbTimes)
    RaiseIfFailed("MakeMultiTransformation1D", self.BlocksOp)
    aResult.SetParameters(Parameters)
    self._autoPublish(aResult, theName, "transformed")
    return aResult
## Multi-transformate block and glue the result.
# @param Block Hexahedral solid to be multi-transformed.
# @param DirFace1U,DirFace2U IDs of Direction faces for the first transformation.
# @param DirFace1V,DirFace2V IDs of Direction faces for the second transformation.
# @param NbTimesU,NbTimesV Quantity of transformations to be done.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM.GEOM_Object, containing the result shape.
#
# @ref tui_multi_transformation "Example"
@ManageTransactions("BlocksOp")
def MakeMultiTransformation2D(self, Block, DirFace1U, DirFace2U, NbTimesU,
                              DirFace1V, DirFace2V, NbTimesV, theName=None):
    """
    Multi-transformate block and glue the result.

    Parameters:
        Block               Hexahedral solid to be multi-transformed.
        DirFace1U,DirFace2U IDs of direction faces for the first transformation.
        DirFace1V,DirFace2V IDs of direction faces for the second transformation.
        NbTimesU,NbTimesV   Quantity of transformations to be done.
        theName             Object name; when specified, it is used for result
                            publication in the study. Otherwise, if automatic
                            publication is switched on, a default name is used.

    Returns:
        New GEOM.GEOM_Object, containing the result shape.
    """
    # See GEOM_Spanner.py for a usage example.
    DirFace1U, DirFace2U, NbTimesU, DirFace1V, DirFace2V, NbTimesV, Parameters = \
        ParseParameters(DirFace1U, DirFace2U, NbTimesU, DirFace1V, DirFace2V, NbTimesV)
    aResult = self.BlocksOp.MakeMultiTransformation2D(Block,
                                                     DirFace1U, DirFace2U, NbTimesU,
                                                     DirFace1V, DirFace2V, NbTimesV)
    RaiseIfFailed("MakeMultiTransformation2D", self.BlocksOp)
    aResult.SetParameters(Parameters)
    self._autoPublish(aResult, theName, "transformed")
    return aResult
## Build all possible propagation groups.
# Propagation group is a set of all edges, opposite to one (main)
# edge of this group directly or through other opposite edges.
# The notion of an opposite edge makes sense only on a quadrangle face.
# @param theShape Shape to build propagation groups on.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return List of GEOM.GEOM_Object, each of them is a propagation group.
#
# @ref swig_Propagate "Example"
@ManageTransactions("BlocksOp")
def Propagate(self, theShape, theName=None):
    """
    Build all possible propagation groups.
    A propagation group is the set of all edges opposite to one (main)
    edge of the group, directly or through other opposite edges.
    The notion of an opposite edge only makes sense on a quadrangle face.

    Parameters:
        theShape Shape to build propagation groups on.
        theName  Object name; when specified, it is used for result
                 publication in the study. Otherwise, if automatic
                 publication is switched on, a default name is used.

    Returns:
        List of GEOM.GEOM_Object, each of them a propagation group.
    """
    # See GEOM_TestOthers.py for a usage example.
    aChains = self.BlocksOp.Propagate(theShape)
    RaiseIfFailed("Propagate", self.BlocksOp)
    self._autoPublish(aChains, theName, "propagate")
    return aChains
# end of l3_blocks_op
## @}
## @addtogroup l3_groups
## @{
## Creates a new group which will store sub-shapes of theMainShape
# @param theMainShape is a GEOM object on which the group is selected
# @param theShapeType defines a shape type of the group (see GEOM::shape_type)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return a newly created GEOM group (GEOM.GEOM_Object)
#
# @ref tui_working_with_groups_page "Example 1"
# \n @ref swig_CreateGroup "Example 2"
@ManageTransactions("GroupOp")
def CreateGroup(self, theMainShape, theShapeType, theName=None):
    """
    Create a new group which will store sub-shapes of theMainShape.

    Parameters:
        theMainShape GEOM object on which the group is selected.
        theShapeType Shape type of the group: "COMPOUND", "COMPSOLID",
                     "SOLID", "SHELL", "FACE", "WIRE", "EDGE", "VERTEX",
                     "SHAPE".
        theName      Object name; when specified, it is used for result
                     publication in the study. Otherwise, if automatic
                     publication is switched on, a default name is used.

    Returns:
        A newly created GEOM group.

    Example of usage:
        group = geompy.CreateGroup(Box, geompy.ShapeType["FACE"])
    """
    # See GEOM_TestOthers.py for a usage example.
    aGroup = self.GroupOp.CreateGroup(theMainShape, theShapeType)
    RaiseIfFailed("CreateGroup", self.GroupOp)
    self._autoPublish(aGroup, theName, "group")
    return aGroup
## Adds a sub-object with ID theSubShapeId to the group
# @param theGroup is a GEOM group to which the new sub-shape is added
# @param theSubShapeID is a sub-shape ID in the main object.
# \note Use method GetSubShapeID() to get an unique ID of the sub-shape
#
# @ref tui_working_with_groups_page "Example"
@ManageTransactions("GroupOp")
def AddObject(self, theGroup, theSubShapeID):
    """
    Add a sub-object with ID theSubShapeID to the group.

    Parameters:
        theGroup      GEOM group to which the new sub-shape is added.
        theSubShapeID Sub-shape ID in the main object.

    Note:
        Use method GetSubShapeID() to get a unique ID of the sub-shape.
    """
    # See GEOM_TestOthers.py for a usage example.
    self.GroupOp.AddObject(theGroup, theSubShapeID)
    # Re-adding an already present element is deliberately not an error.
    if self.GroupOp.GetErrorCode() != "PAL_ELEMENT_ALREADY_PRESENT":
        RaiseIfFailed("AddObject", self.GroupOp)
## Removes a sub-object with ID \a theSubShapeId from the group
# @param theGroup is a GEOM group from which the new sub-shape is removed
# @param theSubShapeID is a sub-shape ID in the main object.
# \note Use method GetSubShapeID() to get an unique ID of the sub-shape
#
# @ref tui_working_with_groups_page "Example"
@ManageTransactions("GroupOp")
def RemoveObject(self, theGroup, theSubShapeID):
    """
    Remove a sub-object with ID theSubShapeID from the group.

    Parameters:
        theGroup      GEOM group from which the sub-shape is removed.
        theSubShapeID Sub-shape ID in the main object.

    Note:
        Use method GetSubShapeID() to get a unique ID of the sub-shape.
    """
    # See GEOM_TestOthers.py for a usage example.
    self.GroupOp.RemoveObject(theGroup, theSubShapeID)
    RaiseIfFailed("RemoveObject", self.GroupOp)
## Adds to the group all the given shapes. No errors, if some shapes are already included.
# @param theGroup is a GEOM group to which the new sub-shapes are added.
# @param theSubShapes is a list of sub-shapes to be added.
#
# @ref tui_working_with_groups_page "Example"
@ManageTransactions("GroupOp")
def UnionList (self, theGroup, theSubShapes):
    """
    Add all the given shapes to the group. No errors are raised if some
    shapes are already included.

    Parameters:
        theGroup     GEOM group to which the new sub-shapes are added.
        theSubShapes List of sub-shapes to be added.
    """
    # See GEOM_TestOthers.py for a usage example.
    self.GroupOp.UnionList(theGroup, theSubShapes)
    RaiseIfFailed("UnionList", self.GroupOp)
## Adds to the group all the given shapes. No errors, if some shapes are already included.
# @param theGroup is a GEOM group to which the new sub-shapes are added.
# @param theSubShapes is a list of indices of sub-shapes to be added.
#
# @ref swig_UnionIDs "Example"
@ManageTransactions("GroupOp")
def UnionIDs(self, theGroup, theSubShapes):
    """
    Add all the given shapes to the group. No errors are raised if some
    shapes are already included.

    Parameters:
        theGroup     GEOM group to which the new sub-shapes are added.
        theSubShapes List of indices of sub-shapes to be added.
    """
    # See GEOM_TestOthers.py for a usage example.
    self.GroupOp.UnionIDs(theGroup, theSubShapes)
    RaiseIfFailed("UnionIDs", self.GroupOp)
## Removes from the group all the given shapes. No errors, if some shapes are not included.
# @param theGroup is a GEOM group from which the sub-shapes are removed.
# @param theSubShapes is a list of sub-shapes to be removed.
#
# @ref tui_working_with_groups_page "Example"
@ManageTransactions("GroupOp")
def DifferenceList (self, theGroup, theSubShapes):
    """
    Remove all the given shapes from the group. No errors are raised if
    some shapes are not included.

    Parameters:
        theGroup     GEOM group from which the sub-shapes are removed.
        theSubShapes List of sub-shapes to be removed.
    """
    # See GEOM_TestOthers.py for a usage example.
    self.GroupOp.DifferenceList(theGroup, theSubShapes)
    RaiseIfFailed("DifferenceList", self.GroupOp)
## Removes from the group all the given shapes. No errors, if some shapes are not included.
# @param theGroup is a GEOM group from which the sub-shapes are removed.
# @param theSubShapes is a list of indices of sub-shapes to be removed.
#
# @ref swig_DifferenceIDs "Example"
@ManageTransactions("GroupOp")
def DifferenceIDs(self, theGroup, theSubShapes):
    """
    Remove all the given shapes from the group. No errors are raised if
    some shapes are not included.

    Parameters:
        theGroup     GEOM group from which the sub-shapes are removed.
        theSubShapes List of indices of sub-shapes to be removed.
    """
    # See GEOM_TestOthers.py for a usage example.
    self.GroupOp.DifferenceIDs(theGroup, theSubShapes)
    RaiseIfFailed("DifferenceIDs", self.GroupOp)
## Union of two groups.
# New group is created. It will contain all entities
# which are present in groups theGroup1 and theGroup2.
# @param theGroup1, theGroup2 are the initial GEOM groups
# to create the united group from.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return a newly created GEOM group.
#
# @ref tui_union_groups_anchor "Example"
@ManageTransactions("GroupOp")
def UnionGroups (self, theGroup1, theGroup2, theName=None):
    """
    Union of two groups.
    A new group is created, containing all entities present in either
    theGroup1 or theGroup2.

    Parameters:
        theGroup1, theGroup2 Initial GEOM groups to create the united
                             group from.
        theName              Object name; when specified, it is used for
                             result publication in the study. Otherwise,
                             if automatic publication is switched on, a
                             default name is used.

    Returns:
        A newly created GEOM group.
    """
    # See GEOM_TestOthers.py for a usage example.
    aUnion = self.GroupOp.UnionGroups(theGroup1, theGroup2)
    RaiseIfFailed("UnionGroups", self.GroupOp)
    self._autoPublish(aUnion, theName, "group")
    return aUnion
## Intersection of two groups.
# New group is created. It will contain only those entities
# which are present in both groups theGroup1 and theGroup2.
# @param theGroup1, theGroup2 are the initial GEOM groups to get common part of.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return a newly created GEOM group.
#
# @ref tui_intersect_groups_anchor "Example"
@ManageTransactions("GroupOp")
def IntersectGroups (self, theGroup1, theGroup2, theName=None):
    """
    Intersection of two groups.
    A new group is created, containing only the entities present in both
    theGroup1 and theGroup2.

    Parameters:
        theGroup1, theGroup2 Initial GEOM groups to get the common part of.
        theName              Object name; when specified, it is used for
                             result publication in the study. Otherwise,
                             if automatic publication is switched on, a
                             default name is used.

    Returns:
        A newly created GEOM group.
    """
    # See GEOM_TestOthers.py for a usage example.
    aCommon = self.GroupOp.IntersectGroups(theGroup1, theGroup2)
    RaiseIfFailed("IntersectGroups", self.GroupOp)
    self._autoPublish(aCommon, theName, "group")
    return aCommon
## Cut of two groups.
# New group is created. It will contain entities which are
# present in group theGroup1 but are not present in group theGroup2.
# @param theGroup1 is a GEOM group to include elements of.
# @param theGroup2 is a GEOM group to exclude elements of.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return a newly created GEOM group.
#
# @ref tui_cut_groups_anchor "Example"
@ManageTransactions("GroupOp")
def CutGroups (self, theGroup1, theGroup2, theName=None):
    """
    Cut of two groups.
    A new group is created, containing the entities present in theGroup1
    but absent from theGroup2.

    Parameters:
        theGroup1 GEOM group to include elements of.
        theGroup2 GEOM group to exclude elements of.
        theName   Object name; when specified, it is used for result
                  publication in the study. Otherwise, if automatic
                  publication is switched on, a default name is used.

    Returns:
        A newly created GEOM group.
    """
    # See GEOM_TestOthers.py for a usage example.
    aCut = self.GroupOp.CutGroups(theGroup1, theGroup2)
    RaiseIfFailed("CutGroups", self.GroupOp)
    self._autoPublish(aCut, theName, "group")
    return aCut
## Union of list of groups.
# New group is created. It will contain all entities that are
# present in groups listed in theGList.
# @param theGList is a list of GEOM groups to create the united group from.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return a newly created GEOM group.
#
# @ref tui_union_groups_anchor "Example"
@ManageTransactions("GroupOp")
def UnionListOfGroups (self, theGList, theName=None):
    """
    Union of a list of groups.
    A new group is created, containing all entities present in the groups
    listed in theGList.

    Parameters:
        theGList List of GEOM groups to create the united group from.
        theName  Object name; when specified, it is used for result
                 publication in the study. Otherwise, if automatic
                 publication is switched on, a default name is used.

    Returns:
        A newly created GEOM group.
    """
    # See GEOM_TestOthers.py for a usage example.
    aUnion = self.GroupOp.UnionListOfGroups(theGList)
    RaiseIfFailed("UnionListOfGroups", self.GroupOp)
    self._autoPublish(aUnion, theName, "group")
    return aUnion
## Intersection of a list of groups.
# New group is created. It will contain only entities
# which are present in groups listed in theGList.
# @param theGList is a list of GEOM groups to include elements of.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return a newly created GEOM group.
#
# @ref tui_intersect_groups_anchor "Example"
@ManageTransactions("GroupOp")
def IntersectListOfGroups (self, theGList, theName=None):
    """
    Intersection of a list of groups.
    A new group is created, containing only the entities present in every
    group listed in theGList.

    Parameters:
        theGList List of GEOM groups to include elements of.
        theName  Object name; when specified, it is used for result
                 publication in the study. Otherwise, if automatic
                 publication is switched on, a default name is used.

    Returns:
        A newly created GEOM group.
    """
    # See GEOM_TestOthers.py for a usage example.
    aCommon = self.GroupOp.IntersectListOfGroups(theGList)
    RaiseIfFailed("IntersectListOfGroups", self.GroupOp)
    self._autoPublish(aCommon, theName, "group")
    return aCommon
## Cut of lists of groups.
# New group is created. It will contain only entities
# which are present in groups listed in theGList1 but
# are not present in groups from theGList2.
# @param theGList1 is a list of GEOM groups to include elements of.
# @param theGList2 is a list of GEOM groups to exclude elements of.
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return a newly created GEOM group.
#
# @ref tui_cut_groups_anchor "Example"
@ManageTransactions("GroupOp")
def CutListOfGroups (self, theGList1, theGList2, theName=None):
    """
    Cut of lists of groups.
    A new group is created, containing only the entities present in the
    groups listed in theGList1 but absent from the groups in theGList2.

    Parameters:
        theGList1 List of GEOM groups to include elements of.
        theGList2 List of GEOM groups to exclude elements of.
        theName   Object name; when specified, it is used for result
                  publication in the study. Otherwise, if automatic
                  publication is switched on, a default name is used.

    Returns:
        A newly created GEOM group.
    """
    # See GEOM_TestOthers.py for a usage example.
    aCut = self.GroupOp.CutListOfGroups(theGList1, theGList2)
    RaiseIfFailed("CutListOfGroups", self.GroupOp)
    self._autoPublish(aCut, theName, "group")
    return aCut
## Returns a list of sub-objects ID stored in the group
# @param theGroup is a GEOM group for which a list of IDs is requested
#
# @ref swig_GetObjectIDs "Example"
@ManageTransactions("GroupOp")
def GetObjectIDs(self, theGroup):
    """
    Return the list of sub-object IDs stored in the group.

    Parameters:
        theGroup GEOM group for which the list of IDs is requested.
    """
    # See GEOM_TestOthers.py for a usage example.
    anIDs = self.GroupOp.GetObjects(theGroup)
    RaiseIfFailed("GetObjects", self.GroupOp)
    return anIDs
## Returns a type of sub-objects stored in the group
# @param theGroup is a GEOM group which type is returned.
#
# @ref swig_GetType "Example"
@ManageTransactions("GroupOp")
def GetType(self, theGroup):
    """
    Return the type of sub-objects stored in the group.

    Parameters:
        theGroup GEOM group whose type is returned.
    """
    # See GEOM_TestOthers.py for a usage example.
    aShapeType = self.GroupOp.GetType(theGroup)
    RaiseIfFailed("GetType", self.GroupOp)
    return aShapeType
## Convert a type of geom object from id to string value
# @param theId is a GEOM object type id.
# @return type of geom object (POINT, VECTOR, PLANE, LINE, TORUS, ... )
# @ref swig_GetType "Example"
def ShapeIdToType(self, theId):
    """
    Convert a GEOM object type from its numeric id to a string value.

    Parameters:
        theId GEOM object type id.

    Returns:
        Type of GEOM object (POINT, VECTOR, PLANE, LINE, TORUS, ...),
        or "Shape Id not exist." for an unknown id.
    """
    # Table lookup replaces a 48-branch if-chain; ids and names are the
    # ones enumerated by the original implementation.
    aTypeNames = {
        0: "COPY",            1: "IMPORT",          2: "POINT",
        3: "VECTOR",          4: "PLANE",           5: "LINE",
        6: "TORUS",           7: "BOX",             8: "CYLINDER",
        9: "CONE",            10: "SPHERE",         11: "PRISM",
        12: "REVOLUTION",     13: "BOOLEAN",        14: "PARTITION",
        15: "POLYLINE",       16: "CIRCLE",         17: "SPLINE",
        18: "ELLIPSE",        19: "CIRC_ARC",       20: "FILLET",
        21: "CHAMFER",        22: "EDGE",           23: "WIRE",
        24: "FACE",           25: "SHELL",          26: "SOLID",
        27: "COMPOUND",       28: "SUBSHAPE",       29: "PIPE",
        30: "ARCHIMEDE",      31: "FILLING",        32: "EXPLODE",
        33: "GLUED",          34: "SKETCHER",       35: "CDG",
        36: "FREE_BOUNDS",    37: "GROUP",          38: "BLOCK",
        39: "MARKER",         40: "THRUSECTIONS",   41: "COMPOUNDFILTER",
        42: "SHAPES_ON_SHAPE", 43: "ELLIPSE_ARC",   44: "3DSKETCHER",
        45: "FILLET_2D",      46: "FILLET_1D",      201: "PIPETSHAPE",
        }
    return aTypeNames.get(theId, "Shape Id not exist.")
## Returns a main shape associated with the group
# @param theGroup is a GEOM group for which a main shape object is requested
# @return a GEOM object which is a main shape for theGroup
#
# @ref swig_GetMainShape "Example"
@ManageTransactions("GroupOp")
def GetMainShape(self, theGroup):
    """
    Return the main shape associated with the group.

    Parameters:
        theGroup GEOM group for which the main shape object is requested.

    Returns:
        A GEOM object which is the main shape for theGroup.

    Example of usage: BoxCopy = geompy.GetMainShape(CreateGroup)
    """
    # See GEOM_TestOthers.py for a usage example.
    aMainShape = self.GroupOp.GetMainShape(theGroup)
    RaiseIfFailed("GetMainShape", self.GroupOp)
    return aMainShape
## Create group of edges of theShape, whose length is in range [min_length, max_length].
# If include_min/max == 0, edges with length == min/max_length will not be included in result.
# @param theShape given shape (see GEOM.GEOM_Object)
# @param min_length minimum length of edges of theShape
# @param max_length maximum length of edges of theShape
# @param include_max indicating if edges with length == max_length should be included in result, 1-yes, 0-no (default=1)
# @param include_min indicating if edges with length == min_length should be included in result, 1-yes, 0-no (default=1)
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return a newly created GEOM group of edges
#
# @ref swig_todo "Example"
def GetEdgesByLength (self, theShape, min_length, max_length, include_min = 1, include_max = 1, theName=None):
    """
    Create group of edges of theShape, whose length is in range [min_length, max_length].
    If include_min/max == 0, edges with length == min/max_length will not be included in result.

    Parameters:
        theShape    given shape
        min_length  minimum length of edges of theShape
        max_length  maximum length of edges of theShape
        include_max indicating if edges with length == max_length should be included in result, 1-yes, 0-no (default=1)
        include_min indicating if edges with length == min_length should be included in result, 1-yes, 0-no (default=1)
        theName     Object name; when specified, this parameter is used
                    for result publication in the study. Otherwise, if automatic
                    publication is switched on, default value is used for result name.

    Returns:
        a newly created GEOM group of edges, or None if no edge matched the criteria.
    """
    edges_in_range = []
    for edge in self.SubShapeAll(theShape, self.ShapeType["EDGE"]):
        # BasicProperties()[0] is the total length of the edge.
        length = self.BasicProperties(edge)[0]
        # Guard clauses replace the original nested conditionals, whose
        # "skip = 1" assignments were dead code (never read).
        if length < min_length or length > max_length:
            continue
        if (not include_min) and length == min_length:
            continue
        if (not include_max) and length == max_length:
            continue
        edges_in_range.append(edge)
    if not edges_in_range:
        # print() with a single argument is valid in both Python 2 and 3,
        # unlike the original Python-2-only print statement.
        print("No edges found by given criteria")
        return None
    # note: auto-publishing is done in self.CreateGroup()
    group_edges = self.CreateGroup(theShape, self.ShapeType["EDGE"], theName)
    self.UnionList(group_edges, edges_in_range)
    return group_edges
## Create group of edges of selected shape, whose length is in range [min_length, max_length].
# If include_min/max == 0, edges with length == min/max_length will not be included in result.
# @param min_length minimum length of edges of selected shape
# @param max_length maximum length of edges of selected shape
# @param include_max indicating if edges with length == max_length should be included in result, 1-yes, 0-no (default=1)
# @param include_min indicating if edges with length == min_length should be included in result, 1-yes, 0-no (default=1)
# @return a newly created GEOM group of edges
# @ref swig_todo "Example"
def SelectEdges (self, min_length, max_length, include_min = 1, include_max = 1):
    """
    Create group of edges of selected shape, whose length is in range [min_length, max_length].
    If include_min/max == 0, edges with length == min/max_length will not be included in result.

    Parameters:
        min_length minimum length of edges of selected shape
        max_length maximum length of edges of selected shape
        include_max indicating if edges with length == max_length should be included in result, 1-yes, 0-no (default=1)
        include_min indicating if edges with length == min_length should be included in result, 1-yes, 0-no (default=1)

    Returns:
        a newly created GEOM group of edges, or 0 when the GUI selection
        is empty or contains more than one shape.
    """
    # Exactly one shape must be selected in the GUI object browser (sg);
    # on misuse report the problem and return 0 (legacy convention kept
    # for backward compatibility with existing callers).
    nb_selected = sg.SelectedCount()
    if nb_selected < 1:
        print "Select a shape before calling this function, please."
        return 0
    if nb_selected > 1:
        print "Only one shape must be selected"
        return 0
    id_shape = sg.getSelected(0)
    shape = IDToObject( id_shape )
    # Delegate the actual filtering to GetEdgesByLength() on the selected shape.
    group_edges = self.GetEdgesByLength(shape, min_length, max_length, include_min, include_max)
    # Build a human-readable publication name such as
    # "Group of edges with 1 <= length < 2"; the comparison signs reflect
    # whether the bounds are inclusive.  Backquote repr and print statements
    # are Python 2 only, consistent with the rest of this module.
    left_str  = " < "
    right_str = " < "
    if include_min: left_str  = " <= "
    if include_max: right_str = " <= "
    self.addToStudyInFather(shape, group_edges, "Group of edges with " + `min_length`
                            + left_str + "length" + right_str + `max_length`)
    sg.updateObjBrowser(1)
    return group_edges
# end of l3_groups
## @}
#@@ insert new functions before this line @@ do not remove this line @@#
## Create a copy of the given object
#
# @param theOriginal geometry object for copy
# @param theName Object name; when specified, this parameter is used
# for result publication in the study. Otherwise, if automatic
# publication is switched on, default value is used for result name.
#
# @return New GEOM_Object, containing the copied shape.
#
# @ingroup l1_geomBuilder_auxiliary
# @ref swig_MakeCopy "Example"
@ManageTransactions("InsertOp")
def MakeCopy(self, theOriginal, theName=None):
    """
    Create a copy of the given object.

    Parameters:
        theOriginal geometry object for copy
        theName Object name; when specified, this parameter is used
                for result publication in the study. Otherwise, if automatic
                publication is switched on, default value is used for result name.

    Returns:
        New GEOM_Object, containing the copied shape.

    Example of usage: Copy = geompy.MakeCopy(Box)
    """
    # Example: see GEOM_TestAll.py
    aCopy = self.InsertOp.MakeCopy(theOriginal)
    # Turn an engine-side failure into a Python exception.
    RaiseIfFailed("MakeCopy", self.InsertOp)
    # Publish in the study (no-op unless auto-publishing is on or theName given).
    self._autoPublish(aCopy, theName, "copy")
    return aCopy
## Add Path to load python scripts from
# @param Path a path to load python scripts from
# @ingroup l1_geomBuilder_auxiliary
def addPath(self,Path):
    """
    Add Path to load python scripts from.

    The path is appended to sys.path only when it is not already present,
    so repeated calls with the same path do not grow the search path.

    Parameters:
        Path a path to load python scripts from
    """
    # Membership test instead of count(): same effect, clearer, one O(n) scan.
    if Path not in sys.path:
        sys.path.append(Path)
## Load marker texture from the file
# @param Path a path to the texture file
# @return unique texture identifier
# @ingroup l1_geomBuilder_auxiliary
@ManageTransactions("InsertOp")
def LoadTexture(self, Path):
    """
    Load marker texture from the file.

    Parameters:
        Path a path to the texture file

    Returns:
        unique texture identifier
    """
    # Example: see GEOM_TestAll.py
    textureId = self.InsertOp.LoadTexture(Path)
    RaiseIfFailed("LoadTexture", self.InsertOp)
    return textureId
## Get internal name of the object based on its study entry
# @note This method does not provide an unique identifier of the geometry object.
# @note This is internal function of GEOM component, though it can be used outside it for
# appropriate reason (e.g. for identification of geometry object).
# @param obj geometry object
# @return unique object identifier
# @ingroup l1_geomBuilder_auxiliary
def getObjectID(self, obj):
    """
    Get internal name of the object based on its study entry.

    Note: this method does not provide an unique identifier of the geometry
    object. It is an internal function of GEOM component, though it can be
    used outside GEOM for appropriate reason (e.g. for identification of
    geometry object).

    Parameters:
        obj geometry object

    Returns:
        "GEOM_" + the last component of the study entry, or an empty
        string when the object has no study entry
    """
    entry = salome.ObjectToID(obj)
    if entry is None:
        return ""
    components = entry.split(":")
    if not components:
        return ""
    # The last colon-separated component of the study entry names the object.
    return "GEOM_" + components[-1]
## Add marker texture. @a Width and @a Height parameters
# specify width and height of the texture in pixels.
# If @a RowData is @c True, @a Texture parameter should represent texture data
# packed into the byte array. If @a RowData is @c False (default), @a Texture
# parameter should be unpacked string, in which '1' symbols represent opaque
# pixels and '0' represent transparent pixels of the texture bitmap.
#
# @param Width texture width in pixels
# @param Height texture height in pixels
# @param Texture texture data
# @param RowData if @c True, @a Texture data are packed in the byte stream
# @return unique texture identifier
# @ingroup l1_geomBuilder_auxiliary
@ManageTransactions("InsertOp")
def AddTexture(self, Width, Height, Texture, RowData=False):
    """
    Add marker texture. Width and Height parameters specify width and height
    of the texture in pixels.

    If RowData is True, Texture parameter should represent texture data
    packed into the byte array. If RowData is False (default), Texture
    parameter should be unpacked string, in which '1' symbols represent
    opaque pixels and '0' represent transparent pixels of the texture bitmap.

    Parameters:
        Width texture width in pixels
        Height texture height in pixels
        Texture texture data
        RowData if True, Texture data are packed in the byte stream

    Returns:
        unique texture identifier
    """
    # Unpacked ('0'/'1' string) data must be converted to packed bytes first.
    if not RowData:
        Texture = PackData(Texture)
    textureId = self.InsertOp.AddTexture(Width, Height, Texture)
    RaiseIfFailed("AddTexture", self.InsertOp)
    return textureId
## Creates a new folder object. It is a container for any GEOM objects.
# @param Name name of the container
# @param Father parent object. If None,
# folder under 'Geometry' root object will be created.
# @return a new created folder
# @ingroup l1_publish_data
def NewFolder(self, Name, Father=None):
    """
    Create a new folder object. It is an auxiliary container for any GEOM objects.

    Parameters:
        Name name of the container
        Father parent object. If None, folder under 'Geometry' root object
               will be created.

    Returns:
        a new created folder
    """
    # Default to the component root when no parent folder is supplied.
    parent = Father if Father else self.father
    return self.CreateFolder(Name, parent)
## Move object to the specified folder
# @param Object object to move
# @param Folder target folder
# @ingroup l1_publish_data
def PutToFolder(self, Object, Folder):
    """
    Move object to the specified folder.

    Parameters:
        Object object to move
        Folder target folder
    """
    self.MoveToFolder(Object, Folder)
## Move list of objects to the specified folder
# @param ListOfSO list of objects to move
# @param Folder target folder
# @ingroup l1_publish_data
def PutListToFolder(self, ListOfSO, Folder):
    """
    Move list of objects to the specified folder.

    Parameters:
        ListOfSO list of objects to move
        Folder target folder
    """
    self.MoveListToFolder(ListOfSO, Folder)
## @addtogroup l2_field
## @{
## Creates a field
# @param shape the shape the field lies on
# @param name the field name
# @param type type of field data: 0 - bool, 1 - int, 2 - double, 3 - string
# @param dimension dimension of the shape the field lies on
# 0 - VERTEX, 1 - EDGE, 2 - FACE, 3 - SOLID, -1 - whole shape
# @param componentNames names of components
# @return a created field
@ManageTransactions("FieldOp")
def CreateField(self, shape, name, type, dimension, componentNames):
    """
    Creates a field.

    Parameters:
        shape the shape the field lies on
        name the field name
        type type of field data: 0 - bool, 1 - int, 2 - double, 3 - string,
             or directly one of the GEOM.FDT_* enum values
        dimension dimension of the shape the field lies on
                  0 - VERTEX, 1 - EDGE, 2 - FACE, 3 - SOLID, -1 - whole shape
        componentNames names of components

    Returns:
        a created field
    """
    # Accept an integer type code and map it onto the corresponding
    # GEOM.FDT_* enum value; reject codes outside [0, 3].
    if isinstance( type, int ):
        if type < 0 or type > 3:
            raise RuntimeError, "CreateField : Error: data type must be within [0-3] range"
        type = [GEOM.FDT_Bool,GEOM.FDT_Int,GEOM.FDT_Double,GEOM.FDT_String][type]
    f = self.FieldOp.CreateField( shape, name, type, dimension, componentNames)
    RaiseIfFailed("CreateField", self.FieldOp)
    # Publish through the module-level geomBuilder instance so the field
    # appears in the study under the given name.
    global geom
    geom._autoPublish( f, "", name)
    return f
## Removes a field from the GEOM component
# @param field the field to remove
def RemoveField(self, field):
    """Removes a field from the GEOM component.

    Accepts either a raw GEOM._objref_GEOM_Field CORBA reference or a
    geomField wrapper; raises RuntimeError for any other object.
    """
    global geom
    if isinstance( field, GEOM._objref_GEOM_Field ):
        geom.RemoveObject( field )
    elif isinstance( field, geomField ):
        # geomField wraps the CORBA reference in its .field attribute.
        geom.RemoveObject( field.field )
    else:
        raise RuntimeError, "RemoveField() : the object is not a field"
    return
## Returns number of fields on a shape
@ManageTransactions("FieldOp")
def CountFields(self, shape):
    """Return the number of fields defined on a shape."""
    fieldCount = self.FieldOp.CountFields( shape )
    RaiseIfFailed("CountFields", self.FieldOp)
    return fieldCount
## Returns all fields on a shape
@ManageTransactions("FieldOp")
def GetFields(self, shape):
    """Return all fields defined on a shape."""
    fields = self.FieldOp.GetFields( shape )
    RaiseIfFailed("GetFields", self.FieldOp)
    return fields
## Returns a field on a shape by its name
@ManageTransactions("FieldOp")
def GetField(self, shape, name):
    """Return the field with the given name defined on a shape."""
    field = self.FieldOp.GetField( shape, name )
    RaiseIfFailed("GetField", self.FieldOp)
    return field
# end of l2_field
## @}
import omniORB
# Register the new proxy for GEOM_Gen
omniORB.registerObjref(GEOM._objref_GEOM_Gen._NP_RepositoryId, geomBuilder)
## Field on Geometry
# @ingroup l2_field
class geomField( GEOM._objref_GEOM_Field ):
    """Client-side wrapper for a GEOM field: a set of named components with
    time-stamped value steps attached to a shape."""

    def __init__(self):
        GEOM._objref_GEOM_Field.__init__(self)
        # NOTE: this stores the *class*, not an instance; the accessors below
        # therefore call the CORBA methods unbound, passing self explicitly.
        self.field = GEOM._objref_GEOM_Field
        return

    ## Returns the shape the field lies on
    def getShape(self):
        "Returns the shape the field lies on"
        return self.field.GetShape(self)

    ## Returns the field name
    def getName(self):
        "Returns the field name"
        return self.field.GetName(self)

    ## Returns type of field data as integer [0-3]
    def getType(self):
        "Returns type of field data as an integer in [0-3]"
        # ._v holds the integer value of the returned enum
        return self.field.GetDataType(self)._v

    ## Returns type of field data:
    # one of GEOM.FDT_Bool, GEOM.FDT_Int, GEOM.FDT_Double, GEOM.FDT_String
    def getTypeEnum(self):
        "Returns type of field data as a GEOM.FDT_* enum value"
        return self.field.GetDataType(self)

    ## Returns dimension of the shape the field lies on:
    # 0 - VERTEX, 1 - EDGE, 2 - FACE, 3 - SOLID, -1 - whole shape
    def getDimension(self):
        """Returns dimension of the shape the field lies on:
        0 - VERTEX, 1 - EDGE, 2 - FACE, 3 - SOLID, -1 - whole shape"""
        return self.field.GetDimension(self)

    ## Returns names of components
    def getComponents(self):
        "Returns names of components"
        return self.field.GetComponents(self)

    ## Adds a time step to the field
    # @param step the time step number further used as the step identifier
    # @param stamp the time step time
    # @param values the values of the time step
    def addStep(self, step, stamp, values):
        "Adds a time step to the field"
        stp = self.field.AddStep( self, step, stamp )
        if not stp:
            raise RuntimeError, \
                  "Field.addStep() : Error: step %s already exists in this field"%step
        global geom
        # Publish the new step in the study under an auto-generated name.
        geom._autoPublish( stp, "", "Step %s, %s"%(step,stamp))
        self.setValues( step, values )
        return stp

    ## Remove a time step from the field
    def removeStep(self,step):
        "Remove a time step from the field"
        stepSO = None
        try:
            # Best effort: locate the study object of the step so it can be
            # removed from the study tree as well; ignore lookup failures.
            stepObj = self.field.GetStep( self, step )
            if stepObj:
                stepSO = geom.myStudy.FindObjectID( stepObj.GetStudyEntry() )
        except:
            #import traceback
            #traceback.print_exc()
            pass
        self.field.RemoveStep( self, step )
        if stepSO:
            geom.myBuilder.RemoveObjectWithChildren( stepSO )
        return

    ## Returns number of time steps in the field
    def countSteps(self):
        "Returns number of time steps in the field"
        return self.field.CountSteps(self)

    ## Returns a list of time step IDs in the field
    def getSteps(self):
        "Returns a list of time step IDs in the field"
        return self.field.GetSteps(self)

    ## Returns a time step by its ID
    def getStep(self,step):
        "Returns a time step by its ID; raises RuntimeError if absent"
        stp = self.field.GetStep(self, step)
        if not stp:
            raise RuntimeError, "Step %s is missing from this field"%step
        return stp

    ## Returns the time of the field step
    def getStamp(self,step):
        "Returns the time of the field step"
        return self.getStep(step).GetStamp()

    ## Changes the time of the field step
    def setStamp(self, step, stamp):
        "Changes the time of the field step"
        return self.getStep(step).SetStamp(stamp)

    ## Returns values of the field step
    def getValues(self, step):
        "Returns values of the field step"
        return self.getStep(step).GetValues()

    ## Changes values of the field step
    def setValues(self, step, values):
        "Changes values of the field step"
        stp = self.getStep(step)
        errBeg = "Field.setValues(values) : Error: "
        try:
            ok = stp.SetValues( values )
        except Exception, e:
            excStr = str(e)
            # Translate the opaque server-side type error into a message
            # naming the expected component data type.
            if excStr.find("WrongPythonType") > 0:
                raise RuntimeError, errBeg +\
                      "wrong type of values, %s values are expected"%str(self.getTypeEnum())[4:]
            raise RuntimeError, errBeg + str(e)
        if not ok:
            # Distinguish a length mismatch from any other failure.
            nbOK = self.field.GetArraySize(self)
            nbKO = len(values)
            if nbOK != nbKO:
                raise RuntimeError, errBeg + "len(values) must be %s but not %s"%(nbOK,nbKO)
            else:
                raise RuntimeError, errBeg + "failed"
        return

    pass # end of class geomField
# Register the new proxy for GEOM_Field
omniORB.registerObjref(GEOM._objref_GEOM_Field._NP_RepositoryId, geomField)
## Create a new geomBuilder instance.The geomBuilder class provides the Python
# interface to GEOM operations.
#
# Typical use is:
# \code
# import salome
# salome.salome_init()
# from salome.geom import geomBuilder
# geompy = geomBuilder.New(salome.myStudy)
# \endcode
# @param study SALOME study, generally obtained by salome.myStudy.
# @param instance CORBA proxy of GEOM Engine. If None, the default Engine is used.
# @return geomBuilder instance
def New( study, instance=None):
    """
    Create a new geomBuilder instance. The geomBuilder class provides the
    Python interface to GEOM operations.

    Typical use is:
        import salome
        salome.salome_init()
        from salome.geom import geomBuilder
        geompy = geomBuilder.New(salome.myStudy)

    Parameters:
        study SALOME study, generally obtained by salome.myStudy.
        instance CORBA proxy of GEOM Engine. If None, the default Engine is used.

    Returns:
        geomBuilder instance
    """
    #print "New geomBuilder ", study, instance
    global engine, geom, doLcc
    engine = instance
    # Without an explicit engine, request lookup of the default one.
    if engine is None:
        doLcc = True
    geom = geomBuilder()
    assert isinstance(geom,geomBuilder), "Geom engine class is %s but should be geomBuilder.geomBuilder. Import geomBuilder before creating the instance."%geom.__class__
    geom.init_geom(study)
    return geom
# Register methods from the plug-ins in the geomBuilder class.
# The GEOM_PluginsList environment variable holds a colon-separated list of
# plugin names; each plugin "<name>" is expected to provide a
# salome.<name>.<name>Builder module whose public functions are grafted
# onto geomBuilder.
plugins_var = os.environ.get( "GEOM_PluginsList" )

plugins = None
if plugins_var is not None:
    plugins = plugins_var.split( ":" )
    # drop empty entries produced by leading/trailing/double colons
    plugins=filter(lambda x: len(x)>0, plugins)
if plugins is not None:
    for pluginName in plugins:
        pluginBuilderName = pluginName + "Builder"
        try:
            exec( "from salome.%s.%s import *" % (pluginName, pluginBuilderName))
        except Exception, e:
            # A broken plugin must not prevent the others from loading.
            from salome_utils import verbose
            print "Exception while loading %s: %s" % ( pluginBuilderName, e )
            continue
        exec( "from salome.%s import %s" % (pluginName, pluginBuilderName))
        plugin = eval( pluginBuilderName )

        # add methods from plugin module to the geomBuilder class
        for k in dir( plugin ):
            if k[0] == '_': continue
            method = getattr( plugin, k )
            if type( method ).__name__ == 'function':
                # never overwrite an existing geomBuilder method
                if not hasattr( geomBuilder, k ):
                    setattr( geomBuilder, k, method )
            pass
        pass
    del pluginName
    pass
pass
|
lgpl-2.1
| -8,497,507,476,621,121,000
| 48.652687
| 169
| 0.597899
| false
| 4.442548
| true
| false
| false
|
vindar/mtools
|
tools/mtools-project.py
|
1
|
8519
|
#!/usr/bin/env python
#
# Copyright 2015 Arvind Singh
# This file is part of the mtools library.
#
# mtools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with mtools If not, see <http://www.gnu.org/licenses/>.
############################################################################
# #
# script: create an empty mtools project. #
# #
############################################################################
################### main.cpp ####################
mainFile = r"""/***********************************************
* project: [PROJECT_NAME_PLH]
* date: [PROJECT_DATE_PLH]
***********************************************/
#include "mtools/mtools.hpp"
int main(int argc, char *argv[])
{
MTOOLS_SWAP_THREADS(argc,argv); // required on OSX, does nothing on Linux/Windows
mtools::parseCommandLine(argc,argv,true); // parse the command line, interactive mode
mtools::cout << "Hello World\n";
mtools::cout.getKey();
return 0;
}
/* end of file main.cpp */
"""
################### CMakeLists.txt ####################
cmakeFile = r"""################################################
# CMakeLists for project: [PROJECT_NAME_PLH]
# date: [PROJECT_DATE_PLH]
#
# generated by mtools-project.py
################################################
cmake_minimum_required(VERSION 3.10.1)
if( WIN32 )
# look for vcpkg on windows
if (DEFINED ENV{VCPKG_DIR})
string(REPLACE "\\" "/" _vcpkg_dir "$ENV{VCPKG_DIR}")
else ()
find_file( _vcpkg_exe "vcpkg.exe" PATHS ENV PATH)
if (_vcpkg_exe)
get_filename_component(_vcpkg_dir ${_vcpkg_exe} DIRECTORY)
endif()
endif()
if (_vcpkg_dir)
set(CMAKE_TOOLCHAIN_FILE "${_vcpkg_dir}/scripts/buildsystems/vcpkg.cmake")
message(STATUS "Windows: vcpkg found at [${_vcpkg_dir}]")
else()
message(STATUS "Windows: vcpkg not found.")
endif()
# only Debug and Release configurations
SET(CMAKE_CONFIGURATION_TYPES "Debug;Release;RelWithDebInfo" CACHE STRING "" FORCE)
endif()
# use the same compilers as that used for compiling mtools
set(CMAKE_CXX_COMPILER "${MTOOLS_CXX_COMPILER}" CACHE STRING "" FORCE)
set(CMAKE_C_COMPILER "${MTOOLS_C_COMPILER}" CACHE STRING "" FORCE)
project([PROJECT_NAME_PLH])
# release is the default build type
if (NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE Release)
endif ()
# add the project main directory as a possible location for findXXX.cmake scripts.
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${PROJECT_SOURCE_DIR})
find_package(mtools REQUIRED)
######### external dependencies #########
# (look also for FindXXX.cmake in the project dir.)
# for exemple
# find_package(GUROBI REQUIRED)
#########################################
file(GLOB project_SRC "*.cpp" "*.hpp" "*.h")
add_executable("${PROJECT_NAME}" ${project_SRC})
target_link_libraries("${PROJECT_NAME}" PUBLIC mtools)
# compile options
if(WIN32)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /DMTOOLS_DEBUG_FLAG")
# hack for RelWithDebINfo configuration otherwise compile never ends on MSVC
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "/Zi /Gm- /Ox /Ob0 /DMTOOLS_DEBUG_FLAG")
else()
target_compile_options("${PROJECT_NAME}" PUBLIC "-std=c++17")
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -DMTOOLS_DEBUG_FLAG -Wall")
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -DMTOOLS_DEBUG_FLAG -Wall")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -Wall")
endif()
######### external dependencies #########
# add here other dependencies such as:
# find_package(GSL)
# target_link_libraries("${PROJECT_NAME}" PUBLIC GSL::gsl)
#########################################
# set the project as the default startup project in visual studio.
set_property(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY VS_STARTUP_PROJECT "${PROJECT_NAME}")
# move CMake specific project inside filter "CMakePredefinedTargets".
set_property(GLOBAL PROPERTY USE_FOLDERS ON)
set(PREDEFINED_TARGETS_FOLDER "CustomTargets")
message(STATUS "")
message(STATUS "")
if (NOT WIN32)
message(STATUS "Project ${PROJECT_NAME} created for make with configuration ${CMAKE_BUILD_TYPE}")
message(STATUS " - Debug : [${CMAKE_CXX_FLAGS_DEBUG}]")
message(STATUS " - RelWithDebInfo : [${CMAKE_CXX_FLAGS_RELWITHDEBINFO}]")
message(STATUS " - Release : [${CMAKE_CXX_FLAGS_RELEASE}]")
else()
message(STATUS "Project ${PROJECT_NAME} created for MSVC with configurations")
message(STATUS " - Debug : [${CMAKE_CXX_FLAGS_DEBUG}]")
message(STATUS " - RelWithDebInfo : [${CMAKE_CXX_FLAGS_RELWITHDEBINFO}]")
message(STATUS " - Release : [${CMAKE_CXX_FLAGS_RELEASE}]")
endif()
message(STATUS "")
message(STATUS "")
#end of file
"""
################### clean_build.py ####################
cleanbuildFile = r"""#!/usr/bin/env python
#
# project: [PROJECT_NAME_PLH]
# date: [PROJECT_DATE_PLH]
# script that cleans the /build sub-directory
#
import shutil
import os
#import time
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)
if (os.path.isdir("build")):
shutil.rmtree('build')
# time.sleep(1)
os.makedirs('build')
with open('build/build_directory','w') as out:
out.write('This directory (will) contain the CMake generated project files.')
"""
################### run_cmake.py ####################
runcmakeFile = r"""#!/usr/bin/env python
#
# project: [PROJECT_NAME_PLH]
# date: [PROJECT_DATE_PLH]
# Invoque cmake to build the project.
# usage: ./run_cmake [CMAKE_OPTIONS...]
#
import sys
import os
import subprocess
carg = sys.argv
del carg[0]
carg.insert(0,'cmake');
carg.append('..');
# on windows, we build x64 binaries
if sys.platform.startswith('win32'):
carg.insert(1,'-A');
carg.insert(2,'x64');
# invoque cmake with the correct arguments
if (not os.path.exists('build')):
os.makedirs('build')
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname + "/build")
subprocess.call(carg)
"""
############################################################################
# the python script
import os
import shutil
import sys
import datetime
import subprocess
# Python 2 and 3:
from builtins import input
myinput = input
# display an error msg
def error(msg):
    """Print an error message, wait for the user, and abort the script."""
    print("*** ERROR ***")
    print(msg)
    # BUG FIX: raw_input() does not exist on Python 3; use the portable
    # myinput alias (builtins.input) defined at module level.
    myinput("Press Enter to continue...")
    # NOTE(review): exits with status 0 even on error -- kept for backward
    # compatibility, though a nonzero status would be more conventional.
    sys.exit(0)
# make replacement in string then save the file
def repl(str,filename):
    """Substitute the project placeholders in *str* and write the result
    to *filename* inside the project directory.

    Parameters:
        str      template text containing [PROJECT_NAME_PLH]/[PROJECT_DATE_PLH]
        filename file name to create under project_dir

    On failure the module-level error() helper aborts the script.
    """
    str = str.replace("[PROJECT_NAME_PLH]",project_name)
    str = str.replace("[PROJECT_DATE_PLH]",project_date)
    filepath = project_dir + "/" + filename
    try:
        # 'with' guarantees the file is closed even if write() fails
        # (the original leaked the handle on a failed write).
        with open(filepath,"w") as fout:
            fout.write(str)
    except (IOError, OSError):
        # Narrowed from a bare 'except:' which also swallowed
        # KeyboardInterrupt/SystemExit.
        error("cannot write file [" + filepath + "]")
# get the date (ISO format, embedded into the generated file headers)
project_date = str(datetime.date.today())

# get the project name: first command-line argument, or ask interactively
if (len(sys.argv) > 1):
    project_name = sys.argv[1]
else:
    project_name = myinput("Name of the project to create ? ")

# create the project directory under the current working directory
project_dir = os.getcwd() + "/" + project_name
project_build = project_dir + "/build"
if os.path.exists(project_dir):
    error("directory [" + project_dir + "] already exist")
try:
    os.makedirs(project_dir)
except:
    # NOTE(review): bare except kept as-is; error() aborts the script anyway
    error("cannot create project directory [" + project_dir + "]")

# instantiate the templates into the new project directory
repl(mainFile,"main.cpp")
repl(cmakeFile,"CMakeLists.txt")
repl(runcmakeFile,"run_cmake.py")
repl(cleanbuildFile,"clean_build.py")

# prepare an empty build/ sub-directory via the generated helper
os.chdir(project_dir)
os.system("python clean_build.py")

# uncomment below to run cmake right way.
#os.system("python run_cmake.py")

print("\n*** Project " + project_name + " created ! ***")
if sys.platform.startswith('win32'):
    myinput("Press Enter to continue...")

# end of script mtools-project.py
############################################################################
|
gpl-3.0
| -2,650,942,873,968,966,000
| 25.538941
| 98
| 0.620965
| false
| 3.335552
| false
| false
| false
|
adieyal/billtracker
|
code/billtracker/scrapers/models.py
|
1
|
3436
|
from django.db import models
from django.conf import settings
import bills.models as bill_models
class GovInfoScraper(models.Model):
    """Scraped record of a bill in its pre-parliamentary comment stage.

    Acts as a staging area: once reviewed, convert_to_bill() turns the
    scraped data into Bill/PreparliamentaryStage records.
    """
    bill_name = models.CharField(max_length=100)
    bill_code = models.CharField(max_length=10)
    # Public-comment window for the draft bill.
    comment_startdate = models.DateField()
    comment_enddate = models.DateField()
    # Set automatically when the scraper stores the record.
    scrape_date = models.DateTimeField(auto_now_add=True)
    url = models.URLField(null=True, blank=True)
    # Flipped to True by convert_to_bill(); guards against double conversion.
    reviewed = models.BooleanField(default=False)

    def convert_to_bill(self):
        """Create a Bill plus its PreparliamentaryStage from this scrape.

        Raises:
            bill_models.BillException: if this record was already converted.
        Returns:
            the newly created Bill.
        """
        if self.reviewed:
            raise bill_models.BillException("Cannot re-convert once already converted")
        bill = bill_models.Bill.objects.create(
            name=self.bill_name,
            code=self.bill_code,
        )
        bill_models.PreparliamentaryStage.objects.create(
            bill=bill,
            comments_start=self.comment_startdate,
            comments_end=self.comment_enddate,
            document_url=self.url
        )
        self.reviewed = True
        self.save()
        return bill

    def __unicode__(self):
        return "[%s] %s" % (self.bill_code, self.bill_name)
class BillsBeforeParliamentScraper(models.Model):
    """Scraped record of a bill currently before parliament.

    Once reviewed, convert_to_bill() creates (or reuses) the Bill and
    attaches a ParliamentIntroduction and optional committee record.
    """
    bill_name = models.CharField(max_length=100)
    bill_code = models.CharField(max_length=10)
    introduced_by = models.CharField(max_length=100)
    date_introduced = models.DateField()
    # Current position of the bill in the legislative process.
    bill_stage = models.CharField(max_length=3, choices=[
        ("1", "National Assembly"),
        ("2", "NCOP"),
        ("3", "Sent to President"),
        ("4", "Finalised in an Act"),
        ("5", "Withdrawn"),
    ])
    document_number = models.CharField(max_length=10)
    url = models.URLField(null=True, blank=True)
    committee = models.CharField(max_length=100, null=True, blank=True)
    # Flipped to True by convert_to_bill(); guards against double conversion.
    reviewed = models.BooleanField(default=False)

    # TODO - add NCOP and Presidential stages
    def convert_to_bill(self):
        """Create or reuse a Bill (matched by code) and record its
        introduction into parliament.

        Raises:
            bill_models.BillException: if this record was already converted.
        Returns:
            the (possibly pre-existing) Bill.
        """
        if self.reviewed:
            raise bill_models.BillException("Cannot re-convert once already converted")
        # Reuse an existing Bill with the same code if one was created
        # earlier (e.g. by the pre-parliamentary scraper).
        try:
            bill = bill_models.Bill.objects.get(code=self.bill_code)
        except bill_models.Bill.DoesNotExist:
            bill = bill_models.Bill.objects.create(
                name=self.bill_name,
                code=self.bill_code,
            )
        bill_models.ParliamentIntroduction.objects.create(
            bill=bill,
            introduced_by=self.introduced_by,
            date_introduced=self.date_introduced,
            document_number=self.document_number,
            url=self.url
        )
        if self.committee:
            bill_models.ParliamentPortfolioCommittee.objects.create(
                bill=bill,
                committee=self.committee
            )
        self.reviewed = True
        self.save()
        return bill

    def __unicode__(self):
        return "[%s] %s" % (self.bill_code, self.bill_name)

    class Meta:
        verbose_name_plural = "Bills before parliament"
        verbose_name = "Bills before parliament"
class ParliamentMinutesScraper(models.Model):
    """Scraped parliamentary minutes document stored on disk."""
    # Uploaded file; stored under the configured minutes directory.
    filename = models.FileField(upload_to=settings.DIR_PARLIAMENT_MINUTES)
    house = models.CharField(max_length=20)
    language = models.CharField(max_length=20)
    # Date of the sitting the minutes describe.
    date = models.DateField()
    # Set automatically when the scraper stores the record.
    scrape_date = models.DateTimeField(auto_now_add=True)
    url = models.URLField()

    def __unicode__(self):
        return "%s - %s" % (self.scrape_date, self.house)
|
bsd-3-clause
| -3,848,321,381,667,851,000
| 32.686275
| 87
| 0.625146
| false
| 3.613039
| false
| false
| false
|
tamentis/psutil
|
examples/process_detail.py
|
1
|
4156
|
#!/usr/bin/env python
#
# $Id$
#
# Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Print detailed information about a process.
"""
import os
import datetime
import socket
import sys
import psutil
def convert_bytes(n):
    """Return *n* bytes as a human-readable string, e.g. '1.5K', '2.0G'.

    Values below 1024 are returned unscaled with a 'B' suffix.
    """
    symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    # Walk the units from largest to smallest; the first threshold that
    # fits determines the suffix.
    for power, symbol in reversed(list(enumerate(symbols))):
        threshold = 1 << (power + 1) * 10
        if n >= threshold:
            return '%.1f%s' % (float(n) / threshold, symbol)
    return "%sB" % n
def print_(a, b):
    """Write an aligned 'label value' line; colorize the label on a POSIX tty."""
    if sys.stdout.isatty() and os.name == 'posix':
        line = '\x1b[1;32m%-17s\x1b[0m %s' %(a, b)
    else:
        line = '%-15s %s' %(a, b)
    # python 2/3 compatibility layer: write+flush instead of print
    sys.stdout.write(line + '\n')
    sys.stdout.flush()
def run(pid):
    """Print a detailed, human-readable report for the process *pid*.

    NOTE: uses the legacy (pre-2.0) psutil API -- attributes like p.name
    and methods like p.get_cpu_times() -- consistently with the rest of
    this example; do not mix with the modern psutil API.
    """
    p = psutil.Process(pid)
    if p.parent:
        parent = '(%s)' % p.parent.name
    else:
        parent = ''
    # BUG FIX: month must be formatted with %m; %M is the *minutes*
    # directive and produced dates like "2012-30-05".
    started = datetime.datetime.fromtimestamp(p.create_time).strftime('%Y-%m-%d %H:%M')
    # I/O counters are not available on every platform; guard both here
    # and at the printing site below.
    if hasattr(p, 'get_io_counters'):
        io = p.get_io_counters()
    mem = p.get_memory_info()
    mem = '%s%% (resident=%s, virtual=%s) ' %(round(p.get_memory_percent(), 1),
                                              convert_bytes(mem.rss),
                                              convert_bytes(mem.vms))
    cpu_times = p.get_cpu_times()
    cpu_percent = p.get_cpu_percent(0)
    children = p.get_children()
    files = p.get_open_files()
    threads = p.get_threads()
    connections = p.get_connections()

    print_('pid', p.pid)
    print_('name', p.name)
    print_('exe', p.exe)
    print_('parent', '%s %s' % (p.ppid, parent))
    print_('cmdline', ' '.join(p.cmdline))
    print_('started', started)
    print_('user', p.username)
    if os.name == 'posix':
        print_('uids', 'real=%s, effective=%s, saved=%s' % p.uids)
        print_('gids', 'real=%s, effective=%s, saved=%s' % p.gids)
        print_('terminal', p.terminal or '')
    if hasattr(p, 'getcwd'):
        print_('cwd', p.getcwd())
    print_('memory', mem)
    print_('cpu', '%s%% (user=%s, system=%s)' % (cpu_percent,
                                                 cpu_times.user,
                                                 cpu_times.system))
    print_('status', p.status)
    print_('niceness', p.nice)
    print_('num threads', p.get_num_threads())
    if hasattr(p, 'get_io_counters'):
        print_('I/O', 'bytes-read=%s, bytes-written=%s' % \
                   (convert_bytes(io.read_bytes),
                    convert_bytes(io.write_bytes)))
    if children:
        print_('children', '')
        for child in children:
            print_('', 'pid=%s name=%s' % (child.pid, child.name))
    if files:
        print_('open files', '')
        for fobj in files:  # renamed from 'file' to avoid shadowing the builtin
            print_('', 'fd=%s %s ' % (fobj.fd, fobj.path))
    if threads:
        print_('running threads', '')
        for thread in threads:
            print_('', 'id=%s, user-time=%s, sys-time=%s' \
                   % (thread.id, thread.user_time, thread.system_time))
    if connections:
        print_('open connections', '')
        for conn in connections:
            # renamed from 'type' to avoid shadowing the builtin
            if conn.type == socket.SOCK_STREAM:
                conn_type = 'TCP'
            elif conn.type == socket.SOCK_DGRAM:
                conn_type = 'UDP'
            else:
                conn_type = 'UNIX'
            lip, lport = conn.local_address
            if not conn.remote_address:
                rip, rport = '*', '*'
            else:
                rip, rport = conn.remote_address
            print_('', '%s:%s -> %s:%s type=%s status=%s' \
                   % (lip, lport, rip, rport, conn_type, conn.status))
def main(argv=None):
    """Entry point: report on our own PID by default, or on argv[1]."""
    if argv is None:
        argv = sys.argv
    # Flat guard clauses instead of an if/elif/else chain.
    if len(argv) == 1:
        sys.exit(run(os.getpid()))
    if len(argv) == 2:
        sys.exit(run(int(argv[1])))
    sys.exit('usage: %s [pid]' % __file__)
if __name__ == '__main__':
sys.exit(main())
|
bsd-3-clause
| 8,451,998,020,740,110,000
| 31.46875
| 87
| 0.492541
| false
| 3.434711
| false
| false
| false
|
mupen64plus/mupen64plus-ui-python
|
src/m64py/frontend/dialogs.py
|
1
|
2520
|
# -*- coding: utf-8 -*-
# Author: Milan Nikolic <gen2brain@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QDialog, QMessageBox, QListWidgetItem
from m64py.utils import version_split
from m64py.core.defs import FRONTEND_VERSION
try:
from m64py.ui.about_ui import Ui_AboutDialog
from m64py.ui.license_ui import Ui_LicenseDialog
from m64py.ui.archive_ui import Ui_ArchiveDialog
except ModuleNotFoundError:
sys.stderr.write("You have to run setup.py build first\n")
sys.exit(1)
class AboutDialog(QDialog, Ui_AboutDialog):
    """About box showing the frontend version and the emulator core version."""

    def __init__(self, parent):
        QDialog.__init__(self, parent)
        self.setupUi(self)
        core_version = parent.worker.core.core_version
        # The core reports the literal string "Unknown" when it could not
        # be queried; only real versions go through version_split().
        version = version_split(core_version) if core_version != "Unknown" else "Unknown"
        about_text = self.labelAbout.text()
        about_text = about_text.replace("FRONTEND_VERSION", FRONTEND_VERSION)
        about_text = about_text.replace("CORE_VERSION", version)
        self.labelAbout.setText(about_text)
        self.show()
class LicenseDialog(QDialog, Ui_LicenseDialog):
    """Dialog displaying the license text (content comes from the .ui file)."""

    def __init__(self, parent):
        """Build the dialog from the generated UI and display it via show()."""
        QDialog.__init__(self, parent)
        self.setupUi(self)
        self.show()
class InfoDialog(QMessageBox):
    """Message box preloaded with an informational text and an 'Info' title."""

    def __init__(self, parent=None, text=None):
        """Set the message text and title, then display via show()."""
        QMessageBox.__init__(self, parent)
        self.setText(text)
        self.setWindowTitle("Info")
        self.show()
class ArchiveDialog(QDialog, Ui_ArchiveDialog):
    """Dialog listing file names found inside an archive for the user to pick."""

    def __init__(self, parent, files):
        QDialog.__init__(self, parent)
        self.setupUi(self)
        self.build_list(files)

    def build_list(self, files):
        """Populate the list widget with one selectable item per file name."""
        self.listWidget.clear()
        for name in files:
            entry = QListWidgetItem(name)
            # keep the raw file name retrievable via the item's user data
            entry.setData(Qt.UserRole, name)
            self.listWidget.addItem(entry)
        self.listWidget.setCurrentRow(0)
|
gpl-3.0
| -3,016,938,212,530,211,000
| 32.6
| 71
| 0.678968
| false
| 3.772455
| false
| false
| false
|
Joshuaalbert/IonoTomo
|
src/ionotomo/inversion/gradient_and_adjoint.py
|
1
|
13841
|
'''The gradient for steepest direction, i.e. <Cm, d/dm(-log(posterior))>
is equal to Adjoint(G).(g(m) - d_obs) + (m - m_prior) = Cm.G^t.Cd^-1 .( g(m) - d_obs ) + (m - m_prior)'''
from ionotomo.geometry.tri_cubic import bisection
import numpy as np
from scipy.integrate import simps
import dask.array as da
from dask import delayed
from dask.multiprocessing import get
from ionotomo.ionosphere.covariance import Covariance
def do_adjoint(rays, dd, K_ne, m_tci, sigma_m, Nkernel, size_cell, i0):
    """Accumulate the adjoint term Cm.G^t applied to weighted residuals `dd`.

    * `rays`: ray coordinates, shape (N1, N2, 4, Ns) — axis 2 holds x, y, z, s
      (assumes axis ordering x/y/z/pathlength — TODO confirm against caller)
    * `dd`: weighted residuals, shape (N1, N2)
    * `K_ne`: electron-density scale applied to exp(model)
    * `m_tci`: tri-cubic model object providing xvec/yvec/zvec, nx/ny/nz, interp
    * `sigma_m`, `Nkernel`, `size_cell`: model covariance amplitude and
      Gaussian kernel width (L_m = Nkernel*size_cell) in grid cells
    * `i0`: reference index (see NOTE at the end)

    Returns a (nx, ny, nz) double array.
    """
    # Correlation length of the (Gaussian) model covariance kernel.
    L_m = Nkernel*size_cell
    #if antennas parallelization Nt,Nd
    #if directions parallelization Na,Nd
    N1,N2,_,Ns = rays.shape
    m_shape = [N1,N2,m_tci.nx,m_tci.ny,m_tci.nz]
    grad = np.zeros([m_tci.nx,m_tci.ny,m_tci.nz],dtype=np.double)
    # mask[j,k,xi,yi,zi] is True when ray (j,k) passes within Nkernel cells of
    # grid point (xi,yi,zi); idx_min/idx_max bracket the ray samples involved.
    mask = np.zeros(m_shape, dtype=np.bool)
    idx_min = np.ones(m_shape,dtype=np.int64)*Ns
    idx_max = np.ones(m_shape,dtype=np.int64)*-1
    nevec = np.zeros([N1,N2,Ns],dtype=np.double)
    # Pass 1: walk every ray sample, mark nearby grid cells and record the
    # first/last sample index touching each cell.
    j = 0
    while j < N1:
        k = 0
        while k < N2:
            x_ray = rays[j,k,0,:]
            y_ray = rays[j,k,1,:]
            z_ray = rays[j,k,2,:]
            s_ray = rays[j,k,3,:]
            # Electron density along the ray (scaled by 1e13 — units unclear
            # from here; presumably a TECU-style normalization, verify).
            nevec[j,k,:] = K_ne*np.exp(m_tci.interp(x_ray,y_ray,z_ray))/1e13
            idx = 0
            while idx < Ns:
                # Nearest grid indices of this sample via bisection search.
                xi,yi,zi = bisection(m_tci.xvec,x_ray[idx]),bisection(m_tci.yvec,y_ray[idx]),bisection(m_tci.zvec,z_ray[idx])
                local_mask = (j,k,slice(max(0,xi - Nkernel), min(m_tci.nx - 1, xi + Nkernel + 1)),
                              slice(max(0,yi - Nkernel) , min(m_tci.ny - 1,yi + Nkernel + 1)),
                              slice(max(0, zi - Nkernel), min(m_tci.nz - 1, zi + Nkernel + 1)))
                mask[local_mask] = True
                shape = mask[local_mask].shape
                idx_max[local_mask] = np.max(np.stack([idx_max[local_mask],
                                                       np.ones(shape,dtype=np.int64)*idx],axis=-1),axis=-1)
                idx_min[local_mask] = np.min(np.stack([idx_min[local_mask],
                                                       np.ones(shape,dtype=np.int64)*idx],axis=-1),axis=-1)
                idx += 1
            k += 1
        j += 1
    # Any-ray indicator per grid cell, used to skip untouched cells below.
    sum_mask = np.sum(np.sum(mask,axis=0),axis=0)
    # Pass 2: for each touched grid point, integrate the Gaussian-weighted
    # covariance kernel times ne along the bracketed ray segment (Simpson).
    xi = 0
    while xi < m_tci.nx:
        yi = 0
        while yi < m_tci.ny:
            zi = 0
            while zi < m_tci.nz:
                if not sum_mask[xi,yi,zi]:
                    zi += 1
                    continue
                x,y,z = m_tci.xvec[xi],m_tci.yvec[yi],m_tci.zvec[zi]
                j = 0
                while j < N2:
                    i = 0
                    while i < N1:
                        x_ray = rays[i,j,0,:]
                        y_ray = rays[i,j,1,:]
                        z_ray = rays[i,j,2,:]
                        s_ray = rays[i,j,3,:]
                        ne = nevec[i,j,:]
                        if mask[i,j,xi,yi,zi]:
                            segment_mask = (slice(idx_min[i,j,xi,yi,zi],idx_max[i,j,xi,yi,zi]+1),)
                            dx = x - x_ray[segment_mask]
                            dy = y - y_ray[segment_mask]
                            dz = z - z_ray[segment_mask]
                            # Cm accumulates squared distance, then becomes the
                            # Gaussian kernel sigma_m^2 * exp(-r^2 / (2 L_m^2)),
                            # all in place to avoid temporaries.
                            Cm = dx**2
                            dy *= dy
                            dz *= dz
                            Cm += dy
                            Cm += dz
                            #np.sqrt(Cm,out=Cm)
                            Cm /= -2.*L_m**2
                            np.exp(Cm,out=Cm)
                            Cm *= sigma_m**2
                            Cm *= ne[segment_mask]
                            comp = simps(Cm*dd[i,j],s_ray[segment_mask])
                            grad[xi,yi,zi] += comp
#                            if i == i0:
#                                grad[xi,yi,zi] -= N1*comp
                        i += 1
                    j += 1
                zi += 1
            yi += 1
        xi += 1
    # NOTE(review): grad is indexed over SPACE (nx,ny,nz), yet i0 is the
    # reference-antenna index — subtracting the x-slice grad[i0,:,:] looks
    # like it conflates antenna and grid axes. Confirm intent with the author.
    grad[:,:,:] -= grad[i0,:,:]
    return grad
def compute_adjoint_dask(rays, g, dobs, i0, K_ne, m_tci, m_prior, CdCt, sigma_m, Nkernel, size_cell):
    """Dask-parallel steepest-direction term Cm.G^t.Cd^-1.(g - dobs) + (m - m_prior).

    * `rays`: shape (Na, Nt, Nd, 4, Ns); `g`, `dobs`, `CdCt`: shape (Na, Nt, Nd)
    * `i0`: reference antenna index forwarded to do_adjoint
    * `m_tci.M` / `m_prior`: current and prior model arrays (nx, ny, nz)

    Work is split per direction d and summed; each chunk runs do_adjoint.
    """
    L_m = Nkernel*size_cell
    #residuals
    #g.shape, dobs.shape [Na,Nt,Nd]
    dd = g - dobs
    # Weighted residuals: CdCt is the diagonal data covariance, so Cd^-1.(g-dobs)
    # is an elementwise division; 1e-15 guards against division by zero.
    dd /= (CdCt + 1e-15)
    #get ray info
    Na, Nt, Nd, _ ,Ns = rays.shape
    # Parallelize over directions: one delayed do_adjoint per d, stacked and
    # summed lazily, then evaluated with the multiprocessing scheduler.
    gradient = da.sum(da.stack([da.from_delayed(delayed(do_adjoint)(rays[:,:,d,:,:], dd[:,:,d], K_ne, m_tci,
        sigma_m, Nkernel, size_cell, i0),(m_tci.nx,m_tci.ny,m_tci.nz),dtype=np.double) for d in range(Nd)],axis=-1),axis=-1)
    gradient = gradient.compute(get=get)
    # Add the prior term (m - m_prior) from the docstring at the top of file.
    gradient += m_tci.M
    gradient -= m_prior
    return gradient
def compute_adjoint(rays, g, dobs, i0, K_ne, m_tci, m_prior, CdCt, sigma_m, Nkernel, size_cell):
    """Serial steepest-direction term Cm.G^t.Cd^-1.(g - dobs) + (m - m_prior).

    Same contract as compute_adjoint_dask but evaluated with plain numpy,
    looping over the direction axis of `rays` (Na, Nt, Nd, 4, Ns).
    """
    # Cd^-1.(g - dobs): CdCt holds the diagonal data covariance, so the
    # inverse application is elementwise; epsilon avoids division by zero.
    weighted_residuals = (g - dobs) / (CdCt + 1e-15)

    Na, Nt, Nd, _, Ns = rays.shape

    # One adjoint contribution per direction, accumulated over d.
    per_direction = [
        do_adjoint(rays[:, :, d, :, :], weighted_residuals[:, :, d],
                   K_ne, m_tci, sigma_m, Nkernel, size_cell, i0)
        for d in range(Nd)
    ]
    gradient = np.sum(np.stack(per_direction, axis=-1), axis=-1)

    # Prior term (m - m_prior).
    gradient += m_tci.M
    gradient -= m_prior
    return gradient
def do_gradient(rays, dd, K_ne, m_tci, sigma_m, Nkernel, size_cell, i0):
    """Gradient of S is G^t.CdCt^-1.(g-dobs) + Cm^-1.(m - mprior).

    Per-direction chunk of the gradient; delegates to do_adjoint, which
    computes Cm.G^t applied to the weighted residuals `dd` for the ray
    bundle `rays` (shape (N1, N2, 4, Ns)).

    Returns a (nx, ny, nz) double array.
    """
    # Bug fix: the adjoint was computed but never returned, so callers
    # (compute_gradient / compute_gradient_dask) received None and their
    # np.stack over directions would fail. Return it explicitly.
    adjoint = do_adjoint(rays, dd, K_ne, m_tci, sigma_m, Nkernel, size_cell, i0)
    return adjoint
def compute_gradient_dask(rays, g, dobs, i0, K_ne, m_tci, m_prior, CdCt, sigma_m, Nkernel, size_cell, cov_obj=None):
    """Dask-parallel gradient G^t.Cd^-1.(g - dobs) [+ Cm^-1.(m - m_prior)].

    * `rays`: shape (Na, Nt, Nd, 4, Ns); `g`, `dobs`, `CdCt`: shape (Na, Nt, Nd)
    * `cov_obj`: optional Covariance; when given, its contract(m - m_prior)
      adds the prior/regularization term to the gradient.

    Returns a (nx, ny, nz) double array.
    """
    L_m = Nkernel*size_cell
    # Weighted residuals: CdCt is the diagonal data covariance, so Cd^-1
    # applies elementwise; 1e-15 guards against division by zero.
    dd = g - dobs
    dd /= (CdCt + 1e-15)
    # Ray bundle dimensions.
    Na, Nt, Nd, _, Ns = rays.shape
    # Parallelize over directions: one delayed do_gradient per d, stacked and
    # summed lazily, then evaluated with the multiprocessing scheduler.
    gradient = da.sum(da.stack([da.from_delayed(delayed(do_gradient)(rays[:,:,d,:,:], dd[:,:,d], K_ne, m_tci,
        sigma_m, Nkernel, size_cell, i0),(m_tci.nx,m_tci.ny,m_tci.nz),dtype=np.double) for d in range(Nd)],axis=-1),axis=-1)
    gradient = gradient.compute(get=get)
    if cov_obj is not None:
        dm = m_tci.M - m_prior
        # Bug fix: the prior term was computed but its sum discarded
        # ("gradient + ..."); accumulate it in place so the regularization
        # actually contributes.
        gradient += cov_obj.contract(dm)
    return gradient
def compute_gradient(rays, g, dobs, i0, K_ne, m_tci, m_prior, CdCt, sigma_m, Nkernel, size_cell, cov_obj=None):
    """Serial gradient G^t.Cd^-1.(g - dobs) [+ Cm^-1.(m - m_prior)].

    Same contract as compute_gradient_dask but evaluated with plain numpy,
    looping over the direction axis of `rays` (Na, Nt, Nd, 4, Ns).

    Returns a (nx, ny, nz) double array.
    """
    L_m = Nkernel*size_cell
    # Weighted residuals: CdCt is the diagonal data covariance, so Cd^-1
    # applies elementwise; 1e-15 guards against division by zero.
    dd = g - dobs
    dd /= (CdCt + 1e-15)
    # Ray bundle dimensions.
    Na, Nt, Nd, _, Ns = rays.shape
    # One gradient contribution per direction, accumulated over d.
    gradient = np.sum(np.stack([do_gradient(rays[:, :, d, :, :], dd[:, :, d], K_ne, m_tci,
                                            sigma_m, Nkernel, size_cell, i0)
                                for d in range(Nd)], axis=-1), axis=-1)
    if cov_obj is not None:
        dm = m_tci.M - m_prior
        # Bug fix: the prior term was computed but its sum discarded
        # ("gradient + ..."); accumulate it in place so the regularization
        # actually contributes.
        gradient += cov_obj.contract(dm)
    return gradient
|
apache-2.0
| 3,531,586,930,173,808,000
| 40.457055
| 156
| 0.449173
| false
| 2.814355
| false
| false
| false
|
freerangerouting/frr
|
tests/topotests/lib/common_config.py
|
1
|
120562
|
#
# Copyright (c) 2019 by VMware, Inc. ("VMware")
# Used Copyright (c) 2018 by Network Device Education Foundation, Inc.
# ("NetDEF") in this file.
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose with or without fee is hereby granted, provided
# that the above copyright notice and this permission notice appear
# in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND VMWARE DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL VMWARE BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
#
from collections import OrderedDict
from datetime import datetime
from time import sleep
from copy import deepcopy
from subprocess import call
from subprocess import STDOUT as SUB_STDOUT
from subprocess import PIPE as SUB_PIPE
from subprocess import Popen
from functools import wraps
from re import search as re_search
from tempfile import mkdtemp
import StringIO
import os
import sys
import ConfigParser
import traceback
import socket
import ipaddress
from lib.topolog import logger, logger_config
from lib.topogen import TopoRouter, get_topogen
from lib.topotest import interface_set_status
# File names used for the per-router generated config and its initial snapshot.
FRRCFG_FILE = "frr_json.conf"
FRRCFG_BKUP_FILE = "frr_json_initial.conf"

# Substrings that mark an error in vtysh output.
ERROR_LIST = ["Malformed", "Failure", "Unknown", "Incomplete"]
# Populated by start_topology() with router names sorted by their number.
ROUTER_LIST = []

####
CD = os.path.dirname(os.path.realpath(__file__))
PYTESTINI_PATH = os.path.join(CD, "../pytest.ini")

# Creating tmp dir with testsuite name to avoid conflict condition when
# multiple testsuites run together. All temporary files would be created
# in this dir and this dir would be removed once testsuite run is
# completed
LOGDIR = "/tmp/topotests/"
TMPDIR = None  # set per-testsuite by start_topology()

# NOTE: to save execution logs to log file frrtest_log_dir must be configured
# in `pytest.ini`.
config = ConfigParser.ConfigParser()
config.read(PYTESTINI_PATH)

config_section = "topogen"

# Log verbosity comes from pytest.ini's [topogen] section, default INFO.
if config.has_option("topogen", "verbosity"):
    loglevel = config.get("topogen", "verbosity")
    loglevel = loglevel.upper()
else:
    loglevel = "INFO"

# When frrtest_log_dir is set, replace the imported `logger` with one that
# writes to a timestamped file in that directory.
if config.has_option("topogen", "frrtest_log_dir"):
    frrtest_log_dir = config.get("topogen", "frrtest_log_dir")
    time_stamp = datetime.time(datetime.now())
    logfile_name = "frr_test_bgp_"
    frrtest_log_file = frrtest_log_dir + logfile_name + str(time_stamp)
    print("frrtest_log_file..", frrtest_log_file)

    logger = logger_config.get_logger(
        name="test_execution_logs", log_level=loglevel, target=frrtest_log_file
    )
    print("Logs will be sent to logfile: {}".format(frrtest_log_file))

# When true, dump each router's running config after load/reset.
# NOTE(review): config.get returns a string, so any non-empty value
# (including "False") is truthy — confirm whether getboolean was intended.
if config.has_option("topogen", "show_router_config"):
    show_router_config = config.get("topogen", "show_router_config")
else:
    show_router_config = False

# env variable for setting what address type to test
ADDRESS_TYPES = os.environ.get("ADDRESS_TYPES")

# Saves sequence id numbers
SEQ_ID = {"prefix_lists": {}, "route_maps": {}}
def get_seq_id(obj_type, router, obj_name):
    """
    Generate and remember a sequence number, stepping in intervals of 10.

    Parameters
    ----------
    * `obj_type`: prefix_lists or route_maps
    * `router`: router name
    * `obj_name`: name of the prefix-list or route-map

    Returns
    --------
    The newly generated sequence number.
    """
    # Walk/create the nested SEQ_ID[obj_type][router][obj_name] record.
    obj_data = SEQ_ID[obj_type].setdefault(router, {}).setdefault(obj_name, {})
    next_id = int(obj_data.setdefault("seq_id", 0)) + 10
    obj_data["seq_id"] = next_id
    return next_id
def set_seq_id(obj_type, router, id, obj_name):
    """
    Record a user-supplied sequence number (added to the stored counter)
    instead of auto-generating one.

    Parameters
    ----------
    * `obj_type`: prefix_lists or route_maps
    * `router`: router name
    * `id`: user-provided sequence number increment
    * `obj_name`: name of the prefix-list or route-map
    """
    # Walk/create the nested SEQ_ID[obj_type][router][obj_name] record.
    obj_data = SEQ_ID[obj_type].setdefault(router, {}).setdefault(obj_name, {})
    obj_data["seq_id"] = int(obj_data.setdefault("seq_id", 0)) + int(id)
class InvalidCLIError(Exception):
    """Raised when a CLI/vtysh command is malformed or rejected by the router."""
def run_frr_cmd(rnode, cmd, isjson=False):
    """
    Execute an frr show command in privileged mode.

    * `rnode`: router node on which the command is executed
    * `cmd`: command to be executed on frr
    * `isjson`: whether the output should be parsed as json

    :return str: command output (parsed json when `isjson` is True)
    :raises InvalidCLIError: when `cmd` is empty
    """
    # Guard clause replaces the old `if cmd: ... else: raise` shape.
    if not cmd:
        raise InvalidCLIError("No actual cmd passed")

    ret_data = rnode.vtysh_cmd(cmd, isjson=isjson)

    # Removed a pointless `if True:` wrapper around the logging section.
    if isjson:
        logger.debug(ret_data)
        # Re-run without the trailing "json" to get human-readable text
        # for the log.
        print_data = rnode.vtysh_cmd(cmd.rstrip("json"), isjson=False)
    else:
        print_data = ret_data

    logger.info(
        "Output for command [ %s] on router %s:\n%s",
        cmd.rstrip("json"),
        rnode.name,
        print_data,
    )
    return ret_data
def apply_raw_config(tgen, input_dict):
    """
    Apply raw CLI configuration on devices, for commands that have no JSON
    implementation.

    Parameters
    ----------
    * `tgen`: tgen onject
    * `input_dict`: configuration that needs to be applied

    Usage
    -----
    input_dict = {
        "r2": {
            "raw_config": [
                "router bgp",
                "no bgp update-group-split-horizon"
            ]
        }
    }

    Returns
    -------
    True or errormsg
    """
    result = True
    for router_name in input_dict:
        raw_lines = input_dict[router_name]["raw_config"]

        # Accept a single command string as well as a list of commands.
        if not isinstance(raw_lines, list):
            raw_lines = [raw_lines]

        frr_cfg_file = "{}/{}/{}".format(TMPDIR, router_name, FRRCFG_FILE)
        with open(frr_cfg_file, "w") as cfg:
            cfg.write("".join("{}\n".format(line) for line in raw_lines))

        result = load_config_to_router(tgen, router_name)
    return result
def create_common_configuration(
    tgen, router, data, config_type=None, build=False, load_config=True
):
    """
    Write interface/common configuration lines to frr_json.conf and
    (optionally) load the file onto the router.

    Parameters
    ----------
    * `tgen`: tgen onject
    * `data`: Congiguration data saved in a list.
    * `router` : router id to be configured.
    * `config_type` : Syntactic information while writing configuration. Should
                      be one of the value as mentioned in the config_map below.
    * `build` : Only for initial setup phase this is set as True
    * `load_config` : when False, only append to the file without loading it

    Returns
    -------
    True on success, False when the config file cannot be opened
    """
    TMPDIR = os.path.join(LOGDIR, tgen.modname)
    fname = "{}/{}/{}".format(TMPDIR, router, FRRCFG_FILE)

    # Section banner written before the configuration lines.
    config_map = OrderedDict(
        {
            "general_config": "! FRR General Config\n",
            "interface_config": "! Interfaces Config\n",
            "static_route": "! Static Route Config\n",
            "prefix_list": "! Prefix List Config\n",
            "bgp_community_list": "! Community List Config\n",
            "route_maps": "! Route Maps Config\n",
            "bgp": "! BGP Config\n",
            "vrf": "! VRF Config\n",
        }
    )

    # Append while building or deferring the load; otherwise start fresh.
    if build or not load_config:
        mode = "a"
    else:
        mode = "w"

    try:
        # Bug fix: the previous try/finally closed `frr_cfg_fd` even when
        # open() itself raised, turning an IOError into a NameError.
        # `with` both scopes the handle correctly and guarantees the close.
        with open(fname, mode) as frr_cfg_fd:
            if config_type:
                frr_cfg_fd.write(config_map[config_type])
            for line in data:
                frr_cfg_fd.write("{} \n".format(str(line)))
            frr_cfg_fd.write("\n")
    except IOError as err:
        logger.error(
            "Unable to open FRR Config File. error(%s): %s" % (err.errno, err.strerror)
        )
        return False

    # If configuration applied from build, it will done at last
    if not build and load_config:
        load_config_to_router(tgen, router)

    return True
def kill_router_daemons(tgen, router, daemons):
    """
    Router's current config would be saved to /etc/frr/ for each deamon
    and deamon would be killed forcefully using SIGKILL.

    * `tgen`  : topogen object
    * `router`: Device under test
    * `daemons`: list of daemons to be killed

    Returns the killDaemons() output on success, or a traceback string on
    any exception.
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))

    try:
        router_list = tgen.routers()

        # Saving router config to /etc/frr, which will be loaded to router
        # when it starts
        router_list[router].vtysh_cmd("write memory")

        # Kill Daemons
        result = router_list[router].killDaemons(daemons)
        # NOTE(review): a non-empty string compared with 0 is always False,
        # so this assert fires whenever killDaemons reports anything; the
        # string is just the failure banner shown with `result`.
        if len(result) > 0:
            assert "Errors found post shutdown - details follow:" == 0, result
        return result
    except Exception as e:
        errormsg = traceback.format_exc()
        logger.error(errormsg)
        return errormsg
def start_router_daemons(tgen, router, daemons):
    """
    Start the given daemons on a router.

    * `tgen`  : topogen object
    * `router`: Device under test
    * `daemons`: list of daemons to be started

    Returns the startDaemons() output on success, or a traceback string on
    any exception.
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))

    try:
        router_list = tgen.routers()

        # Start daemons
        result = router_list[router].startDaemons(daemons)
        return result
    except Exception:
        # Dead code removed: the old trailing logger.debug/return True after
        # this handler was unreachable (both branches above return).
        errormsg = traceback.format_exc()
        logger.error(errormsg)
        return errormsg
def kill_mininet_routers_process(tgen):
    """
    Forcefully (SIGKILL) terminate stale FRR daemon processes on every
    mininet router.

    * `tgen`  : topogen object
    """
    daemons = (
        "zebra",
        "ospfd",
        "ospf6d",
        "bgpd",
        "ripd",
        "ripngd",
        "isisd",
        "pimd",
        "ldpd",
        "staticd",
    )
    for rname, router in tgen.routers().iteritems():
        for daemon in daemons:
            router.run("killall -9 {}".format(daemon))
def check_router_status(tgen):
    """
    Verify that all daemons are running on every router in the topology and
    restart any reported as dead.

    * `tgen`  : topogen object

    Returns True, or a traceback string on any exception.
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    try:
        for router, rnode in tgen.routers().iteritems():
            result = rnode.check_router_running()
            if result != "":
                # Restart only the daemons named in the failure report.
                daemons = [d for d in ("bgpd", "zebra") if d in result]
                rnode.startDaemons(daemons)
    except Exception:
        errormsg = traceback.format_exc()
        logger.error(errormsg)
        return errormsg

    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return True
def reset_config_on_routers(tgen, routerName=None):
    """
    Resets configuration on routers to the snapshot created using input JSON
    file. It replaces existing router configuration with FRRCFG_BKUP_FILE

    Parameters
    ----------
    * `tgen` : Topogen object
    * `routerName` : router config is to be reset (all routers when None)

    Raises InvalidCLIError when frr-reload.py fails; returns True otherwise.
    """
    logger.debug("Entering API: reset_config_on_routers")

    router_list = tgen.routers()
    for rname in ROUTER_LIST:
        if routerName and routerName != rname:
            continue

        router = router_list[rname]
        logger.info("Configuring router %s to initial test configuration", rname)
        cfg = router.run("vtysh -c 'show running'")
        fname = "{}/{}/frr.sav".format(TMPDIR, rname)
        dname = "{}/{}/delta.conf".format(TMPDIR, rname)
        f = open(fname, "w")
        # Save the running config, stripping vtysh banners and blank lines.
        for line in cfg.split("\n"):
            line = line.strip()

            if (
                line == "Building configuration..."
                or line == "Current configuration:"
                or not line
            ):
                continue
            f.write(line)
            f.write("\n")

        f.close()
        run_cfg_file = "{}/{}/frr.sav".format(TMPDIR, rname)
        init_cfg_file = "{}/{}/frr_json_initial.conf".format(TMPDIR, rname)
        # frr-reload.py --test emits the delta between the running config and
        # the initial snapshot into dname.
        command = "/usr/lib/frr/frr-reload.py --input {} --test {} > {}".format(
            run_cfg_file, init_cfg_file, dname
        )
        result = call(command, shell=True, stderr=SUB_STDOUT, stdout=SUB_PIPE)

        # Assert if command fail
        if result > 0:
            logger.error("Delta file creation failed. Command executed %s", command)
            with open(run_cfg_file, "r") as fd:
                logger.info(
                    "Running configuration saved in %s is:\n%s", run_cfg_file, fd.read()
                )
            with open(init_cfg_file, "r") as fd:
                logger.info(
                    "Test configuration saved in %s is:\n%s", init_cfg_file, fd.read()
                )

            # Re-parse the saved config through vtysh to surface the exact
            # offending line in the raised error.
            err_cmd = ["/usr/bin/vtysh", "-m", "-f", run_cfg_file]
            result = Popen(err_cmd, stdout=SUB_PIPE, stderr=SUB_PIPE)
            output = result.communicate()
            for out_data in output:
                temp_data = out_data.decode("utf-8").lower()
                for out_err in ERROR_LIST:
                    if out_err.lower() in temp_data:
                        logger.error(
                            "Found errors while validating data in" " %s", run_cfg_file
                        )
                        raise InvalidCLIError(out_data)
            raise InvalidCLIError("Unknown error in %s", output)

        f = open(dname, "r")
        delta = StringIO.StringIO()
        delta.write("configure terminal\n")
        t_delta = f.read()

        # Don't disable debugs
        check_debug = True

        # Turn the frr-reload delta listing into a vtysh script, skipping the
        # section banners it prints around "Lines To Delete"/"Lines To Add".
        for line in t_delta.split("\n"):
            line = line.strip()
            if line == "Lines To Delete" or line == "===============" or not line:
                continue

            if line == "Lines To Add":
                check_debug = False
                continue

            if line == "============" or not line:
                continue

            # Leave debugs and log output alone
            if check_debug:
                if "debug" in line or "log file" in line:
                    continue

            delta.write(line)
            delta.write("\n")

        f.close()

        delta.write("end\n")
        output = router.vtysh_multicmd(delta.getvalue(), pretty_output=False)

        delta.close()
        delta = StringIO.StringIO()
        cfg = router.run("vtysh -c 'show running'")
        for line in cfg.split("\n"):
            line = line.strip()
            delta.write(line)
            delta.write("\n")

        # Router current configuration to log file or console if
        # "show_router_config" is defined in "pytest.ini"
        if show_router_config:
            logger.info("Configuration on router {} after reset:".format(rname))
            logger.info(delta.getvalue())
        delta.close()

    logger.debug("Exiting API: reset_config_on_routers")
    return True
def load_config_to_router(tgen, routerName, save_bkup=False):
    """
    Loads configuration on router from the file FRRCFG_FILE.

    Parameters
    ----------
    * `tgen` : Topogen object
    * `routerName` : router for which configuration to be loaded
    * `save_bkup` : If True, Saves snapshot of FRRCFG_FILE to FRRCFG_BKUP_FILE

    Raises InvalidCLIError when vtysh reports an error; returns True
    otherwise, or an error tuple when the config file cannot be opened.
    """
    logger.debug("Entering API: load_config_to_router")

    router_list = tgen.routers()
    for rname in ROUTER_LIST:
        if routerName and rname != routerName:
            continue

        router = router_list[rname]
        try:
            frr_cfg_file = "{}/{}/{}".format(TMPDIR, rname, FRRCFG_FILE)
            frr_cfg_bkup = "{}/{}/{}".format(TMPDIR, rname, FRRCFG_BKUP_FILE)
            # r+ so the file can be read, applied, then emptied in place.
            with open(frr_cfg_file, "r+") as cfg:
                data = cfg.read()
                logger.info(
                    "Applying following configuration on router"
                    " {}:\n{}".format(rname, data)
                )
                if save_bkup:
                    with open(frr_cfg_bkup, "w") as bkup:
                        bkup.write(data)

                output = router.vtysh_multicmd(data, pretty_output=False)
                for out_err in ERROR_LIST:
                    if out_err.lower() in output.lower():
                        raise InvalidCLIError("%s" % output)

                # Empty the file so the next create_common_configuration
                # append starts clean.
                # NOTE(review): truncate(0) without seek(0) truncates at the
                # current offset on some platforms — confirm intent.
                cfg.truncate(0)
        except IOError as err:
            # NOTE(review): this builds a tuple, not a formatted string —
            # callers comparing against True will treat it as an error value.
            errormsg = (
                "Unable to open config File. error(%s):" " %s",
                (err.errno, err.strerror),
            )
            return errormsg

        # Router current configuration to log file or console if
        # "show_router_config" is defined in "pytest.ini"
        if show_router_config:
            logger.info("New configuration for router {}:".format(rname))
            new_config = router.run("vtysh -c 'show running'")
            logger.info(new_config)

    logger.debug("Exiting API: load_config_to_router")
    return True
def get_frr_ipv6_linklocal(tgen, router, intf=None, vrf=None):
    """
    Get the link-local ipv6 address of a particular interface using the
    FRR command 'show interface'.

    * `tgen`: tgen onject
    * `router` : router for which hightest interface should be
                 calculated
    * `intf` : interface for which linklocal address needs to be taken
    * `vrf` : VRF name

    Usage
    -----
    linklocal = get_frr_ipv6_linklocal(tgen, router, "intf1", RED_A)

    Returns
    -------
    1) array of [interface, link-local ip] pairs (or just the address string
       when `intf` is given), or an error message when none was found.
    """
    router_list = tgen.routers()
    for rname, rnode in router_list.iteritems():
        if rname != router:
            continue

        linklocal = []

        if vrf:
            cmd = "show interface vrf {}".format(vrf)
        else:
            cmd = "show interface"
        ifaces = router_list[router].run('vtysh -c "{}"'.format(cmd))

        # Fix newlines (make them all the same)
        ifaces = ("\n".join(ifaces.splitlines()) + "\n").splitlines()

        interface = None
        ll_per_if_count = 0
        for line in ifaces:
            # Interface name
            m = re_search("Interface ([a-zA-Z0-9-]+) is", line)
            if m:
                interface = m.group(1).split(" ")[0]
                ll_per_if_count = 0

            # Interface ip
            m1 = re_search("inet6 (fe80[:a-fA-F0-9]+[\/0-9]+)", line)
            if m1:
                local = m1.group(1)
                ll_per_if_count += 1
                # Disambiguate when one interface carries several fe80 addrs.
                if ll_per_if_count > 1:
                    linklocal += [["%s-%s" % (interface, ll_per_if_count), local]]
                else:
                    linklocal += [[interface, local]]

        if linklocal:
            if intf:
                return [_linklocal[1] for _linklocal in linklocal if _linklocal[0] == intf][
                    0
                ].split("/")[0]
            return linklocal
        else:
            # Bug fix: the router name was never substituted into the
            # placeholder, so the returned message contained a literal "{}".
            errormsg = "Link local ip missing on router {}".format(router)
            return errormsg
def generate_support_bundle():
    """
    Generate a support bundle after a verification step failure.

    Runs /usr/lib/frr/generate_support_bundle.py on every router, which
    executes a predefined set of CLIs and dumps the output to /var/log/frr,
    then moves that dump under the per-test tmp directory.
    """
    tgen = get_topogen()
    # Name of the test two frames up the stack (the failing verification).
    test_name = sys._getframe(2).f_code.co_name
    TMPDIR = os.path.join(LOGDIR, tgen.modname)

    for rname, rnode in tgen.routers().iteritems():
        logger.info("Generating support bundle for {}".format(rname))
        rnode.run("mkdir -p /var/log/frr")
        bundle_log = rnode.run("python2 /usr/lib/frr/generate_support_bundle.py")
        logger.info(bundle_log)

        src_bundle = "/var/log/frr"
        dst_bundle = "{}/{}/support_bundles/{}".format(TMPDIR, rname, test_name)
        rnode.run("rm -rf {}".format(dst_bundle))
        rnode.run("mkdir -p {}".format(dst_bundle))
        rnode.run("mv -f {}/* {}".format(src_bundle, dst_bundle))

    return True
def start_topology(tgen):
    """
    Starting topology, create tmp files which are loaded to routers
    to start deamons and then start routers

    * `tgen`  : topogen object

    Side effects: sets the module globals TMPDIR and ROUTER_LIST.
    """

    global TMPDIR, ROUTER_LIST
    # Starting topology
    tgen.start_topology()

    # Starting deamons

    router_list = tgen.routers()
    # Sort router names by their numeric suffix (r1, r2, ... r10).
    ROUTER_LIST = sorted(
        router_list.keys(), key=lambda x: int(re_search("\d+", x).group(0))
    )
    TMPDIR = os.path.join(LOGDIR, tgen.modname)

    router_list = tgen.routers()
    for rname in ROUTER_LIST:
        router = router_list[rname]

        # It will help in debugging the failures, will give more details on which
        # specific kernel version tests are failing
        linux_ver = router.run("uname -a")
        logger.info("Logging platform related details: \n %s \n", linux_ver)

        try:
            os.chdir(TMPDIR)

            # Creating router named dir and empty zebra.conf bgpd.conf files
            # inside the current directory; an existing dir is wiped first.
            if os.path.isdir("{}".format(rname)):
                os.system("rm -rf {}".format(rname))
                os.mkdir("{}".format(rname))
                os.system("chmod -R go+rw {}".format(rname))
                os.chdir("{}/{}".format(TMPDIR, rname))
                os.system("touch zebra.conf bgpd.conf")
            else:
                os.mkdir("{}".format(rname))
                os.system("chmod -R go+rw {}".format(rname))
                os.chdir("{}/{}".format(TMPDIR, rname))
                os.system("touch zebra.conf bgpd.conf")

        # NOTE: tuple-unpacking except clause is Python 2-only syntax; this
        # module cannot run unchanged under Python 3.
        except IOError as (errno, strerror):
            logger.error("I/O error({0}): {1}".format(errno, strerror))

        # Loading empty zebra.conf file to router, to start the zebra deamon
        router.load_config(
            TopoRouter.RD_ZEBRA, "{}/{}/zebra.conf".format(TMPDIR, rname)
        )
        # Loading empty bgpd.conf file to router, to start the bgp deamon
        router.load_config(TopoRouter.RD_BGP, "{}/{}/bgpd.conf".format(TMPDIR, rname))

    # Starting routers
    logger.info("Starting all routers once topology is created")
    tgen.start_router()
def stop_router(tgen, router):
    """
    Save the router's running config to /etc/frr for each daemon, then stop
    the router and all of its daemons.

    * `tgen`  : topogen object
    * `router`: Device under test
    """
    rnode = tgen.routers()[router]

    # Persist the running config so it is reloaded on the next start.
    rnode.vtysh_cmd("write memory")

    rnode.stop()
def start_router(tgen, router):
    """
    Start a router; each daemon loads its saved config from /etc/frr.

    * `tgen`  : topogen object
    * `router`: Device under test

    Returns True, or a traceback string on any exception.
    """
    logger.debug("Entering lib API: start_router")

    try:
        # Router and its daemons start with the configs saved in /etc/frr.
        tgen.routers()[router].start()

        # Waiting for router to come up
        sleep(5)
    except Exception:
        errormsg = traceback.format_exc()
        logger.error(errormsg)
        return errormsg

    logger.debug("Exiting lib API: start_router()")
    return True
def number_to_row(routerName):
    """
    Return the row number encoded in a router name.

    The name is one letter followed by digits: a0 -> row 0, a1 -> row 1,
    b2 -> row 2, z23 -> row 23, etc.
    """
    digits = routerName[1:]
    return int(digits)
def number_to_column(routerName):
    """
    Return the column number encoded in a router name.

    The column is the leading letter's alphabet position: a0 and a1 are
    column 0, b2 is column 1, z23 is column 25, etc.
    """
    return ord(routerName[0]) - ord("a")
#############################################
# Common APIs, will be used by all protocols
#############################################
def create_vrf_cfg(tgen, topo, input_dict=None, build=False):
    """
    Create vrf configuration for created topology. VRF
    configuration is provided in input json file.

    VRF config is done in Linux Kernel:
    * Create VRF
    * Attach interface to VRF
    * Bring up VRF

    Parameters
    ----------
    * `tgen` : Topogen object
    * `topo` : json file data
    * `input_dict` : Input dict data, required when configuring
                     from testcase
    * `build` : Only for initial setup phase this is set as True.

    Usage
    -----
    input_dict={
        "r3": {
            "links": {
                "r2-link1": {"ipv4": "auto", "ipv6": "auto", "vrf": "RED_A"},
                "r2-link2": {"ipv4": "auto", "ipv6": "auto", "vrf": "RED_B"},
                "r2-link3": {"ipv4": "auto", "ipv6": "auto", "vrf": "BLUE_A"},
                "r2-link4": {"ipv4": "auto", "ipv6": "auto", "vrf": "BLUE_B"},
            },
            "vrfs":[
                {
                    "name": "RED_A",
                    "id": "1"
                },
                {
                    "name": "RED_B",
                    "id": "2"
                },
                {
                    "name": "BLUE_A",
                    "id": "3",
                    "delete": True
                },
                {
                    "name": "BLUE_B",
                    "id": "4"
                }
            ]
        }
    }
    result = create_vrf_cfg(tgen, topo, input_dict)

    Returns
    -------
    True or False
    """
    result = True
    # Work on a copy so setdefault() below does not mutate the caller's dict.
    if not input_dict:
        input_dict = deepcopy(topo)
    else:
        input_dict = deepcopy(input_dict)

    try:
        for c_router, c_data in input_dict.iteritems():
            rnode = tgen.routers()[c_router]
            if "vrfs" in c_data:
                for vrf in c_data["vrfs"]:
                    config_data = []
                    del_action = vrf.setdefault("delete", False)
                    name = vrf.setdefault("name", None)
                    table_id = vrf.setdefault("id", None)
                    vni = vrf.setdefault("vni", None)
                    del_vni = vrf.setdefault("no_vni", None)

                    if del_action:
                        # Kernel cmd- Add VRF and table
                        cmd = "ip link del {} type vrf table {}".format(
                            vrf["name"], vrf["id"]
                        )

                        logger.info("[DUT: %s]: Running kernel cmd [%s]", c_router, cmd)
                        rnode.run(cmd)

                        # Kernel cmd - Bring down VRF
                        cmd = "ip link set dev {} down".format(name)
                        logger.info("[DUT: %s]: Running kernel cmd [%s]", c_router, cmd)
                        rnode.run(cmd)

                    else:
                        if name and table_id:
                            # Kernel cmd- Add VRF and table
                            cmd = "ip link add {} type vrf table {}".format(
                                name, table_id
                            )
                            logger.info(
                                "[DUT: %s]: Running kernel cmd " "[%s]", c_router, cmd
                            )
                            rnode.run(cmd)

                            # Kernel cmd - Bring up VRF
                            cmd = "ip link set dev {} up".format(name)
                            logger.info(
                                "[DUT: %s]: Running kernel " "cmd [%s]", c_router, cmd
                            )
                            rnode.run(cmd)

                            # Enslave each link that names this/these VRFs.
                            if "links" in c_data:
                                for destRouterLink, data in sorted(
                                    c_data["links"].iteritems()
                                ):
                                    # Loopback interfaces
                                    if "type" in data and data["type"] == "loopback":
                                        interface_name = destRouterLink
                                    else:
                                        interface_name = data["interface"]

                                    if "vrf" in data:
                                        vrf_list = data["vrf"]

                                        if type(vrf_list) is not list:
                                            vrf_list = [vrf_list]

                                        for _vrf in vrf_list:
                                            cmd = "ip link set {} master {}".format(
                                                interface_name, _vrf
                                            )

                                            logger.info(
                                                "[DUT: %s]: Running" " kernel cmd [%s]",
                                                c_router,
                                                cmd,
                                            )
                                            rnode.run(cmd)

                        # FRR-side vni configuration collected for vtysh.
                        if vni:
                            config_data.append("vrf {}".format(vrf["name"]))
                            cmd = "vni {}".format(vni)
                            config_data.append(cmd)

                        if del_vni:
                            config_data.append("vrf {}".format(vrf["name"]))
                            cmd = "no vni {}".format(del_vni)
                            config_data.append(cmd)

                    result = create_common_configuration(
                        tgen, c_router, config_data, "vrf", build=build
                    )

    except InvalidCLIError:
        # Traceback
        errormsg = traceback.format_exc()
        logger.error(errormsg)
        return errormsg

    return result
def create_interface_in_kernel(
    tgen, dut, name, ip_addr, vrf=None, netmask=None, create=True
):
    """
    Create an interface in the Linux kernel of a router and assign it an
    IPv4/IPv6 address (config is done in the Linux kernel, not in FRR).

    Parameters
    ----------
    * `tgen` : Topogen object
    * `dut` : Device for which interfaces to be added
    * `name` : interface name
    * `ip_addr` : ip address for interface
    * `vrf` : VRF name, to which interface will be associated
    * `netmask` : netmask value, default is None
    * `create` : create the interface in the kernel first (as a dummy
                 link); pass False if the interface already exists
    """
    rnode = tgen.routers()[dut]

    if create:
        # Dummy link type gives us an always-up interface with no backing HW.
        cmd = "sudo ip link add name {} type dummy".format(name)
        rnode.run(cmd)

    # Address family decides the ifconfig syntax used below.
    addr_type = validate_ip_address(ip_addr)
    if addr_type == "ipv4":
        cmd = "ifconfig {} {} netmask {}".format(name, ip_addr, netmask)
    else:
        # NOTE(review): for IPv6 `netmask` is used as a prefix length here
        # ("addr/netmask") — callers must pass a prefix length, not a mask.
        cmd = "ifconfig {} inet6 add {}/{}".format(name, ip_addr, netmask)

    rnode.run(cmd)

    if vrf:
        # Enslave the new interface to the given VRF device.
        cmd = "ip link set {} master {}".format(name, vrf)
        rnode.run(cmd)
def shutdown_bringup_interface_in_kernel(tgen, dut, intf_name, ifaceaction=False):
    """
    Set the admin state of a kernel interface on a router.

    Parameters
    ----------
    * `tgen` : Topogen object
    * `dut` : router on which the interface lives
    * `intf_name` : interface name
    * `ifaceaction` : False to shut down, True to bring up the interface
    """
    rnode = tgen.routers()[dut]

    # Build the full "ip link set dev <intf> up|down" command in one go.
    state = "up" if ifaceaction else "down"
    cmd = "ip link set dev {} {}".format(intf_name, state)

    logger.info("[DUT: %s]: Running command: %s", dut, cmd)
    rnode.run(cmd)
def validate_ip_address(ip_address):
    """
    Return the address family of *ip_address* as a string.

    Parameters
    ----------
    * `ip_address`: IPv4/IPv6 address, optionally with a "/prefix" suffix
                    (the prefix part is ignored for classification)

    Returns
    -------
    "ipv4" or "ipv6"; raises Exception when neither family parses.
    """
    # Strip an optional prefix length before parsing.
    if "/" in ip_address:
        ip_address = ip_address.split("/")[0]

    try:
        socket.inet_aton(ip_address)
        return "ipv4"
    except socket.error:
        logger.debug("Not a valid IPv4 address")

    try:
        socket.inet_pton(socket.AF_INET6, ip_address)
        return "ipv6"
    except socket.error:
        logger.debug("Not a valid IPv6 address")

    raise Exception(
        "InvalidIpAddr", "%s is neither valid IPv4 or IPv6" " address" % ip_address
    )
def check_address_types(addr_type=None):
    """
    Resolve the address families enabled via the ADDRESS_TYPES environment
    variable and optionally check *addr_type* against them.

    Returns the full list of enabled families when *addr_type* is None,
    otherwise True/False depending on whether *addr_type* is enabled.
    """
    # Unset or empty env var means both families are exercised.
    addr_types_env = os.environ.get("ADDRESS_TYPES") or "dual"

    if addr_types_env == "dual":
        addr_types = ["ipv4", "ipv6"]
    elif addr_types_env == "ipv4":
        addr_types = ["ipv4"]
    elif addr_types_env == "ipv6":
        addr_types = ["ipv6"]

    if addr_type is None:
        return addr_types

    if addr_type not in addr_types:
        logger.debug(
            "{} not in supported/configured address types {}".format(
                addr_type, addr_types
            )
        )
        return False

    return True
def generate_ips(network, no_of_ips):
    """
    Generate a list of consecutive subnet prefixes.

    * `network` : starting prefix (or list of prefixes) in "addr/mask" form;
                  each prefix seeds its own run of generated addresses
    * `no_of_ips` : number of prefixes generated per starting prefix

    Returns a list of "addr/mask" strings.
    """
    ipaddress_list = []
    if type(network) is not list:
        network = [network]
    for start_ipaddr in network:
        if "/" in start_ipaddr:
            start_ip = start_ipaddr.split("/")[0]
            mask = int(start_ipaddr.split("/")[1])
        # NOTE(review): when an entry has no "/", start_ip/mask from the
        # previous iteration are reused (or NameError on the first entry)
        # — confirm callers always pass prefixed addresses.
        addr_type = validate_ip_address(start_ip)
        if addr_type == "ipv4":
            # py2 `unicode` — the ipaddress module requires text, not bytes.
            start_ip = ipaddress.IPv4Address(unicode(start_ip))
            # Step by one full subnet of the given mask length.
            step = 2 ** (32 - mask)
        if addr_type == "ipv6":
            start_ip = ipaddress.IPv6Address(unicode(start_ip))
            step = 2 ** (128 - mask)
        next_ip = start_ip
        count = 0
        while count < no_of_ips:
            ipaddress_list.append("{}/{}".format(next_ip, mask))
            if addr_type == "ipv6":
                # IPv6Address has no integer "+" here; rebuild from int.
                next_ip = ipaddress.IPv6Address(int(next_ip) + step)
            else:
                next_ip += step
            count += 1
    return ipaddress_list
def find_interface_with_greater_ip(topo, router, loopback=True, interface=True):
    """
    Return the highest interface IPv4 address of *router*. If any loopback
    link exists (and `loopback` is True) the highest loopback address wins,
    otherwise the highest physical-interface address is returned.

    * `topo` : json file data
    * `router` : router for which highest interface IP should be calculated
    """
    link_data = topo["routers"][router]["links"]
    lo_list = []
    interfaces_list = []
    lo_exists = False
    for destRouterLink, data in sorted(link_data.iteritems()):
        if loopback:
            if "type" in data and data["type"] == "loopback":
                lo_exists = True
                # Keep only the address part, dropping the "/prefix".
                ip_address = topo["routers"][router]["links"][destRouterLink][
                    "ipv4"
                ].split("/")[0]
                lo_list.append(ip_address)
        if interface:
            ip_address = topo["routers"][router]["links"][destRouterLink]["ipv4"].split(
                "/"
            )[0]
            interfaces_list.append(ip_address)
    # NOTE(review): sorted() on dotted-quad strings is lexicographic, so
    # e.g. "9.0.0.1" sorts above "10.0.0.1" — presumably acceptable for the
    # address plans used in these tests; verify if plans mix widths.
    if lo_exists:
        return sorted(lo_list)[-1]
    return sorted(interfaces_list)[-1]
def write_test_header(tc_name):
    """Log a starred banner marking the start of test case *tc_name*."""
    # Banner width scales with the test-case name plus a fixed margin of 20.
    banner = "*" * (len(tc_name) + 20)
    logger.info(banner)
    # reset=True restarts the per-test step counter at 1.
    step("START -> Testcase : %s" % tc_name, reset=True)
    logger.info(banner)
def write_test_footer(tc_name):
    """Log a banner marking the (passing) end of test case *tc_name*."""
    # Banner width scales with the test-case name plus a fixed margin of 21.
    banner = "=" * (len(tc_name) + 21)
    logger.info(banner)
    logger.info("Testcase : %s -> PASSED", tc_name)
    logger.info(banner)
def interface_status(tgen, topo, input_dict):
    """
    Set the admin status (up/down) of the given interfaces on the given
    routers, then reload each router's configuration.

    * `tgen` : Topogen object
    * `topo` : json file data
    * `input_dict` : per-router interface list and desired status

    Usage
    -----
    input_dict = {
        "r3": {
            "interface_list": ['eth1-r1-r2', 'eth2-r1-r3'],
            "status": "down"
        }
    }

    Returns
    -------
    errormsg(str) or True
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    try:
        global frr_cfg
        for router in input_dict.keys():

            interface_list = input_dict[router]["interface_list"]
            # Default to bringing interfaces up when no status is given.
            status = input_dict[router].setdefault("status", "up")
            for intf in interface_list:
                rnode = tgen.routers()[router]
                interface_set_status(rnode, intf, status)

            # Load config to router
            load_config_to_router(tgen, router)

    except Exception as e:
        # handle any exception
        # NOTE: e.message is Python-2-only; under py3 this would raise.
        logger.error("Error %s occured. Arguments %s.", e.message, e.args)

        # Traceback
        errormsg = traceback.format_exc()
        logger.error(errormsg)
        return errormsg

    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return True
def retry(attempts=3, wait=2, return_is_str=True, initial_wait=0, return_is_dict=False):
    """
    Decorator factory: retries function execution if the return value is an
    errormsg or an exception is raised.

    * `attempts`: Number of attempts to make
    * `wait`: Number of seconds to wait between each attempt
    * `return_is_str`: Return val is an errormsg in case of failure
    * `initial_wait`: Sleeps for this much seconds before executing function
    * `return_is_dict`: Return val is a dict in case of success

    All of the above can also be overridden per call via keyword arguments
    of the decorated function (they are popped before calling it).
    """

    def _retry(func):
        @wraps(func)
        def func_retry(*args, **kwargs):
            # Per-call overrides of the decorator defaults.
            _wait = kwargs.pop("wait", wait)
            _attempts = kwargs.pop("attempts", attempts)
            _attempts = int(_attempts)
            if _attempts < 0:
                raise ValueError("attempts must be 0 or greater")

            if initial_wait > 0:
                logger.info("Waiting for [%s]s as initial delay", initial_wait)
                sleep(initial_wait)

            _return_is_str = kwargs.pop("return_is_str", return_is_str)
            # BUG FIX: this previously popped "return_is_str" a second time,
            # so a per-call return_is_dict=... keyword was never honored and
            # leaked through kwargs into func(), causing a TypeError.
            _return_is_dict = kwargs.pop("return_is_dict", return_is_dict)
            for i in range(1, _attempts + 1):
                try:
                    # "expected" tells us whether the caller anticipates
                    # success (True) or an error return (False).
                    _expected = kwargs.setdefault("expected", True)
                    kwargs.pop("expected")
                    ret = func(*args, **kwargs)
                    logger.debug("Function returned %s" % ret)
                    if _return_is_str and isinstance(ret, bool) and _expected:
                        return ret
                    if (
                        isinstance(ret, str) or isinstance(ret, unicode)
                    ) and _expected is False:
                        return ret
                    if _return_is_dict and isinstance(ret, dict):
                        return ret

                    if _attempts == i:
                        # Out of attempts: collect debug info, return as-is.
                        generate_support_bundle()
                        return ret
                except Exception as err:
                    if _attempts == i:
                        generate_support_bundle()
                        logger.info("Max number of attempts (%r) reached", _attempts)
                        raise
                    else:
                        logger.info("Function returned %s", err)
                if i < _attempts:
                    logger.info("Retry [#%r] after sleeping for %ss" % (i, _wait))
                    sleep(_wait)

        # Expose the undecorated function for callers that need it.
        func_retry._original = func
        return func_retry

    return _retry
class Stepper:
    """
    Logs the step number for the test-case step being executed.

    The counter is class-level state shared across instances: successive
    calls log STEP 1, 2, 3, ... until a call with reset=True restarts it.
    """

    count = 1

    def __call__(self, msg, reset):
        if not reset:
            logger.info("STEP %s: '%s'", Stepper.count, msg)
            Stepper.count += 1
        else:
            # Restart numbering; the reset message itself is not numbered.
            Stepper.count = 1
            logger.info(msg)
def step(msg, reset=False):
    """
    Log one numbered test step via Stepper.

    * `msg` : Step message body.
    * `reset` : Reset step count to 1 when set to True.
    """
    # Stepper keeps its counter as class state, so a throwaway instance
    # is enough to advance (or reset) the shared count.
    Stepper()(msg, reset)
#############################################
# These APIs will be used by testcases
#############################################
def create_interfaces_cfg(tgen, topo, build=False):
    """
    Create interface configuration for created topology. Basic interface
    configuration is provided in the input json file.

    Parameters
    ----------
    * `tgen` : Topogen object
    * `topo` : json file data
    * `build` : Only for initial setup phase this is set as True.

    Returns
    -------
    True or False (or errormsg string on InvalidCLIError)
    """
    result = False
    # Work on a copy so setdefault/mutation doesn't leak into caller's topo.
    topo = deepcopy(topo)
    try:
        for c_router, c_data in topo.iteritems():
            interface_data = []
            for destRouterLink, data in sorted(c_data["links"].iteritems()):
                # Loopback interfaces
                if "type" in data and data["type"] == "loopback":
                    interface_name = destRouterLink
                else:
                    interface_name = data["interface"]

                # Include vrf if present
                if "vrf" in data:
                    interface_data.append(
                        "interface {} vrf {}".format(
                            str(interface_name), str(data["vrf"])
                        )
                    )
                else:
                    interface_data.append("interface {}".format(str(interface_name)))

                # Per-family address lines; "delete" flips to the "no" form.
                if "ipv4" in data:
                    intf_addr = c_data["links"][destRouterLink]["ipv4"]
                    if "delete" in data and data["delete"]:
                        interface_data.append("no ip address {}".format(intf_addr))
                    else:
                        interface_data.append("ip address {}".format(intf_addr))
                if "ipv6" in data:
                    intf_addr = c_data["links"][destRouterLink]["ipv6"]
                    if "delete" in data and data["delete"]:
                        interface_data.append("no ipv6 address {}".format(intf_addr))
                    else:
                        interface_data.append("ipv6 address {}".format(intf_addr))
                if "ipv6-link-local" in data:
                    intf_addr = c_data["links"][destRouterLink]["ipv6-link-local"]
                    if "delete" in data and data["delete"]:
                        interface_data.append("no ipv6 address {}".format(intf_addr))
                    else:
                        interface_data.append("ipv6 address {}\n".format(intf_addr))

            result = create_common_configuration(
                tgen, c_router, interface_data, "interface_config", build=build
            )
    except InvalidCLIError:
        # Traceback
        errormsg = traceback.format_exc()
        logger.error(errormsg)
        return errormsg

    return result
def create_static_routes(tgen, input_dict, build=False):
    """
    Create static routes for a given router as defined in input_dict.

    Parameters
    ----------
    * `tgen` : Topogen object
    * `input_dict` : Input dict data, required when configuring from testcase
    * `build` : Only for initial setup phase this is set as True.

    Usage
    -----
    input_dict should be in the format below:
    # static_routes: list of all routes
    # network: network address
    # no_of_ip: number of next-hop address that will be configured
    # admin_distance: admin distance for route/routes.
    # next_hop: starting next-hop address
    # tag: tag id for static routes
    # vrf: VRF name in which static routes needs to be created
    # delete: True if config to be removed. Default False.

    Example:
    "routers": {
        "r1": {
            "static_routes": [
                {
                    "network": "100.0.20.1/32",
                    "no_of_ip": 9,
                    "admin_distance": 100,
                    "next_hop": "10.0.0.1",
                    "tag": 4001,
                    "vrf": "RED_A"
                    "delete": true
                }
            ]
        }
    }

    Returns
    -------
    errormsg(str) or True
    """
    result = False
    logger.debug("Entering lib API: create_static_routes()")
    # Deep-copy so our setdefault() calls don't mutate the caller's dict.
    input_dict = deepcopy(input_dict)
    try:
        for router in input_dict.keys():
            if "static_routes" not in input_dict[router]:
                errormsg = "static_routes not present in input_dict"
                logger.info(errormsg)
                continue

            static_routes_list = []
            static_routes = input_dict[router]["static_routes"]
            for static_route in static_routes:
                del_action = static_route.setdefault("delete", False)
                no_of_ip = static_route.setdefault("no_of_ip", 1)
                network = static_route.setdefault("network", [])
                if type(network) is not list:
                    network = [network]

                admin_distance = static_route.setdefault("admin_distance", None)
                tag = static_route.setdefault("tag", None)
                vrf = static_route.setdefault("vrf", None)
                interface = static_route.setdefault("interface", None)
                next_hop = static_route.setdefault("next_hop", None)
                nexthop_vrf = static_route.setdefault("nexthop_vrf", None)

                # Expand each starting network into no_of_ip consecutive
                # prefixes, then emit one FRR CLI line per prefix.
                ip_list = generate_ips(network, no_of_ip)
                for ip in ip_list:
                    addr_type = validate_ip_address(ip)
                    if addr_type == "ipv4":
                        cmd = "ip route {}".format(ip)
                    else:
                        cmd = "ipv6 route {}".format(ip)

                    # Optional keywords — appended in FRR CLI order.
                    if interface:
                        cmd = "{} {}".format(cmd, interface)
                    if next_hop:
                        cmd = "{} {}".format(cmd, next_hop)
                    if nexthop_vrf:
                        cmd = "{} nexthop-vrf {}".format(cmd, nexthop_vrf)
                    if vrf:
                        cmd = "{} vrf {}".format(cmd, vrf)
                    if tag:
                        cmd = "{} tag {}".format(cmd, str(tag))
                    if admin_distance:
                        cmd = "{} {}".format(cmd, admin_distance)
                    if del_action:
                        cmd = "no {}".format(cmd)

                    static_routes_list.append(cmd)

            result = create_common_configuration(
                tgen, router, static_routes_list, "static_route", build=build
            )
    except InvalidCLIError:
        # Traceback
        errormsg = traceback.format_exc()
        logger.error(errormsg)
        return errormsg

    logger.debug("Exiting lib API: create_static_routes()")
    return result
def create_prefix_lists(tgen, input_dict, build=False):
    """
    Create ip prefix lists as per the config provided in input
    JSON or input_dict.

    Parameters
    ----------
    * `tgen` : Topogen object
    * `input_dict` : Input dict data, required when configuring from testcase
    * `build` : Only for initial setup phase this is set as True.

    Usage
    -----
    # pf_lists_1: name of prefix-list, user defined
    # seqid: prefix-list seqid, auto-generated if not given by user
    # network: criteria for applying prefix-list
    # action: permit/deny
    # le: less than or equal number of bits
    # ge: greater than or equal number of bits

    Example
    -------
    input_dict = {
        "r1": {
            "prefix_lists":{
                "ipv4": {
                    "pf_list_1": [
                        {
                            "seqid": 10,
                            "network": "any",
                            "action": "permit",
                            "le": "32",
                            "ge": "30",
                            "delete": True
                        }
                    ]
                }
            }
        }
    }

    Returns
    -------
    errormsg or True
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    result = False
    try:
        for router in input_dict.keys():
            if "prefix_lists" not in input_dict[router]:
                errormsg = "prefix_lists not present in input_dict"
                logger.debug(errormsg)
                continue

            config_data = []
            prefix_lists = input_dict[router]["prefix_lists"]
            for addr_type, prefix_data in prefix_lists.iteritems():
                # Skip families disabled via the ADDRESS_TYPES env var.
                if not check_address_types(addr_type):
                    continue
                for prefix_name, prefix_list in prefix_data.iteritems():
                    for prefix_dict in prefix_list:
                        if "action" not in prefix_dict or "network" not in prefix_dict:
                            errormsg = "'action' or network' missing in" " input_dict"
                            return errormsg

                        network_addr = prefix_dict["network"]
                        action = prefix_dict["action"]
                        le = prefix_dict.setdefault("le", None)
                        ge = prefix_dict.setdefault("ge", None)
                        seqid = prefix_dict.setdefault("seqid", None)
                        del_action = prefix_dict.setdefault("delete", False)
                        # Auto-allocate a sequence id when none was given,
                        # otherwise register the user-provided one.
                        if seqid is None:
                            seqid = get_seq_id("prefix_lists", router, prefix_name)
                        else:
                            set_seq_id("prefix_lists", router, seqid, prefix_name)

                        # FRR uses "ip prefix-list" vs "ipv6 prefix-list".
                        if addr_type == "ipv4":
                            protocol = "ip"
                        else:
                            protocol = "ipv6"

                        cmd = "{} prefix-list {} seq {} {} {}".format(
                            protocol, prefix_name, seqid, action, network_addr
                        )
                        if le:
                            cmd = "{} le {}".format(cmd, le)
                        if ge:
                            cmd = "{} ge {}".format(cmd, ge)
                        if del_action:
                            cmd = "no {}".format(cmd)
                        config_data.append(cmd)
            result = create_common_configuration(
                tgen, router, config_data, "prefix_list", build=build
            )
    except InvalidCLIError:
        # Traceback
        errormsg = traceback.format_exc()
        logger.error(errormsg)
        return errormsg

    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return result
def create_route_maps(tgen, input_dict, build=False):
    """
    Create route-maps on the devices as per the arguments passed.

    Parameters
    ----------
    * `tgen` : Topogen object
    * `input_dict` : Input dict data, required when configuring from testcase
    * `build` : Only for initial setup phase this is set as True.

    Usage
    -----
    # route_maps: key, value pair for route-map name and its attribute
    # rmap_match_prefix_list_1: user given name for route-map
    # action: PERMIT/DENY
    # match: key,value pair for match criteria. prefix_list, community-list,
             large-community-list or tag. Only one option at a time.
    # prefix_list: name of prefix list
    # large-community-list: name of large community list
    # community-list: name of community list
    # tag: tag id for static routes
    # set: key, value pair for modifying route attributes
    # localpref: preference value for the network
    # med: metric value advertised for AS
    # aspath: set AS path value
    # weight: weight for the route
    # community: standard community value to be attached
    # large_community: large community value to be attached
    # community_additive: if set to "additive", adds community/large-community
                          value to the existing values of the network prefix

    Example:
    --------
    input_dict = {
        "r1": {
            "route_maps": {
                "rmap_match_prefix_list_1": [
                    {
                        "action": "PERMIT",
                        "match": {
                            "ipv4": {
                                "prefix_list": "pf_list_1"
                            }
                            "ipv6": {
                                "prefix_list": "pf_list_1"
                            }
                            "large-community-list": {
                                "id": "community_1",
                                "exact_match": True
                            }
                            "community_list": {
                                "id": "community_2",
                                "exact_match": True
                            }
                            "tag": "tag_id"
                        },
                        "set": {
                            "locPrf": 150,
                            "metric": 30,
                            "path": {
                                "num": 20000,
                                "action": "prepend",
                            },
                            "weight": 500,
                            "community": {
                                "num": "1:2 2:3",
                                "action": additive
                            }
                            "large_community": {
                                "num": "1:2:3 4:5;6",
                                "action": additive
                            },
                        }
                    }
                ]
            }
        }
    }

    Returns
    -------
    errormsg(str) or True
    """
    result = False
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    # Deep-copy: the many setdefault() calls below would otherwise mutate
    # the caller's input dict.
    input_dict = deepcopy(input_dict)
    try:
        for router in input_dict.keys():
            if "route_maps" not in input_dict[router]:
                logger.debug("route_maps not present in input_dict")
                continue
            rmap_data = []
            for rmap_name, rmap_value in input_dict[router]["route_maps"].iteritems():

                for rmap_dict in rmap_value:
                    del_action = rmap_dict.setdefault("delete", False)

                    if del_action:
                        rmap_data.append("no route-map {}".format(rmap_name))
                        continue

                    if "action" not in rmap_dict:
                        errormsg = "action not present in input_dict"
                        logger.error(errormsg)
                        return False

                    rmap_action = rmap_dict.setdefault("action", "deny")

                    # Auto-allocate a sequence id when none was given.
                    seq_id = rmap_dict.setdefault("seq_id", None)
                    if seq_id is None:
                        seq_id = get_seq_id("route_maps", router, rmap_name)
                    else:
                        set_seq_id("route_maps", router, seq_id, rmap_name)

                    rmap_data.append(
                        "route-map {} {} {}".format(rmap_name, rmap_action, seq_id)
                    )

                    # "continue" and "goto" both map to FRR's "on-match goto".
                    if "continue" in rmap_dict:
                        continue_to = rmap_dict["continue"]
                        if continue_to:
                            rmap_data.append("on-match goto {}".format(continue_to))
                        else:
                            logger.error(
                                "In continue, 'route-map entry "
                                "sequence number' is not provided"
                            )
                            return False

                    if "goto" in rmap_dict:
                        go_to = rmap_dict["goto"]
                        if go_to:
                            rmap_data.append("on-match goto {}".format(go_to))
                        else:
                            logger.error(
                                "In goto, 'Goto Clause number' is not" " provided"
                            )
                            return False

                    if "call" in rmap_dict:
                        call_rmap = rmap_dict["call"]
                        if call_rmap:
                            rmap_data.append("call {}".format(call_rmap))
                        else:
                            logger.error(
                                "In call, 'destination Route-Map' is" " not provided"
                            )
                            return False

                    # Verifying if SET criteria is defined
                    if "set" in rmap_dict:
                        set_data = rmap_dict["set"]
                        ipv4_data = set_data.setdefault("ipv4", {})
                        ipv6_data = set_data.setdefault("ipv6", {})
                        local_preference = set_data.setdefault("locPrf", None)
                        metric = set_data.setdefault("metric", None)
                        as_path = set_data.setdefault("path", {})
                        weight = set_data.setdefault("weight", None)
                        community = set_data.setdefault("community", {})
                        large_community = set_data.setdefault("large_community", {})
                        large_comm_list = set_data.setdefault("large_comm_list", {})
                        set_action = set_data.setdefault("set_action", None)
                        nexthop = set_data.setdefault("nexthop", None)
                        origin = set_data.setdefault("origin", None)
                        ext_comm_list = set_data.setdefault("extcommunity", {})

                        # Local Preference
                        if local_preference:
                            rmap_data.append(
                                "set local-preference {}".format(local_preference)
                            )

                        # Metric
                        if metric:
                            rmap_data.append("set metric {} \n".format(metric))

                        # Origin
                        if origin:
                            rmap_data.append("set origin {} \n".format(origin))

                        # AS Path Prepend
                        if as_path:
                            as_num = as_path.setdefault("as_num", None)
                            as_action = as_path.setdefault("as_action", None)
                            if as_action and as_num:
                                rmap_data.append(
                                    "set as-path {} {}".format(as_action, as_num)
                                )

                        # Community
                        if community:
                            num = community.setdefault("num", None)
                            comm_action = community.setdefault("action", None)
                            if num:
                                cmd = "set community {}".format(num)
                                if comm_action:
                                    cmd = "{} {}".format(cmd, comm_action)
                                rmap_data.append(cmd)
                            else:
                                logger.error("In community, AS Num not" " provided")
                                return False

                        if large_community:
                            num = large_community.setdefault("num", None)
                            comm_action = large_community.setdefault("action", None)
                            if num:
                                cmd = "set large-community {}".format(num)
                                if comm_action:
                                    cmd = "{} {}".format(cmd, comm_action)
                                rmap_data.append(cmd)
                            else:
                                logger.error(
                                    "In large_community, AS Num not" " provided"
                                )
                                return False
                        if large_comm_list:
                            id = large_comm_list.setdefault("id", None)
                            del_comm = large_comm_list.setdefault("delete", None)
                            if id:
                                cmd = "set large-comm-list {}".format(id)
                                if del_comm:
                                    cmd = "{} delete".format(cmd)
                                rmap_data.append(cmd)
                            else:
                                logger.error("In large_comm_list 'id' not" " provided")
                                return False

                        if ext_comm_list:
                            rt = ext_comm_list.setdefault("rt", None)
                            del_comm = ext_comm_list.setdefault("delete", None)
                            if rt:
                                cmd = "set extcommunity rt {}".format(rt)
                                if del_comm:
                                    cmd = "{} delete".format(cmd)
                                rmap_data.append(cmd)
                            else:
                                logger.debug("In ext_comm_list 'rt' not" " provided")
                                return False

                        # Weight
                        if weight:
                            rmap_data.append("set weight {}".format(weight))
                        if ipv6_data:
                            nexthop = ipv6_data.setdefault("nexthop", None)
                            if nexthop:
                                rmap_data.append("set ipv6 next-hop {}".format(nexthop))

                    # Adding MATCH and SET sequence to RMAP if defined
                    if "match" in rmap_dict:
                        match_data = rmap_dict["match"]
                        ipv4_data = match_data.setdefault("ipv4", {})
                        ipv6_data = match_data.setdefault("ipv6", {})
                        community = match_data.setdefault("community_list", {})
                        large_community = match_data.setdefault("large_community", {})
                        large_community_list = match_data.setdefault(
                            "large_community_list", {}
                        )

                        metric = match_data.setdefault("metric", None)
                        source_vrf = match_data.setdefault("source-vrf", None)

                        if ipv4_data:
                            # fetch prefix list data from rmap
                            prefix_name = ipv4_data.setdefault("prefix_lists", None)
                            if prefix_name:
                                rmap_data.append(
                                    "match ip address"
                                    " prefix-list {}".format(prefix_name)
                                )

                            # fetch tag data from rmap
                            tag = ipv4_data.setdefault("tag", None)
                            if tag:
                                rmap_data.append("match tag {}".format(tag))

                            # fetch large community data from rmap
                            large_community_list = ipv4_data.setdefault(
                                "large_community_list", {}
                            )
                            large_community = match_data.setdefault(
                                "large_community", {}
                            )

                        if ipv6_data:
                            prefix_name = ipv6_data.setdefault("prefix_lists", None)
                            if prefix_name:
                                rmap_data.append(
                                    "match ipv6 address"
                                    " prefix-list {}".format(prefix_name)
                                )

                            # fetch tag data from rmap
                            tag = ipv6_data.setdefault("tag", None)
                            if tag:
                                rmap_data.append("match tag {}".format(tag))

                            # fetch large community data from rmap
                            large_community_list = ipv6_data.setdefault(
                                "large_community_list", {}
                            )
                            large_community = match_data.setdefault(
                                "large_community", {}
                            )

                        if community:
                            if "id" not in community:
                                logger.error(
                                    "'id' is mandatory for "
                                    "community-list in match"
                                    " criteria"
                                )
                                return False
                            cmd = "match community {}".format(community["id"])
                            exact_match = community.setdefault("exact_match", False)
                            if exact_match:
                                cmd = "{} exact-match".format(cmd)
                            rmap_data.append(cmd)
                        if large_community:
                            if "id" not in large_community:
                                logger.error(
                                    "'id' is mandatory for "
                                    "large-community-list in match "
                                    "criteria"
                                )
                                return False
                            cmd = "match large-community {}".format(
                                large_community["id"]
                            )
                            exact_match = large_community.setdefault(
                                "exact_match", False
                            )
                            if exact_match:
                                cmd = "{} exact-match".format(cmd)
                            rmap_data.append(cmd)
                        if large_community_list:
                            if "id" not in large_community_list:
                                logger.error(
                                    "'id' is mandatory for "
                                    "large-community-list in match "
                                    "criteria"
                                )
                                return False
                            cmd = "match large-community {}".format(
                                large_community_list["id"]
                            )
                            exact_match = large_community_list.setdefault(
                                "exact_match", False
                            )
                            if exact_match:
                                cmd = "{} exact-match".format(cmd)
                            rmap_data.append(cmd)

                        if source_vrf:
                            cmd = "match source-vrf {}".format(source_vrf)
                            rmap_data.append(cmd)

                        if metric:
                            cmd = "match metric {}".format(metric)
                            rmap_data.append(cmd)

            result = create_common_configuration(
                tgen, router, rmap_data, "route_maps", build=build
            )

    except InvalidCLIError:
        # Traceback
        errormsg = traceback.format_exc()
        logger.error(errormsg)
        return errormsg

    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return result
def delete_route_maps(tgen, input_dict):
    """
    Delete the named route-maps from the given routers.

    * `tgen` : Topogen object
    * `input_dict` : per-router list of route-map names to delete

    Usage
    -----
    # Delete route-map rmap_1 and rmap_2 from router r1
    input_dict = {
        "r1": {
            "route_maps": ["rmap_1", "rmap__2"]
        }
    }
    result = delete_route_maps(tgen, input_dict)

    Returns
    -------
    errormsg(str) or True
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))

    # Rewrite each router's "route_maps" entry from a list of names into
    # the dict shape create_route_maps() expects, flagged for deletion.
    for router in input_dict.keys():
        names = input_dict[router]["route_maps"][:]
        router_data = input_dict[router]
        router_data["route_maps"] = {}
        for name in names:
            router_data["route_maps"][name] = [{"delete": True}]

    return create_route_maps(tgen, input_dict)
def create_bgp_community_lists(tgen, input_dict, build=False):
    """
    Create bgp community-list or large-community-list on the devices as per
    the arguments passed. Takes list of communities in input.

    Parameters
    ----------
    * `tgen` : Topogen object
    * `input_dict` : Input dict data, required when configuring from testcase
    * `build` : Only for initial setup phase this is set as True.

    Usage
    -----
    input_dict_1 = {
        "r3": {
            "bgp_community_lists": [
                {
                    "community_type": "standard",
                    "action": "permit",
                    "name": "rmap_lcomm_{}".format(addr_type),
                    "value": "1:1:1 1:2:3 2:1:1 2:2:2",
                    "large": True
                }
            ]
        }
    }
    result = create_bgp_community_lists(tgen, input_dict_1)

    Returns
    -------
    errormsg(str) or True
    """
    result = False
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    # Deep-copy so setdefault() doesn't mutate the caller's dict.
    input_dict = deepcopy(input_dict)
    try:
        for router in input_dict.keys():
            if "bgp_community_lists" not in input_dict[router]:
                errormsg = "bgp_community_lists not present in input_dict"
                logger.debug(errormsg)
                continue

            config_data = []
            community_list = input_dict[router]["bgp_community_lists"]
            for community_dict in community_list:
                del_action = community_dict.setdefault("delete", False)
                community_type = community_dict.setdefault("community_type", None)
                action = community_dict.setdefault("action", None)
                value = community_dict.setdefault("value", "")
                large = community_dict.setdefault("large", None)
                name = community_dict.setdefault("name", None)
                if large:
                    cmd = "bgp large-community-list"
                else:
                    cmd = "bgp community-list"
                if not large and not (community_type and action and value):
                    errormsg = (
                        "community_type, action and value are "
                        "required in bgp_community_list"
                    )
                    logger.error(errormsg)
                    return False
                # Numbered lists use "<cmd> <number> <action> <value>";
                # named lists insert the name after the type keyword.
                # NOTE(review): int(None) raises TypeError, not ValueError,
                # so a missing community_type on a "large" entry would
                # escape this handler — confirm callers always set it.
                try:
                    community_type = int(community_type)
                    cmd = "{} {} {} {}".format(cmd, community_type, action, value)
                except ValueError:
                    cmd = "{} {} {} {} {}".format(
                        cmd, community_type, name, action, value
                    )
                if del_action:
                    cmd = "no {}".format(cmd)
                config_data.append(cmd)
            result = create_common_configuration(
                tgen, router, config_data, "bgp_community_list", build=build
            )
    except InvalidCLIError:
        # Traceback
        errormsg = traceback.format_exc()
        logger.error(errormsg)
        return errormsg

    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return result
def shutdown_bringup_interface(tgen, dut, intf_name, ifaceaction=False):
    """
    Shut down or bring up a router's interface via the topotest helper.

    * `tgen` : Topogen object
    * `dut` : Device under test
    * `intf_name` : Interface name to be shut/no shut
    * `ifaceaction` : Action, True brings the interface up,
                      False (default) shuts it down

    Usage
    -----
    dut = "r3"
    intf = "r3-r1-eth0"
    # Shut down interface
    shutdown_bringup_interface(tgen, dut, intf, False)
    # Bring up interface
    shutdown_bringup_interface(tgen, dut, intf, True)

    Returns
    -------
    errormsg(str) or True
    """
    rnode = tgen.routers()[dut]
    if ifaceaction:
        logger.info("Bringing up interface : {}".format(intf_name))
    else:
        logger.info("Shutting down interface : {}".format(intf_name))
    interface_set_status(rnode, intf_name, ifaceaction)
def addKernelRoute(
    tgen, router, intf, group_addr_range, next_hop=None, src=None, del_action=None
):
    """
    Add (or delete) a route in the Linux kernel and verify it is present.

    Parameters:
    -----------
    * `tgen` : Topogen object
    * `router`: router for which kernel routes needs to be added
    * `intf`: interface name, for which kernel routes needs to be added
    * `group_addr_range`: single prefix or list of prefixes to install
    * `next_hop`: nexthop address used with "via"
    * `src`: IPv6 source-address hint (used with `intf`)
    * `del_action`: when truthy, delete the route instead of adding it

    returns:
    --------
    errormsg or True
    """
    logger.debug("Entering lib API: addKernelRoute()")

    rnode = tgen.routers()[router]

    if type(group_addr_range) is not list:
        group_addr_range = [group_addr_range]

    for grp_addr in group_addr_range:
        addr_type = validate_ip_address(grp_addr)
        if addr_type == "ipv4":
            if next_hop is not None:
                cmd = "ip route add {} via {}".format(grp_addr, next_hop)
            else:
                cmd = "ip route add {} dev {}".format(grp_addr, intf)
            if del_action:
                cmd = "ip route del {}".format(grp_addr)
            verify_cmd = "ip route"
        elif addr_type == "ipv6":
            if intf and src:
                cmd = "ip -6 route add {} dev {} src {}".format(grp_addr, intf, src)
            else:
                cmd = "ip -6 route add {} via {}".format(grp_addr, next_hop)
            verify_cmd = "ip -6 route"
            if del_action:
                cmd = "ip -6 route del {}".format(grp_addr)

        logger.info("[DUT: {}]: Running command: [{}]".format(router, cmd))
        output = rnode.run(cmd)

        # Verifying if ip route added to kernal
        result = rnode.run(verify_cmd)
        logger.debug("{}\n{}".format(verify_cmd, result))

        # BUG FIX: initialize mask per iteration; previously it was only
        # bound inside the "/" branch and could be stale or unbound.
        mask = None
        if "/" in grp_addr:
            ip, mask = grp_addr.split("/")
            # Host routes show up in "ip route" without the /32 or /128.
            if mask == "32" or mask == "128":
                grp_addr = ip

        # BUG FIX: original used `mask is not "0"`, a string *identity*
        # comparison whose result is interpreter-dependent; use `!=`.
        if not re_search(r"{}".format(grp_addr), result) and mask != "0":
            errormsg = (
                "[DUT: {}]: Kernal route is not added for group"
                " address {} Config output: {}".format(router, grp_addr, output)
            )
            return errormsg

    logger.debug("Exiting lib API: addKernelRoute()")
    return True
def configure_vxlan(tgen, input_dict):
    """
    Add and configure vxlan devices in the Linux kernel.

    * `tgen`: tgen object
    * `input_dict` : data for vxlan config

    Usage:
    ------
    input_dict= {
        "dcg2":{
            "vxlan":[{
                "vxlan_name": "vxlan75100",
                "vxlan_id": "75100",
                "dstport": 4789,
                "local_addr": "120.0.0.1",
                "learning": "no",
                "delete": True
            }]
        }
    }

    configure_vxlan(tgen, input_dict)

    Returns:
    -------
    True or errormsg
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))

    router_list = tgen.routers()
    for dut in input_dict.keys():
        rnode = tgen.routers()[dut]

        if "vxlan" in input_dict[dut]:
            for vxlan_dict in input_dict[dut]["vxlan"]:
                cmd = "ip link "

                del_vxlan = vxlan_dict.setdefault("delete", None)
                vxlan_names = vxlan_dict.setdefault("vxlan_name", [])
                vxlan_ids = vxlan_dict.setdefault("vxlan_id", [])
                dstport = vxlan_dict.setdefault("dstport", None)
                local_addr = vxlan_dict.setdefault("local_addr", None)
                learning = vxlan_dict.setdefault("learning", None)

                config_data = []
                if vxlan_names and vxlan_ids:
                    # Names and VNIs are paired positionally.
                    for vxlan_name, vxlan_id in zip(vxlan_names, vxlan_ids):
                        cmd = "ip link"

                        if del_vxlan:
                            cmd = "{} del {} type vxlan id {}".format(
                                cmd, vxlan_name, vxlan_id
                            )
                        else:
                            cmd = "{} add {} type vxlan id {}".format(
                                cmd, vxlan_name, vxlan_id
                            )

                        if dstport:
                            cmd = "{} dstport {}".format(cmd, dstport)

                        if local_addr:
                            # The tunnel source address is also plumbed onto
                            # the vxlan device itself.
                            ip_cmd = "ip addr add {} dev {}".format(
                                local_addr, vxlan_name
                            )
                            if del_vxlan:
                                ip_cmd = "ip addr del {} dev {}".format(
                                    local_addr, vxlan_name
                                )
                            config_data.append(ip_cmd)
                            cmd = "{} local {}".format(cmd, local_addr)

                        # MAC learning on the vxlan device: "no" disables it
                        # (the EVPN control plane supplies remote MACs).
                        if learning == "no":
                            cmd = "{} nolearning".format(cmd)
                        elif learning == "yes":
                            cmd = "{} learning".format(cmd)

                        config_data.append(cmd)

                        try:
                            for _cmd in config_data:
                                logger.info("[DUT: %s]: Running command: %s", dut, _cmd)
                                rnode.run(_cmd)
                        except InvalidCLIError:
                            # Traceback
                            errormsg = traceback.format_exc()
                            logger.error(errormsg)
                            return errormsg

    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return True
def configure_brctl(tgen, topo, input_dict):
    """
    Add and configure bridge devices (brctl-style, via "ip link").

    * `tgen`: tgen object
    * `topo`: topology data, used to bring up interfaces bound to a VRF
    * `input_dict` : data for bridge config

    Usage:
    ------
    input_dict= {
        "dcg2":{
            "brctl": [{
                "brctl_name": "br100",
                "addvxlan": "vxlan75100",
                "vrf": "RED",
                "stp": "off"
            }]
        }
    }
    configure_brctl(tgen, topo, input_dict)

    Returns:
    -------
    True or errormsg
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    router_list = tgen.routers()
    for dut in input_dict.keys():
        rnode = tgen.routers()[dut]
        if "brctl" in input_dict[dut]:
            for brctl_dict in input_dict[dut]["brctl"]:
                # Parallel lists; zip() below pairs them positionally.
                brctl_names = brctl_dict.setdefault("brctl_name", [])
                addvxlans = brctl_dict.setdefault("addvxlan", [])
                stp_values = brctl_dict.setdefault("stp", [])
                vrfs = brctl_dict.setdefault("vrf", [])
                ip_cmd = "ip link set"
                for brctl_name, vxlan, vrf, stp in zip(
                    brctl_names, addvxlans, vrfs, stp_values
                ):
                    ip_cmd_list = []
                    # Create the bridge immediately; follow-up "ip link set"
                    # commands are queued in ip_cmd_list and run at the end.
                    cmd = "ip link add name {} type bridge stp_state {}".format(brctl_name, stp)
                    logger.info("[DUT: %s]: Running command: %s", dut, cmd)
                    rnode.run(cmd)
                    ip_cmd_list.append("{} up dev {}".format(ip_cmd, brctl_name))
                    if vxlan:
                        # Enslave the vxlan device to the new bridge.
                        cmd = "{} dev {} master {}".format(ip_cmd, vxlan, brctl_name)
                        logger.info("[DUT: %s]: Running command: %s", dut, cmd)
                        rnode.run(cmd)
                        ip_cmd_list.append("{} up dev {}".format(ip_cmd, vxlan))
                    if vrf:
                        # Bind the bridge to the VRF and bring up every
                        # topology interface that belongs to the same VRF.
                        ip_cmd_list.append(
                            "{} dev {} master {}".format(ip_cmd, brctl_name, vrf)
                        )
                        for intf_name, data in topo["routers"][dut]["links"].items():
                            if "vrf" not in data:
                                continue
                            if data["vrf"] == vrf:
                                ip_cmd_list.append(
                                    "{} up dev {}".format(ip_cmd, data["interface"])
                                )
                    try:
                        for _ip_cmd in ip_cmd_list:
                            logger.info("[DUT: %s]: Running command: %s", dut, _ip_cmd)
                            rnode.run(_ip_cmd)
                    except InvalidCLIError:
                        # Traceback
                        errormsg = traceback.format_exc()
                        logger.error(errormsg)
                        return errormsg
    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return True
def configure_interface_mac(tgen, input_dict):
    """
    Configure MAC addresses on interfaces using ifconfig.

    (Docstring fix: the original said "Add and configure brctl" and had a
    malformed example; this function only sets interface MACs.)

    * `tgen`: topogen object
    * `input_dict` : mapping of router name -> {interface: mac}

    Usage
    -----
    input_mac= {
        "edge1":{
            "br75100": "00:80:48:BA:d1:00",
            "br75200": "00:80:48:BA:d1:00"
        }
    }
    configure_interface_mac(tgen, input_mac)

    Returns:
    -------
    True or errormsg
    """
    router_list = tgen.routers()
    for dut in input_dict.keys():
        rnode = tgen.routers()[dut]
        for intf, mac in input_dict[dut].items():
            cmd = "ifconfig {} hw ether {}".format(intf, mac)
            logger.info("[DUT: %s]: Running command: %s", dut, cmd)
            try:
                result = rnode.run(cmd)
                # ifconfig is silent on success; any output is an error text.
                if len(result) != 0:
                    return result
            except InvalidCLIError:
                # Traceback
                errormsg = traceback.format_exc()
                logger.error(errormsg)
                return errormsg
    return True
#############################################
# Verification APIs
#############################################
@retry(attempts=5, wait=2, return_is_str=True, initial_wait=2)
def verify_rib(
    tgen,
    addr_type,
    dut,
    input_dict,
    next_hop=None,
    protocol=None,
    tag=None,
    metric=None,
    fib=None,
):
    """
    Data will be read from input_dict or input JSON file, API will generate
    same prefixes, which were redistributed by either create_static_routes() or
    advertise_networks_using_network_command() and will verify next_hop and
    each prefix/route is present in "show ip/ipv6 route {bgp/static} json"
    command o/p.

    Parameters
    ----------
    * `tgen` : topogen object
    * `addr_type` : ip type, ipv4/ipv6
    * `dut`: Device Under Test, for which user wants to test the data
    * `input_dict` : input dict, has details of static routes
    * `next_hop`[optional]: next_hop which needs to be verified,
                           default: static
    * `protocol`[optional]: protocol, default = None
    * `tag`[optional]: route tag expected in the RIB entry
    * `metric`[optional]: metric expected in the RIB entry
    * `fib`[optional]: when set together with next_hop, verify the
                       FIB-installed ("fib" flagged) nexthops instead

    Usage
    -----
    # RIB can be verified for static routes OR network advertised using
    network command. Following are input_dicts to create static routes
    and advertise networks using network command. Any one of the input_dict
    can be passed to verify_rib() to verify routes in DUT"s RIB.

    # Creating static routes for r1
    input_dict = {
        "r1": {
            "static_routes": [{"network": "10.0.20.1/32", "no_of_ip": 9, \
        "admin_distance": 100, "next_hop": "10.0.0.2", "tag": 4001}]
    }}
    # Advertising networks using network command in router r1
    input_dict = {
        "r1": {
            "advertise_networks": [{"start_ip": "20.0.0.0/32",
                                    "no_of_network": 10},
                                    {"start_ip": "30.0.0.0/32"}]
    }}
    # Verifying ipv4 routes in router r1 learned via BGP
    dut = "r2"
    protocol = "bgp"
    result = verify_rib(tgen, "ipv4", dut, input_dict, protocol = protocol)

    Returns
    -------
    errormsg(str) or True
    """
    logger.info("Entering lib API: verify_rib()")
    router_list = tgen.routers()
    additional_nexthops_in_required_nhs = []
    found_hops = []
    for routerInput in input_dict.keys():
        for router, rnode in router_list.iteritems():
            # Only verify on the requested DUT.
            if router != dut:
                continue
            logger.info("Checking router %s RIB:", router)
            # Verifying RIB routes
            if addr_type == "ipv4":
                command = "show ip route"
            else:
                command = "show ipv6 route"
            found_routes = []
            missing_routes = []
            # --- Case 1: routes created with create_static_routes() ---
            if "static_routes" in input_dict[routerInput]:
                static_routes = input_dict[routerInput]["static_routes"]
                for static_route in static_routes:
                    if "vrf" in static_route and static_route["vrf"] is not None:
                        logger.info(
                            "[DUT: {}]: Verifying routes for VRF:"
                            " {}".format(router, static_route["vrf"])
                        )
                        cmd = "{} vrf {}".format(command, static_route["vrf"])
                    else:
                        cmd = "{}".format(command)
                    if protocol:
                        cmd = "{} {}".format(cmd, protocol)
                    cmd = "{} json".format(cmd)
                    rib_routes_json = run_frr_cmd(rnode, cmd, isjson=True)
                    # Verifying output dictionary rib_routes_json is not empty
                    if bool(rib_routes_json) is False:
                        errormsg = "No route found in rib of router {}..".format(router)
                        return errormsg
                    network = static_route["network"]
                    if "no_of_ip" in static_route:
                        no_of_ip = static_route["no_of_ip"]
                    else:
                        no_of_ip = 1
                    if "tag" in static_route:
                        _tag = static_route["tag"]
                    else:
                        _tag = None
                    # Generating IPs for verification
                    ip_list = generate_ips(network, no_of_ip)
                    st_found = False
                    nh_found = False
                    for st_rt in ip_list:
                        st_rt = str(ipaddress.ip_network(unicode(st_rt)))
                        _addr_type = validate_ip_address(st_rt)
                        if _addr_type != addr_type:
                            continue
                        if st_rt in rib_routes_json:
                            st_found = True
                            found_routes.append(st_rt)
                            if fib and next_hop:
                                # Verify only FIB-installed nexthop sets.
                                if type(next_hop) is not list:
                                    next_hop = [next_hop]
                                for mnh in range(0, len(rib_routes_json[st_rt])):
                                    if (
                                        "fib"
                                        in rib_routes_json[st_rt][mnh]["nexthops"][0]
                                    ):
                                        found_hops.append(
                                            [
                                                rib_r["ip"]
                                                for rib_r in rib_routes_json[st_rt][
                                                    mnh
                                                ]["nexthops"]
                                            ]
                                        )
                                if found_hops[0]:
                                    missing_list_of_nexthops = set(
                                        found_hops[0]
                                    ).difference(next_hop)
                                    additional_nexthops_in_required_nhs = set(
                                        next_hop
                                    ).difference(found_hops[0])
                                    if additional_nexthops_in_required_nhs:
                                        logger.info(
                                            "Nexthop "
                                            "%s is not active for route %s in "
                                            "RIB of router %s\n",
                                            additional_nexthops_in_required_nhs,
                                            st_rt,
                                            dut,
                                        )
                                        errormsg = (
                                            "Nexthop {} is not active"
                                            " for route {} in RIB of router"
                                            " {}\n".format(
                                                additional_nexthops_in_required_nhs,
                                                st_rt,
                                                dut,
                                            )
                                        )
                                        return errormsg
                                    else:
                                        nh_found = True
                            elif next_hop and fib is None:
                                # Verify nexthops of the first RIB entry only.
                                if type(next_hop) is not list:
                                    next_hop = [next_hop]
                                found_hops = [
                                    rib_r["ip"]
                                    for rib_r in rib_routes_json[st_rt][0]["nexthops"]
                                ]
                                if found_hops:
                                    missing_list_of_nexthops = set(
                                        found_hops
                                    ).difference(next_hop)
                                    additional_nexthops_in_required_nhs = set(
                                        next_hop
                                    ).difference(found_hops)
                                    if additional_nexthops_in_required_nhs:
                                        logger.info(
                                            "Missing nexthop %s for route"
                                            " %s in RIB of router %s\n",
                                            additional_nexthops_in_required_nhs,
                                            st_rt,
                                            dut,
                                        )
                                        errormsg = (
                                            "Nexthop {} is Missing for "
                                            "route {} in RIB of router {}\n".format(
                                                additional_nexthops_in_required_nhs,
                                                st_rt,
                                                dut,
                                            )
                                        )
                                        return errormsg
                                    else:
                                        nh_found = True
                            if tag:
                                if "tag" not in rib_routes_json[st_rt][0]:
                                    errormsg = (
                                        "[DUT: {}]: tag is not"
                                        " present for"
                                        " route {} in RIB \n".format(dut, st_rt)
                                    )
                                    return errormsg
                                if _tag != rib_routes_json[st_rt][0]["tag"]:
                                    errormsg = (
                                        "[DUT: {}]: tag value {}"
                                        " is not matched for"
                                        " route {} in RIB \n".format(dut, _tag, st_rt,)
                                    )
                                    return errormsg
                            if metric is not None:
                                if "metric" not in rib_routes_json[st_rt][0]:
                                    errormsg = (
                                        "[DUT: {}]: metric is"
                                        " not present for"
                                        " route {} in RIB \n".format(dut, st_rt)
                                    )
                                    return errormsg
                                if metric != rib_routes_json[st_rt][0]["metric"]:
                                    errormsg = (
                                        "[DUT: {}]: metric value "
                                        "{} is not matched for "
                                        "route {} in RIB \n".format(dut, metric, st_rt,)
                                    )
                                    return errormsg
                        else:
                            missing_routes.append(st_rt)
                    if nh_found:
                        logger.info(
                            "[DUT: {}]: Found next_hop {} for all bgp"
                            " routes in RIB".format(router, next_hop)
                        )
                if len(missing_routes) > 0:
                    errormsg = "[DUT: {}]: Missing route in RIB, " "routes: {}".format(
                        dut, missing_routes
                    )
                    return errormsg
                if found_routes:
                    logger.info(
                        "[DUT: %s]: Verified routes in RIB, found" " routes are: %s\n",
                        dut,
                        found_routes,
                    )
                # Static routes handled; skip the BGP-advertised section.
                continue
            # --- Case 2: networks advertised via BGP "network" command ---
            if "bgp" in input_dict[routerInput]:
                if (
                    "advertise_networks"
                    not in input_dict[routerInput]["bgp"]["address_family"][addr_type][
                        "unicast"
                    ]
                ):
                    continue
                found_routes = []
                missing_routes = []
                advertise_network = input_dict[routerInput]["bgp"]["address_family"][
                    addr_type
                ]["unicast"]["advertise_networks"]
                # Continue if there are no network advertise
                if len(advertise_network) == 0:
                    continue
                for advertise_network_dict in advertise_network:
                    if "vrf" in advertise_network_dict:
                        cmd = "{} vrf {} json".format(command, static_route["vrf"])
                    else:
                        cmd = "{} json".format(command)
                    rib_routes_json = run_frr_cmd(rnode, cmd, isjson=True)
                    # Verifying output dictionary rib_routes_json is not empty
                    if bool(rib_routes_json) is False:
                        errormsg = "No route found in rib of router {}..".format(router)
                        return errormsg
                    start_ip = advertise_network_dict["network"]
                    if "no_of_network" in advertise_network_dict:
                        no_of_network = advertise_network_dict["no_of_network"]
                    else:
                        no_of_network = 1
                    # Generating IPs for verification
                    ip_list = generate_ips(start_ip, no_of_network)
                    st_found = False
                    nh_found = False
                    for st_rt in ip_list:
                        st_rt = str(ipaddress.ip_network(unicode(st_rt)))
                        _addr_type = validate_ip_address(st_rt)
                        if _addr_type != addr_type:
                            continue
                        if st_rt in rib_routes_json:
                            st_found = True
                            found_routes.append(st_rt)
                            if next_hop:
                                if type(next_hop) is not list:
                                    next_hop = [next_hop]
                                # Every requested nexthop must be present.
                                count = 0
                                for nh in next_hop:
                                    for nh_dict in rib_routes_json[st_rt][0]["nexthops"]:
                                        if nh_dict["ip"] != nh:
                                            continue
                                        else:
                                            count += 1
                                if count == len(next_hop):
                                    nh_found = True
                                else:
                                    errormsg = (
                                        "Nexthop {} is Missing"
                                        " for route {} in "
                                        "RIB of router {}\n".format(next_hop, st_rt, dut)
                                    )
                                    return errormsg
                        else:
                            missing_routes.append(st_rt)
                if nh_found:
                    logger.info(
                        "Found next_hop {} for all routes in RIB"
                        " of router {}\n".format(next_hop, dut)
                    )
                if len(missing_routes) > 0:
                    errormsg = (
                        "Missing {} route in RIB of router {}, "
                        "routes: {} \n".format(addr_type, dut, missing_routes)
                    )
                    return errormsg
                if found_routes:
                    logger.info(
                        "Verified {} routes in router {} RIB, found"
                        " routes are: {}\n".format(addr_type, dut, found_routes)
                    )
    logger.info("Exiting lib API: verify_rib()")
    return True
def verify_admin_distance_for_static_routes(tgen, input_dict):
    """
    API to verify admin distance for static routes as defined in input_dict/
    input JSON by running show ip/ipv6 route json command.

    Parameter
    ---------
    * `tgen` : topogen object
    * `input_dict`: having details like - for which router and static routes
                    admin distance needs to be verified

    Usage
    -----
    # To verify admin distance is 10 for prefix 10.0.20.1/32 having next_hop
    10.0.0.2 in router r1
    input_dict = {
        "r1": {
            "static_routes": [{
                "network": "10.0.20.1/32",
                "admin_distance": 10,
                "next_hop": "10.0.0.2"
            }]
        }
    }
    result = verify_admin_distance_for_static_routes(tgen, input_dict)

    Returns
    -------
    errormsg(str) or True
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    for router in input_dict.keys():
        if router not in tgen.routers():
            continue
        rnode = tgen.routers()[router]
        for static_route in input_dict[router]["static_routes"]:
            addr_type = validate_ip_address(static_route["network"])
            # Command to execute
            if addr_type == "ipv4":
                command = "show ip route json"
            else:
                command = "show ipv6 route json"
            show_ip_route_json = run_frr_cmd(rnode, command, isjson=True)
            logger.info(
                "Verifying admin distance for static route %s" " under dut %s:",
                static_route,
                router,
            )
            network = static_route["network"]
            next_hop = static_route["next_hop"]
            admin_distance = static_route["admin_distance"]
            if network in show_ip_route_json:
                # BUGFIX: only index into the output after confirming the
                # route exists; the original indexed first and raised
                # KeyError instead of returning the errormsg below.
                route_data = show_ip_route_json[network][0]
                if route_data["nexthops"][0]["ip"] == next_hop:
                    if route_data["distance"] != admin_distance:
                        errormsg = (
                            "Verification failed: admin distance"
                            " for static route {} under dut {},"
                            " found:{} but expected:{}".format(
                                static_route,
                                router,
                                route_data["distance"],
                                admin_distance,
                            )
                        )
                        return errormsg
                    else:
                        logger.info(
                            "Verification successful: admin"
                            " distance for static route %s under"
                            " dut %s, found:%s",
                            static_route,
                            router,
                            route_data["distance"],
                        )
            else:
                errormsg = (
                    "Static route {} not found in "
                    "show_ip_route_json for dut {}".format(network, router)
                )
                return errormsg
    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return True
def verify_prefix_lists(tgen, input_dict):
    """
    Running "show ip prefix-list" command and verifying given prefix-list
    is NOT present in the router (i.e. it was deleted).

    Parameters
    ----------
    * `tgen` : topogen object
    * `input_dict`: data to verify prefix lists

    Usage
    -----
    # To verify pf_list_1 is absent from router r1
    input_dict = {
        "r1": {
            "prefix_lists": ["pf_list_1"]
        }}
    result = verify_prefix_lists("ipv4", input_dict, tgen)

    Returns
    -------
    errormsg(str) or True
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    for router in input_dict.keys():
        if router not in tgen.routers():
            continue
        rnode = tgen.routers()[router]
        # Show ip prefix list
        show_prefix_list = run_frr_cmd(rnode, "show ip prefix-list")
        # Verify Prefix list is deleted
        prefix_lists_addr = input_dict[router]["prefix_lists"]
        for addr_type in prefix_lists_addr:
            if not check_address_types(addr_type):
                continue
            for prefix_list in prefix_lists_addr[addr_type].keys():
                if prefix_list in show_prefix_list:
                    errormsg = (
                        "Prefix list {} is/are present in the router"
                        " {}".format(prefix_list, router)
                    )
                    return errormsg
                # BUGFIX: log inside the loop. The original referenced the
                # loop variable after the loop, raising UnboundLocalError
                # when an address family had no prefix lists.
                logger.info(
                    "Prefix list %s is/are not present in the router" " from router %s",
                    prefix_list,
                    router,
                )
    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return True
@retry(attempts=2, wait=4, return_is_str=True, initial_wait=2)
def verify_route_maps(tgen, input_dict):
    """
    Run "show route-map" and confirm that every route-map named in
    input_dict has been removed from the corresponding router.

    Parameters
    ----------
    * `tgen` : topogen object
    * `input_dict`: per-router lists of route-map names expected deleted

    Usage
    -----
    # To verify rmap_1 and rmap_2 are deleted from router r1
    input_dict = {
        "r1": {
            "route_maps": ["rmap_1", "rmap_2"]
        }
    }
    result = verify_route_maps(tgen, input_dict)

    Returns
    -------
    errormsg(str) or True
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    known_routers = tgen.routers()
    for router in input_dict.keys():
        # Skip entries that do not correspond to a live router.
        if router not in known_routers:
            continue
        rnode = known_routers[router]
        # Current route-map configuration on this router.
        rmap_output = rnode.vtysh_cmd("show route-map")
        expected_deleted = input_dict[router]["route_maps"]
        # Report the first route-map (in input order) that still exists.
        leftovers = [rm for rm in expected_deleted if rm in rmap_output]
        if leftovers:
            errormsg = "Route map {} is not deleted from router" " {}".format(
                leftovers[0], router
            )
            return errormsg
        logger.info(
            "Route map %s is/are deleted successfully from" " router %s",
            expected_deleted,
            router,
        )
    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return True
@retry(attempts=3, wait=4, return_is_str=True)
def verify_bgp_community(tgen, addr_type, router, network, input_dict=None):
    """
    API to verify that a BGP (large) community is attached to a route on a
    given DUT by running "show bgp ipv4/6 {route address} json" command.

    Parameters
    ----------
    * `tgen`: topogen object
    * `addr_type` : ip type, ipv4/ipv6
    * `router`: Device Under Test
    * `network`: iterable of prefixes for which the attribute is verified
    * `input_dict`: optional mapping of attribute name ("community" /
      "largeCommunity") to the exact expected value string

    Usage
    -----
    networks = ["200.50.2.0/32"]
    input_dict = {
        "largeCommunity": "2:1:1 2:2:2 2:3:3 2:4:4 2:5:5"
    }
    result = verify_bgp_community(tgen, "ipv4", dut, network, input_dict=None)

    Returns
    -------
    errormsg(str) or True (False if router is unknown)
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    if router not in tgen.routers():
        return False
    rnode = tgen.routers()[router]
    logger.debug(
        "Verifying BGP community attributes on dut %s: for %s " "network %s",
        router,
        addr_type,
        network,
    )
    for net in network:
        cmd = "show bgp {} {} json".format(addr_type, net)
        show_bgp_json = rnode.vtysh_cmd(cmd, isjson=True)
        logger.info(show_bgp_json)
        if "paths" not in show_bgp_json:
            return "Prefix {} not found in BGP table of router: {}".format(net, router)
        as_paths = show_bgp_json["paths"]
        found = False
        # Inspect every path; a single mismatch returns immediately, a
        # match on any path marks the prefix as found.
        for i in range(len(as_paths)):
            if (
                "largeCommunity" in show_bgp_json["paths"][i]
                or "community" in show_bgp_json["paths"][i]
            ):
                found = True
                logger.info(
                    "Large Community attribute is found for route:" " %s in router: %s",
                    net,
                    router,
                )
                if input_dict is not None:
                    for criteria, comm_val in input_dict.items():
                        show_val = show_bgp_json["paths"][i][criteria]["string"]
                        if comm_val == show_val:
                            logger.info(
                                "Verifying BGP %s for prefix: %s"
                                " in router: %s, found expected"
                                " value: %s",
                                criteria,
                                net,
                                router,
                                comm_val,
                            )
                        else:
                            errormsg = (
                                "Failed: Verifying BGP attribute"
                                " {} for route: {} in router: {}"
                                ", expected value: {} but found"
                                ": {}".format(criteria, net, router, comm_val, show_val)
                            )
                            return errormsg
        if not found:
            errormsg = (
                "Large Community attribute is not found for route: "
                "{} in router: {} ".format(net, router)
            )
            return errormsg
    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return True
def verify_create_community_list(tgen, input_dict):
    """
    Verify that the BGP large-community-lists described in input_dict exist
    on each router, using "show bgp large-community-list <name> detail".

    Parameters
    ----------
    * `tgen`: topogen object
    * `input_dict`: per-router "bgp_community_lists" entries, each with a
      "name" and a "community_type"

    Usage
    -----
    input_dict = {
        "r1": {
            "large-community-list": {
                "standard": {
                     "Test1": [{"action": "PERMIT", "attribute":\
                                    ""}]
                }}}}
    result = verify_create_community_list(tgen, input_dict)

    Returns
    -------
    errormsg(str) or True
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    routers = tgen.routers()
    for router in input_dict.keys():
        if router not in routers:
            continue
        rnode = routers[router]
        logger.info("Verifying large-community is created for dut %s:", router)
        for comm_data in input_dict[router]["bgp_community_lists"]:
            comm_name = comm_data["name"]
            comm_type = comm_data["community_type"]
            detail_output = run_frr_cmd(
                rnode, "show bgp large-community-list {} detail".format(comm_name)
            )
            # Both the list name and its type must appear in the output.
            if comm_name not in detail_output or comm_type not in detail_output:
                errormsg = "BGP {} large-community-list {} is not" " created".format(
                    comm_type, comm_name
                )
                return errormsg
            logger.info(
                "BGP %s large-community-list %s is" " created", comm_type, comm_name
            )
    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return True
def verify_cli_json(tgen, input_dict):
    """
    Verify that each listed CLI supports a "json" variant producing
    non-empty, recognised output.

    Parameters
    ----------
    * `tgen`: topogen object
    * `input_dict`: per-router list of CLIs to probe

    Usage
    -----
    input_dict = {
        "edge1":{
            "cli": ["show evpn vni detail", show evpn rmac vni all]
        }
    }
    result = verify_cli_json(tgen, input_dict)

    Returns
    -------
    errormsg(str) or True
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    for dut in input_dict.keys():
        rnode = tgen.routers()[dut]
        for cli in input_dict[dut]["cli"]:
            logger.info(
                "[DUT: %s]: Verifying JSON is available for " "CLI %s :", dut, cli
            )
            json_cli = "{} json".format(cli)
            parsed = rnode.vtysh_cmd(json_cli, isjson=True)
            # Empty output or an "unknown command" marker both mean the CLI
            # has no JSON form (merged from two identical error branches).
            if not bool(parsed) or "unknown" in parsed or "Unknown" in parsed:
                errormsg = "CLI: %s, JSON format is not available" % (cli)
                return errormsg
            logger.info("CLI : %s JSON format is available: " "\n %s", cli, parsed)
    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return True
@retry(attempts=2, wait=4, return_is_str=True, initial_wait=2)
def verify_evpn_vni(tgen, input_dict):
    """
    API to verify evpn vni details using "show evpn vni detail json"
    command.

    Parameters
    ----------
    * `tgen`: topogen object
    * `input_dict`: having details like - for which router, evpn details
                    needs to be verified

    Usage
    -----
    input_dict = {
        "edge1":{
            "vni": [
                {
                    "75100":{
                        "vrf": "RED",
                        "vxlanIntf": "vxlan75100",
                        "localVtepIp": "120.1.1.1",
                        "sviIntf": "br100"
                    }
                }
            ]
        }
    }
    result = verify_evpn_vni(tgen, input_dict)

    Returns
    -------
    errormsg(str) or True; False when the loop completes without a verdict
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    for dut in input_dict.keys():
        rnode = tgen.routers()[dut]
        logger.info("[DUT: %s]: Verifying evpn vni details :", dut)
        cmd = "show evpn vni detail json"
        evpn_all_vni_json = run_frr_cmd(rnode, cmd, isjson=True)
        if not bool(evpn_all_vni_json):
            errormsg = "No output for '{}' cli".format(cmd)
            return errormsg
        if "vni" in input_dict[dut]:
            for vni_dict in input_dict[dut]["vni"]:
                found = False
                vni = vni_dict["name"]
                # Scan every VNI entry in the JSON output for a match.
                for evpn_vni_json in evpn_all_vni_json:
                    if "vni" in evpn_vni_json:
                        if evpn_vni_json["vni"] != int(vni):
                            continue
                        # Compare every expected attribute; any mismatch
                        # fails immediately.
                        for attribute in vni_dict.keys():
                            if vni_dict[attribute] != evpn_vni_json[attribute]:
                                errormsg = (
                                    "[DUT: %s] Verifying "
                                    "%s for VNI: %s [FAILED]||"
                                    ", EXPECTED  : %s "
                                    " FOUND : %s"
                                    % (
                                        dut,
                                        attribute,
                                        vni,
                                        vni_dict[attribute],
                                        evpn_vni_json[attribute],
                                    )
                                )
                                return errormsg
                            else:
                                found = True
                                logger.info(
                                    "[DUT: %s] Verifying"
                                    " %s for VNI: %s , "
                                    "Found Expected : %s ",
                                    dut,
                                    attribute,
                                    vni,
                                    evpn_vni_json[attribute],
                                )
                        # The VNI must also be operationally Up.
                        if evpn_vni_json["state"] != "Up":
                            errormsg = (
                                "[DUT: %s] Failed: Verifying"
                                " State for VNI: %s is not Up" % (dut, vni)
                            )
                            return errormsg
                    else:
                        errormsg = (
                            "[DUT: %s] Failed:"
                            " VNI: %s is not present in JSON" % (dut, vni)
                        )
                        return errormsg
                if found:
                    logger.info(
                        "[DUT %s]: Verifying VNI : %s "
                        "details and state is Up [PASSED]!!",
                        dut,
                        vni,
                    )
                    return True
        else:
            errormsg = (
                "[DUT: %s] Failed:" " vni details are not present in input data" % (dut)
            )
            return errormsg
    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return False
@retry(attempts=2, wait=4, return_is_str=True, initial_wait=2)
def verify_vrf_vni(tgen, input_dict):
    """
    API to verify vrf vni details using "show vrf vni json"
    command.

    Parameters
    ----------
    * `tgen`: topogen object
    * `input_dict`: having details like - for which router, vrf/vni details
                    needs to be verified

    Usage
    -----
    input_dict = {
        "edge1":{
            "vrfs": [
                {
                    "RED":{
                        "vni": 75000,
                        "vxlanIntf": "vxlan75100",
                        "sviIntf": "br100",
                        "routerMac": "00:80:48:ba:d1:00",
                        "state": "Up"
                    }
                }
            ]
        }
    }
    result = verify_vrf_vni(tgen, input_dict)

    Returns
    -------
    errormsg(str) or True; False when the loop completes without a verdict
    """
    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
    for dut in input_dict.keys():
        rnode = tgen.routers()[dut]
        logger.info("[DUT: %s]: Verifying vrf vni details :", dut)
        cmd = "show vrf vni json"
        vrf_all_vni_json = run_frr_cmd(rnode, cmd, isjson=True)
        if not bool(vrf_all_vni_json):
            errormsg = "No output for '{}' cli".format(cmd)
            return errormsg
        if "vrfs" in input_dict[dut]:
            for vrfs in input_dict[dut]["vrfs"]:
                for vrf, vrf_dict in vrfs.items():
                    found = False
                    # Scan every VRF entry in the JSON output for a match.
                    for vrf_vni_json in vrf_all_vni_json["vrfs"]:
                        if "vrf" in vrf_vni_json:
                            if vrf_vni_json["vrf"] != vrf:
                                continue
                            # Compare every expected attribute; any
                            # mismatch fails immediately.
                            for attribute in vrf_dict.keys():
                                if vrf_dict[attribute] == vrf_vni_json[attribute]:
                                    found = True
                                    logger.info(
                                        "[DUT %s]: VRF: %s, "
                                        "verifying %s "
                                        ", Found Expected: %s "
                                        "[PASSED]!!",
                                        dut,
                                        vrf,
                                        attribute,
                                        vrf_vni_json[attribute],
                                    )
                                else:
                                    errormsg = (
                                        "[DUT: %s] VRF: %s, "
                                        "verifying %s [FAILED!!] "
                                        ", EXPECTED : %s "
                                        ", FOUND : %s"
                                        % (
                                            dut,
                                            vrf,
                                            attribute,
                                            vrf_dict[attribute],
                                            vrf_vni_json[attribute],
                                        )
                                    )
                                    return errormsg
                        else:
                            errormsg = "[DUT: %s] VRF: %s " "is not present in JSON" % (
                                dut,
                                vrf,
                            )
                            return errormsg
                    if found:
                        logger.info(
                            "[DUT %s] Verifying VRF: %s " " details [PASSED]!!",
                            dut,
                            vrf,
                        )
                        return True
        else:
            errormsg = (
                "[DUT: %s] Failed:" " vrf details are not present in input data" % (dut)
            )
            return errormsg
    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
    return False
|
gpl-2.0
| -3,059,218,530,192,539,600
| 33.986071
| 96
| 0.456985
| false
| 4.476367
| true
| false
| false
|
Zarthus/Reconcile
|
tools/ignorelist.py
|
1
|
2663
|
"""
The MIT License (MIT)
Copyright (c) 2014 - 2015 Jos "Zarthus" Ahrens and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import re
class IgnoreList:
    """Case-insensitive ignore list with ``*``/``?`` glob wildcard support.

    Entries are stored lower-cased by ignore(); matching against wildcard
    entries is case-insensitive via re.I.
    """

    def __init__(self, ignorelist):
        # Accept None / empty input and fall back to a fresh empty list.
        self.ignorelist = ignorelist if ignorelist else []

    def isIgnored(self, target):
        """Return True when target matches an entry exactly (lower-cased)
        or matches any wildcard entry."""
        if target.lower() in self.ignorelist:
            return True
        for entry in self.ignorelist:
            if "*" in entry and self.compileIgnore(entry).match(target):
                return True
        return False

    def isIgnoredWildcard(self, wctarget):
        """Like isIgnored, but wctarget itself may contain wildcards."""
        if "*" not in wctarget:
            return self.isIgnored(wctarget)
        pattern = self.compileIgnore(wctarget)
        for entry in self.ignorelist:
            if pattern.match(entry):
                return True
            if "*" in entry and self.compileIgnore(entry).match(wctarget):
                return True
        return False

    def ignore(self, target):
        """Add target (lower-cased); return False if already present."""
        lowered = target.lower()
        if lowered in self.ignorelist:
            return False
        self.ignorelist.append(lowered)
        return True

    def unignore(self, target):
        """Remove target (lower-cased); return False if not present."""
        lowered = target.lower()
        if lowered not in self.ignorelist:
            return False
        self.ignorelist.remove(lowered)
        return True

    def getIgnoreList(self):
        """Return the backing list (not a copy)."""
        return self.ignorelist

    def compileIgnore(self, target):
        """Translate a glob-style pattern into a case-insensitive regex."""
        escaped = re.escape(target).replace("\\*", ".*").replace("\\?", ".")
        return re.compile(escaped, re.I)
|
mit
| 5,329,721,178,999,119,000
| 30.702381
| 77
| 0.640631
| false
| 4.521222
| false
| false
| false
|
pierce403/EmpirePanel
|
lib/modules/situational_awareness/network/powerview/user_hunter.py
|
1
|
5974
|
from lib.common import helpers
class Module:
    # Empire module wrapper for PowerView's Invoke-UserHunter.
    # NOTE(review): this file is Python 2 (print statement, dict.iteritems);
    # keep it py2-compatible.
    def __init__(self, mainMenu, params=[]):
        # Static metadata consumed by the Empire framework when listing
        # and describing modules.
        # NOTE(review): mutable default for `params` is shared between
        # calls; harmless here since it is only iterated, never mutated.
        self.info = {
            'Name': 'Invoke-UserHunter',
            'Author': ['@harmj0y'],
            'Description': ('Finds which machines users of a specified group are logged into. '
                            'Part of PowerView.'),
            'Background' : True,
            'OutputExtension' : None,
            'NeedsAdmin' : False,
            'OpsecSafe' : True,
            'MinPSVersion' : '2',
            'Comments': [
                'https://github.com/PowerShellMafia/PowerSploit/blob/dev/Recon/'
            ]
        }
        # any options needed by the module, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Agent' : {
                'Description'   :   'Agent to run module on.',
                'Required'      :   True,
                'Value'         :   ''
            },
            'ComputerName' : {
                'Description'   :   'Hosts to enumerate.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'ComputerFilter' : {
                'Description'   :   'Host filter name to query AD for, wildcards accepted.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'GroupName' : {
                'Description'   :   'Group name to query for target users.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'TargetServer' : {
                'Description'   :   'Hunt for users who are effective local admins on a target server.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'UserName' : {
                'Description'   :   'Specific username to search for.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'UserFilter' : {
                'Description'   :   'A customized ldap filter string to use for user enumeration, e.g. "(description=*admin*)"',
                'Required'      :   False,
                'Value'         :   ''
            },
            'StopOnSuccess' : {
                'Description'   :   'Switch. Stop hunting after finding after finding a target user.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'NoPing' : {
                'Description'   :   "Don't ping each host to ensure it's up before enumerating.",
                'Required'      :   False,
                'Value'         :   ''
            },
            'CheckAccess' : {
                'Description'   :   'Switch. Check if the current user has local admin access to found machines.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'Delay' : {
                'Description'   :   'Delay between enumerating hosts, defaults to 0.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'Domain' : {
                'Description'   :   'The domain to use for the query, defaults to the current domain.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'DomainController' : {
                'Description'   :   'Domain controller to reflect LDAP queries through.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'ShowAll' : {
                'Description'   :   'Switch. Return all user location results without filtering.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'Stealth' : {
                'Description'   :   'Switch. Only enumerate sessions from connonly used target servers.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'Threads' : {
                'Description'   :   'The maximum concurrent threads to execute.',
                'Required'      :   False,
                'Value'         :   ''
            }
        }
        # save off a copy of the mainMenu object to access external functionality
        #   like listeners/agent handlers/etc.
        self.mainMenu = mainMenu
        for param in params:
            # parameter format is [Name, Value]
            option, value = param
            if option in self.options:
                self.options[option]['Value'] = value
    def generate(self):
        # Build the PowerShell one-liner: PowerView source + Invoke-UserHunter
        # invocation with every non-empty option appended as a parameter.
        moduleName = self.info["Name"]
        # read in the common powerview.ps1 module source code
        moduleSource = self.mainMenu.installPath + "/data/module_source/situational_awareness/network/powerview.ps1"
        try:
            f = open(moduleSource, 'r')
        except:
            print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
            return ""
        moduleCode = f.read()
        f.close()
        # get just the code needed for the specified function
        script = helpers.generate_dynamic_powershell_script(moduleCode, moduleName)
        script += moduleName + " "
        for option,values in self.options.iteritems():
            if option.lower() != "agent":
                if values['Value'] and values['Value'] != '':
                    if values['Value'].lower() == "true":
                        # if we're just adding a switch
                        script += " -" + str(option)
                    else:
                        script += " -" + str(option) + " " + str(values['Value'])
        # Pipe through Format-List so the agent returns readable text,
        # then append a completion marker.
        script += ' | fl | Out-String | %{$_ + \"`n\"};"`n'+str(moduleName)+' completed!"'
        return script
|
bsd-3-clause
| -559,141,152,159,625,900
| 36.572327
| 128
| 0.4307
| false
| 4.692852
| false
| false
| false
|
kernsuite-debian/lofar
|
MAC/Deployment/data/Coordinates/CoordMenu.py
|
1
|
10615
|
#!/usr/bin/env python
# P.Donker ASTRON
# and Arno Schoenmakers the Great
import sys
import pg
from subprocess import Popen
import os
import getpass
from optparse import OptionParser
from database import getDBname, getDBhost, getDBport, getDBuser
VERSION = '0.0.2' # version of this script
# Default target epoch (decimal year) offered by the coordinate menus.
default_targetdate='2009.5'
def menu():
    """Print the interactive coordinates menu to stdout."""
    print("""
    |=====================================|
    | Coordinates menu                    |
    |=====================================|
    | 0   do all (1,2,3,4,5,6,7,9,11)     |
    | 1   destroy and create CDB          |
    | 2   create CDB objects              |
    | 3   load all normal-vectors         |
    | 4   load all rotation matrices      |
    | 5   load all hba_rotations          |
    | 6   calculate all HBADeltas         |
    | 7   load all ETRF(expected) files   |
    | 8   load one measurement file       |
    | 9   transform all ETRF to ITRF      |
    | 10  transform one ETRF to ITRF      |
    | 11  make all conf files             |
    | 12  make one conf file              |
    | Q   quit                            |
    |_____________________________________|
    """)
def get_input_with_default(prompt, default_value):
    """Prompt the user and return the answer, or default_value when empty.

    Bug fix: the original did ``input = raw_input`` inside the function,
    which makes ``input`` a local name in Python 3; since ``raw_input``
    does not exist there, the assignment never ran and calling
    ``input(...)`` raised UnboundLocalError instead of using the builtin.
    Binding a differently-named local avoids the shadowing.
    """
    try:
        read_line = raw_input  # Python 2
    except NameError:
        read_line = input  # Python 3 builtin
    answer = read_line(prompt + " [" + str(default_value) + "]: ")
    if len(answer) == 0:
        answer = default_value
    return answer
def create_cdb():
    """Drop and recreate the coordinates database via the shell helper script."""
    print('Creating new database')
    res = Popen('./create_CDB.sh').wait()
    print(res)


def create_cdb_objects():
    """Create the tables and other objects inside the coordinates database."""
    print('Creating database objects')
    res = Popen('./create_CDB_objects.py').wait()
    print(res)


def load_normal_vectors():
    """Prompt for a normal-vector data file and load it with the helper script.

    Exits the process on a missing filename, a non-existent file, or a
    non-zero loader exit status.
    """
    print('Loading normal vectors')
    filename = get_input_with_default("enter filename to load", "data/normal_vectors.dat")
    if len(filename) == 0:
        print('Error, No filename given')
        sys.exit()
    if not os.path.exists(filename):
        print("File does not exist")
        sys.exit()
    res = Popen(['./load_normal_vectors.py', filename]).wait()
    if (res != 0):
        sys.exit(1)
    # time.sleep(3)


def load_rotation_matrices():
    """Prompt for a rotation-matrix data file and load it with the helper script.

    Same error handling as load_normal_vectors().
    """
    print('Loading rotation matrices')
    filename = get_input_with_default("enter filename to load", "data/rotation_matrices.dat")
    if len(filename) == 0:
        print('Error, No filename given')
        sys.exit()
    if not os.path.exists(filename):
        print("File does not exist")
        sys.exit()
    res = Popen(['./load_rotation_matrices.py', filename]).wait()
    if (res != 0):
        sys.exit(1)
    # time.sleep(3)


def load_hba_rotations():
    """Prompt for an HBA field-rotation CSV file and load it with the helper script."""
    print('Loading hba field rotations')
    filename = get_input_with_default("enter filename to load", "data/hba-rotations.csv")
    if len(filename) == 0:
        print('Error, No filename given')
        sys.exit()
    if not os.path.exists(filename):
        print("File does not exist")
        sys.exit()
    res = Popen(['./load_hba_rotations.py', filename]).wait()
    if (res != 0):
        sys.exit(1)
    # time.sleep(3)


def calculate_hba_deltas():
    """Recalculate the HBA antenna deltas with the helper script; exit on failure."""
    print('calculating hba-deltas')
    # time.sleep(3)
    res = Popen(['./calc_hba_deltas.py']).wait()
    if (res != 0):
        sys.exit(1)
def load_all_etrf():
    """Load every expected-position (ETRF) file found under ./ETRF_FILES.

    Walks one directory level deep using os.chdir; note the working
    directory is only restored if every loader run succeeds, because any
    failure calls sys.exit() mid-walk.
    """
    print('loading all ETRF files from .//ETRF_FILES')
    os.chdir(os.curdir+'/ETRF_FILES')
    dirs = sorted(os.listdir(os.curdir))
    for dir in dirs:
        os.chdir(os.curdir+'/'+dir)
        files = os.listdir(os.curdir)
        for filename in files:
            if not os.path.exists(filename):
                print("File ",filename,"does not exist")
                sys.exit()
            res = Popen(['../../load_expected_pos.py', filename]).wait()
            if (res != 0):
                sys.exit(1)
        os.chdir(os.pardir)
    os.chdir(os.pardir)


def load_measurement():
    """Prompt for a single measurement file and load it with the helper script."""
    print('load one measurement file')
    filename = get_input_with_default("enter filename to load", "")
    if len(filename) == 0:
        print('Error, No filename given')
        sys.exit()
    if not os.path.exists(filename):
        print("File ",filename,"does not exist")
        sys.exit()
    res = Popen(['./load_measurementfile.py', filename]).wait()
    if (res != 0):
        sys.exit(1)
def transform_all(db_host, db_port, db_name, db_user, db_password):
    """Transform ETRF to ITRF coordinates for every station that has
    reference coordinates, for a target date entered by the user.

    Exits the whole process (sys.exit(1)) as soon as any
    calc_coordinates.py run returns non-zero.  Afterwards reports stations
    that have a known HBA rotation but no ETRF reference coordinates.
    """
    db = pg.connect(user=db_user, host=db_host, dbname=db_name, port=db_port, passwd=db_password)
    print('Transform all ETRF coordinates to ITRF coordinates for given date')
    target = get_input_with_default("Enter target_date", default_targetdate)
    sql = "select distinct o.stationname from object o inner join field_rotations r on r.id = o.id"
    all_stations = db.query(sql).getresult()
    sql = "select distinct o.stationname from object o inner join reference_coord r on r.id = o.id"
    ref_stations = db.query(sql).getresult()
    for stationname in ref_stations:
        station = stationname[0]
        # All antenna-field types are attempted for every station; the
        # commented-out core/remote branching below was apparently dropped
        # in favour of always running every type.
        if 0 != Popen(['./calc_coordinates.py', station, "LBA", target]).wait():
            sys.exit(1)
        if 0 != Popen(['./calc_coordinates.py', station, "CLBA", target]).wait():
            sys.exit(1)
        # if station[:1] == 'C': # core station
        if 0 != Popen(['./calc_coordinates.py', station, "HBA0", target]).wait():
            sys.exit(1)
        if 0 != Popen(['./calc_coordinates.py', station, "CHBA0", target]).wait():
            sys.exit(1)
        if 0 != Popen(['./calc_coordinates.py', station, "HBA1", target]).wait():
            sys.exit(1)
        if 0 != Popen(['./calc_coordinates.py', station, "CHBA1", target]).wait():
            sys.exit(1)
        # else: #remote or international station
        if 0 != Popen(['./calc_coordinates.py', station, "HBA", target]).wait():
            sys.exit(1)
        if 0 != Popen(['./calc_coordinates.py', station, "CHBA", target]).wait():
            sys.exit(1)
    db.close()
    missing_stations = list(set(all_stations) - set(ref_stations))
    for stationname in missing_stations:
        station = stationname[0]
        print("Station with known HBA rotation but no ETRF: ",station)
def transform_one():
    """Transform ETRF to ITRF for one station/antenna type/date, all prompted."""
    print('Transform ETRF coordinates to ITRF coordinates for given station and date')
    station = get_input_with_default("Enter station ", "")
    anttype = get_input_with_default("Enter type (LBA|HBA|HBA0|HBA1|CLBA|CHBA0|CHBA1|CHBA)", "")
    target = get_input_with_default("Enter target_date ", default_targetdate)
    res = Popen(['./calc_coordinates.py', station, anttype, target]).wait()
    if (res != 0):
        sys.exit(1)


def make_all_conf_files(db_host, db_port, db_name, db_user, db_password):
    """Generate AntennaField.conf and iHBADeltas.conf for every station that
    has reference coordinates, plus the all-station file, for a prompted date.

    Exits the process on any generator failure.
    """
    db = pg.connect(user=db_user, host=db_host, dbname=db_name, port=db_port, passwd=db_password)
    print('Make all AntennaField.conf and iHBADeltas.conf files for given date')
    target = get_input_with_default("Enter target_date", default_targetdate)
    query = """select distinct o.stationname from
               object o inner join reference_coord r on r.id = o.id"""
    results = db.query(query).getresult()
    for stationname in results:
        station = stationname[0]
        res = Popen(['./make_conf_files.py', station, target]).wait()
        if (res != 0):
            sys.exit(1)
    res = Popen(['./make_all_station_file.py', target]).wait()
    if (res != 0):
        sys.exit(1)
    db.close()


def make_one_conf_file():
    """Generate the conf files for a single prompted station and date."""
    print('Make one AntennaField.conf and iHBADeltas.conf file for given date')
    station = get_input_with_default("Enter station ", "")
    target = get_input_with_default("Enter target_date", default_targetdate)
    res = Popen(['./make_conf_files.py', station, target]).wait()
    if (res != 0):
        sys.exit(1)
if __name__ == "__main__":
    # Database connection parameters, defaulting to values from the local
    # database helper module.
    parser = OptionParser("Usage: %prog")
    parser.add_option("-D", "--database",
                      dest="dbName",
                      type="string",
                      default=getDBname(),
                      help="Name of StationCoordinates database to use")
    parser.add_option("-H", "--host",
                      dest="dbHost",
                      type="string",
                      default=getDBhost(),
                      help="Hostname of StationCoordinates database")
    parser.add_option("-P", "--port",
                      dest="dbPort",
                      type="int",
                      default=getDBport(),
                      help="Port of StationCoordinates database")
    parser.add_option("-U", "--user",
                      dest="dbUser",
                      type="string",
                      default=getDBuser(),
                      help="Username of StationCoordinates database")
    # parse arguments
    (options, args) = parser.parse_args()
    dbName = options.dbName
    dbHost = options.dbHost
    dbPort = options.dbPort
    dbUser = options.dbUser
    # Asked for lazily, only the first time an action actually needs the DB.
    dbPassword = None
    while(1):
        menu()
        # At module scope this alias works on both Pythons: if the NameError
        # fires (Python 3), the later lookup of ``input`` falls through to
        # the builtin.
        try:
            input = raw_input  # Python2 and Python3 compatible
        except NameError:
            pass
        sel = input('Enter choice :')
        if sel.upper() == 'Q':
            sys.exit(1)
        if sel == '1':
            create_cdb()
        if sel == '2':
            create_cdb_objects()
        if sel == '3':
            load_normal_vectors()
        if sel == '4':
            load_rotation_matrices()
        if sel == '5':
            load_hba_rotations()
        if sel == '6':
            calculate_hba_deltas()
        if sel == '7':
            load_all_etrf()
        if sel == '8':
            load_measurement()
        if sel == '9':
            if dbPassword is None:
                dbPassword = getpass.getpass("Database password:")
            transform_all(dbHost, dbPort, dbName, dbUser, dbPassword)
        if sel == '10':
            transform_one()
        if sel == '11':
            if dbPassword is None:
                dbPassword = getpass.getpass("Database password:")
            make_all_conf_files(dbHost, dbPort, dbName, dbUser, dbPassword)
        if sel == '12':
            make_one_conf_file()
        if sel == '0':
            # Full pipeline: rebuild the DB, load everything, transform and
            # write all conf files (menu items 1-7, 9 and 11 in order).
            if dbPassword is None:
                dbPassword = getpass.getpass("Database password:")
            create_cdb()
            create_cdb_objects()
            load_normal_vectors()
            load_rotation_matrices()
            load_hba_rotations()
            calculate_hba_deltas()
            load_all_etrf()
            transform_all(dbHost, dbPort, dbName, dbUser, dbPassword)
            make_all_conf_files(dbHost, dbPort, dbName, dbUser, dbPassword)
|
gpl-3.0
| -5,528,725,688,728,908,000
| 33.464286
| 99
| 0.557419
| false
| 3.702476
| false
| false
| false
|
JaneliaSciComp/osgpyplusplus
|
examples/osg-tutorial/tut2_textures.py
|
1
|
6318
|
#!/bin/env python
# Translated into python from C++ tutorial at
# http:#trac.openscenegraph.org/projects/osg/wiki/Support/Tutorials/Textures
from osgpypp import osg, osgDB, osgViewer
import sys
# Creating Textured Geometry using StateSets
# Goals
# Add a texture to geometry defined by OpenGL drawing primitives introduced in
# tutorial Basic geometry.
# Background
# The previous tutorial introduced viewing scenes that include basic shapes
# created from OpenGL primitives. This section explains how to add textures to
# these shapes. To make the code easier to use, we'll put the pyramid code
# into a function that creates a geode and returns a pointer to it. The
# following code is from tutorial Basic geometry.
def createPyramid():
    """Build and return an osg.Geode holding a 5-vertex pyramid.

    The geometry gets a quad base, four triangle faces, per-vertex colours
    and texture coordinates (texcoord array index 0), so a caller can attach
    a textured StateSet to the returned geode.
    """
    pyramidGeode = osg.Geode()
    pyramidGeometry = osg.Geometry()
    pyramidGeode.addDrawable(pyramidGeometry)
    # Specify the vertices:
    pyramidVertices = osg.Vec3Array()
    pyramidVertices.append( osg.Vec3(0, 0, 0) ) # front left
    pyramidVertices.append( osg.Vec3(2, 0, 0) ) # front right
    pyramidVertices.append( osg.Vec3(2, 2, 0) ) # back right
    pyramidVertices.append( osg.Vec3( 0,2, 0) ) # back left
    pyramidVertices.append( osg.Vec3( 1, 1,2) ) # peak
    # Associate this set of vertices with the geometry associated with the
    # geode we added to the scene.
    pyramidGeometry.setVertexArray( pyramidVertices )
    # Create a QUAD primitive for the base by specifying the
    # vertices from our vertex list that make up this QUAD:
    pyramidBase = osg.DrawElementsUInt(osg.PrimitiveSet.QUADS, 0)
    pyramidBase.append(3)
    pyramidBase.append(2)
    pyramidBase.append(1)
    pyramidBase.append(0)
    # Add this primitive to the geometry:
    # pyramidGeometry.addPrimitiveSet(pyramidBase)
    # code to create other faces goes here!
    pyramidGeometry.addPrimitiveSet(pyramidBase)
    # Repeat the same for each of the four sides. Again, vertices are specified in counter-clockwise order.
    pyramidFaceOne = osg.DrawElementsUInt(osg.PrimitiveSet.TRIANGLES, 0)
    pyramidFaceOne.append(0)
    pyramidFaceOne.append(1)
    pyramidFaceOne.append(4)
    pyramidGeometry.addPrimitiveSet(pyramidFaceOne)
    pyramidFaceTwo = osg.DrawElementsUInt(osg.PrimitiveSet.TRIANGLES, 0)
    pyramidFaceTwo.append(1)
    pyramidFaceTwo.append(2)
    pyramidFaceTwo.append(4)
    pyramidGeometry.addPrimitiveSet(pyramidFaceTwo)
    pyramidFaceThree = osg.DrawElementsUInt(osg.PrimitiveSet.TRIANGLES, 0)
    pyramidFaceThree.append(2)
    pyramidFaceThree.append(3)
    pyramidFaceThree.append(4)
    pyramidGeometry.addPrimitiveSet(pyramidFaceThree)
    pyramidFaceFour = osg.DrawElementsUInt(osg.PrimitiveSet.TRIANGLES, 0)
    pyramidFaceFour.append(3)
    pyramidFaceFour.append(0)
    pyramidFaceFour.append(4)
    pyramidGeometry.addPrimitiveSet(pyramidFaceFour)
    colors = osg.Vec4Array()
    colors.append(osg.Vec4(1.0, 0.0, 0.0, 1.0) ) #index 0 red
    colors.append(osg.Vec4(0.0, 1.0, 0.0, 1.0) ) #index 1 green
    colors.append(osg.Vec4(0.0, 0.0, 1.0, 1.0) ) #index 2 blue
    colors.append(osg.Vec4(1.0, 1.0, 1.0, 1.0) ) #index 3 white
    colors.append(osg.Vec4(1.0, 0.0, 0.0, 1.0) ) #index 4 red
    pyramidGeometry.setColorArray(colors)
    pyramidGeometry.setColorBinding(osg.Geometry.BIND_PER_VERTEX)
    # Since the mapping from vertices to texture coordinates is 1:1,
    # we don't need to use an index array to map vertices to texture
    # coordinates. We can do it directly with the 'setTexCoordArray'
    # method of the Geometry class.
    # This method takes a variable that is an array of two dimensional
    # vectors (osg.Vec2). This variable needs to have the same
    # number of elements as our Geometry has vertices. Each array element
    # defines the texture coordinate for the cooresponding vertex in the
    # vertex array.
    texcoords = osg.Vec2Array(5)
    texcoords[0].set(0.00,0.0) # tex coord for vertex 0
    texcoords[1].set(0.25,0.0) # tex coord for vertex 1
    texcoords[2].set(0.50,0.0) # ""
    texcoords[3].set(0.75,0.0) # ""
    texcoords[4].set(0.50,1.0) # ""
    pyramidGeometry.setTexCoordArray(0,texcoords)
    return pyramidGeode
# Loading a Texture, Creating a State Set, assigning it to a Node
# The method for rendering primitives is controlled using StateSets. This
# section of code demonstrates how to load a texture from file, create a
# StateSet in which this texture is enabled, and assign this StateSet to a
# node in the scene. The first section starts out the same as previous
# tutorials. Initialize a viewer and build a scene with a single pyramid.

# Declare a group to act as root node of a scene:
root = osg.Group()
pyramidGeode = createPyramid()
root.addChild(pyramidGeode)
# Now for adding a texture. Here we'll declare a texture instance and set
# its data variance as 'DYNAMIC'. (If we don't declare the texture as dynamic,
# some of the osg's optimization routines could remove it.) The texture class
# encapsulates OpenGL texture modes (wrap, filiter, etc.) as well as an
# osg.Image. The code below shows how to read an osg.Image instance from a
# file and associate this image with a texture.
KLN89FaceTexture = osg.Texture2D()
# protect from being optimized away as static state:
KLN89FaceTexture.setDataVariance(osg.Object.DYNAMIC)
# load an image by reading a file:
klnFace = osgDB.readImageFile("KLN89FaceB.tga")
if klnFace is None:
    # Fixed: use the print() function rather than the Python 2 print
    # statement, so the module also parses under Python 3.
    print(" Couldn't find texture, quitting.")
    sys.exit(-1)
# Assign the texture to the image we read from file:
KLN89FaceTexture.setImage(klnFace)
# Textures can be associated with rendering StateSets. The next step is to
# create a StateSet, associate and enable our texture with this state set and
# assign the StateSet to our geometry.
# Create a StateSet with default settings:
stateOne = osg.StateSet()
# Assign texture unit 0 of our StateSet to the texture
# we just created and enable the texture.
stateOne.setTextureAttributeAndModes(0, KLN89FaceTexture, osg.StateAttribute.ON)
# Associate this state set with the Geode that contains
# the pyramid:
pyramidGeode.setStateSet(stateOne)
# The last step is the simulation loop:
viewer = osgViewer.Viewer()
#The final step is to set up and enter a simulation loop.
viewer.setSceneData( root )
viewer.run()
|
bsd-3-clause
| 5,011,954,014,060,613,000
| 38.242236
| 108
| 0.738841
| false
| 3.480992
| false
| false
| false
|
sbobovyc/GameTools
|
ImmunityDebugger/collectLoopRets.py
|
1
|
2256
|
#!/usr/bin/env python
import immlib
from immlib import LogBpHook, BpHook
class ReturnBP(BpHook):
    """One-shot breakpoint hook: logs EIP, records it in the knowledge base,
    removes the breakpoint and resumes execution."""

    def __init__(self):
        BpHook.__init__(self)

    def run(self, regs):
        # regs maps register names to their values at the breakpoint.
        imm = immlib.Debugger()
        eip = regs["EIP"]
        imm.log("bp, EIP is 0x%08X " % eip)
        # Remember this return address so it is not hooked again.
        imm.addKnowledge("0x%08X" % eip, eip)
        #self.UnHook()
        imm.deleteBreakpoint(eip, eip+4)
        imm.run()
class ReturnLog(LogBpHook):
    """Logging breakpoint hook: logs EIP, records it, then unhooks itself.

    Unlike ReturnBP it does not call imm.run() — a logging breakpoint does
    not stop the debuggee.
    """

    def __init__(self):
        LogBpHook.__init__(self)

    def run(self, regs):
        imm = immlib.Debugger()
        eip = regs["EIP"]
        imm.log("log, EIP is 0x%08X " % eip)
        imm.addKnowledge("0x%08X" % eip, eip)
        self.UnHook()
        imm.deleteBreakpoint(eip, eip+4)
def main(args):
    """Hook every RET of every function in the debugged module.

    args[0] == "false" selects plain breakpoints (ReturnBP) instead of the
    default logging breakpoints (ReturnLog).  Return addresses already in
    the knowledge base are skipped.
    """
    imm = immlib.Debugger()
    module = imm.getModule(imm.getDebuggedName())
    imm.log("module %s at 0x%08X" % (module.getName(), module.getBase()))
    use_log_bp = True
    if len(args) > 0 and args[0] == "false":
        imm.log("Using non logging bp")
        use_log_bp = False
    # make sure module is analysed
    if not module.isAnalysed():
        module.Analyse()
    knowledge = imm.listKnowledge()
    hooked = 0
    not_hooked = 0
    for f in imm.getAllFunctions(module.getBase()):
        for ret in imm.getFunctionEnd(f):
            if "0x%08X" % ret not in knowledge:
                #imm.log("function 0x%08X ret at 0x%08X" % (f, ret))
                if use_log_bp:
                    hook = ReturnLog()
                    hook.add("ReturnLog 0x%08X"%f, ret)
                    hooked +=1
                else:
                    hook = ReturnBP()
                    hook.add("ReturnBP 0x%08X"%f, ret)
                    hooked +=1
                # i think fasthook because fast hook is over writing rets, getFunctionEnd is having trouble
                #fast = immlib.FastLogHook(imm)
                #fast.logFunction(ret)
                #fast.logRegister("EIP")
                #fast.Hook()
            else:
                not_hooked += 1
    imm.log("Hooked %i, skipped %i" % (hooked, not_hooked))
    return "Found returns, attached hooks"
|
gpl-3.0
| 2,774,366,314,029,570,600
| 29.90411
| 107
| 0.503103
| false
| 3.503106
| false
| false
| false
|
JeanOlivier/pyHegel
|
pyHegel/__init__.py
|
1
|
2890
|
# -*- coding: utf-8 -*-
########################## Copyrights and license ############################
# #
# Copyright 2011-2015 Christian Lupien <christian.lupien@usherbrooke.ca> #
# #
# This file is part of pyHegel. http://github.com/lupien/pyHegel #
# #
# pyHegel is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the #
# Free Software Foundation, either version 3 of the License, or (at your #
# option) any later version. #
# #
# pyHegel is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public #
# License for more details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with pyHegel. If not, see <http://www.gnu.org/licenses/>. #
# #
##############################################################################
from __future__ import absolute_import
# Use version according to option 5 in https://packaging.python.org/en/latest/single_source_version.html
# make sure that new versions numbers compare properly when using
# pkg_resources.parse_version
__version__ = '1.0.0rc2'
__copyright__ = '2011-2015 Christian Lupien'
def start_pyHegel():
    """ This is the recommanded way to start pyHegel.
        It starts ipython in a standard way (pylab, autocall enabled,...)
        and then loads and initializes the pyHegel commands.

        If the python session was started with command line arguments
        --console, it will try to start pyHegel in the Console program
        that comes with pythonxy. This is windows only.

        If you later need access to the commands in a module:
        import pyHegel.commands as cmds
        cmds.get(somedevice)
        or
        from pyHegel.commands import *
        get(somedevice)
        or any other variants you want.
    """
    # Local imports keep "import pyHegel" side-effect free.
    import sys
    import os
    # Windows-only: "--console" relaunches pyHegel inside the pythonxy
    # Console program instead of the current terminal.
    if os.name == 'nt' and len(sys.argv) == 2 and sys.argv[1] == '--console':
        start_console()
    else:
        from . import main
        main.main_start()


def start_console():
    """Start pyHegel inside the pythonxy Console program (Windows only)."""
    from . import win_console_helper
    win_console_helper.start_console()
|
gpl-3.0
| 6,260,107,342,247,178,000
| 47.166667
| 104
| 0.510035
| false
| 4.76112
| false
| false
| false
|
dknlght/dkodi
|
src/script.module.urlresolver/lib/urlresolver/plugins/vevio.py
|
1
|
2891
|
"""
Plugin for UrlResolver
Copyright (C) 2018 jsergio
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from six.moves import urllib_error
import json
from urlresolver.plugins.lib import helpers
from urlresolver import common
from urlresolver.common import i18n
from urlresolver.resolver import UrlResolver, ResolverError
class VevIoResolver(UrlResolver):
    """UrlResolver plugin for the vev.io / vev.red video host.

    vev.io requires IP pairing: API requests from an unpaired IP return
    HTTP 400/401 until the user visits https://vev.io/pair.
    """
    name = "vevio"
    domains = ["vev.io", "vev.red"]
    pattern = r'(?://|\.)(vev\.(?:io|red))/(?:embed/)?([0-9a-zA-Z]+)'

    def __init__(self):
        self.headers = {'User-Agent': common.SMU_USER_AGENT}

    def get_media_url(self, host, media_id):
        """Resolve *media_id* to a playable URL with request headers appended.

        Tries the pairing API directly; if that yields nothing, prompts the
        user to pair their IP.  Raises ResolverError if no stream is found.
        """
        try:
            result = self.__check_auth(media_id)
            if not result:
                result = self.__auth_ip(media_id)
        except ResolverError:
            raise
        if result:
            return helpers.pick_source(helpers.sort_sources_list(result)) + helpers.append_headers(self.headers)
        raise ResolverError("Unable to retrieve video")

    def __auth_ip(self, media_id):
        # Show a countdown dialog pointing the user at the pairing page and
        # repeatedly poll __check_auth until it succeeds or the dialog expires.
        header = i18n('vevio_auth_header')
        line1 = i18n('auth_required')
        line2 = i18n('visit_link')
        line3 = i18n('click_pair') % 'https://vev.io/pair'
        with common.kodi.CountdownDialog(header, line1, line2, line3) as cd:
            return cd.start(self.__check_auth, [media_id])

    def __check_auth(self, media_id):
        # Query the pairing API; HTTP 400/401 is treated as "not paired yet"
        # (empty result), any other HTTP error propagates.
        common.logger.log('Checking Auth: %s' % media_id)
        url = self.get_url(media_id)
        try:
            js_result = json.loads(self.net.http_GET(url, headers=self.headers).content)
        except ValueError:
            raise ResolverError('Unusable Authorization Response')
        except urllib_error.HTTPError as e:
            if e.code == 400 or e.code == 401:
                js_result = {}
            else:
                raise
        common.logger.log('Auth Result: %s' % js_result)
        if js_result.get('qualities', {}):
            # size[1] is presumably the vertical resolution used as the sort
            # key by helpers.sort_sources_list — TODO confirm against the API.
            return [(qual.get('size')[1], qual.get('src')) for qual in js_result.get('qualities')]
        else:
            return []

    def get_url(self, media_id, host='vev.io'):
        # Host portion is fixed to vev.io — the pairing API lives there.
        return self._default_get_url(host, media_id, template='https://{host}/api/pair/{media_id}')

    @classmethod
    def isPopup(self):
        # Tells the framework this resolver may open a pairing popup dialog.
        return True
|
gpl-2.0
| -3,188,007,391,110,179,000
| 35.1375
| 112
| 0.628156
| false
| 3.803947
| false
| false
| false
|
GPflow/GPflowOpt
|
gpflowopt/acquisition/pof.py
|
1
|
3594
|
# Copyright 2017 Joachim van der Herten
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .acquisition import Acquisition
from gpflow import settings
import numpy as np
import tensorflow as tf
float_type = settings.dtypes.float_type
stability = settings.numerics.jitter_level
class ProbabilityOfFeasibility(Acquisition):
    """
    Probability of Feasibility acquisition function for sampling feasible regions. Standard acquisition function for
    Bayesian Optimization with black-box expensive constraints.

    Key reference:

    ::

        @article{Schonlau:1997,
            title={Computer experiments and global optimization},
            author={Schonlau, Matthias},
            year={1997},
            publisher={University of Waterloo}
        }

    The acquisition function measures the probability of the latent function
    being smaller than a threshold for a candidate point.

    .. math::
       \\alpha(\\mathbf x_{\\star}) = \\int_{-\\infty}^{0} \\, p(f_{\\star}\\,|\\, \\mathbf x, \\mathbf y, \\mathbf x_{\\star} ) \\, d f_{\\star}
    """

    def __init__(self, model, threshold=0.0, minimum_pof=0.5):
        """
        :param model: GPflow model (single output) representing our belief of the constraint
        :param threshold: Observed values lower than the threshold are considered valid
        :param minimum_pof: minimum pof score required for a point to be valid.
            For more information, see docstring of feasible_data_index
        """
        super(ProbabilityOfFeasibility, self).__init__(model)
        self.threshold = threshold
        self.minimum_pof = minimum_pof

    def constraint_indices(self):
        # Every output column of this acquisition's data is a constraint.
        return np.arange(self.data[1].shape[1])

    def feasible_data_index(self):
        """
        Returns a boolean array indicating which points are feasible (True) and which are not (False).

        Answering the question *which points are feasible?* is slightly troublesome in case noise is present.
        Directly relying on the noisy data and comparing it to self.threshold does not make much sense.

        Instead, we rely on the model belief using the PoF (a probability between 0 and 1).
        As the implementation of the PoF corresponds to the cdf of the (normal) predictive distribution in
        a point evaluated at the threshold, requiring a minimum pof of 0.5 implies the mean of the predictive
        distribution is below the threshold, hence it is marked as feasible. A minimum pof of 0 marks all points valid.
        Setting it to 1 results in all invalid.

        :return: boolean ndarray (size N)
        """
        pred = self.evaluate(self.data[0])
        return pred.ravel() > self.minimum_pof

    def build_acquisition(self, Xcand):
        """Build the PoF tensor: cdf of the predictive normal at the threshold."""
        candidate_mean, candidate_var = self.models[0].build_predict(Xcand)
        # Clamp the variance to the jitter level to avoid division by ~0.
        candidate_var = tf.maximum(candidate_var, stability)
        normal = tf.contrib.distributions.Normal(candidate_mean, tf.sqrt(candidate_var))
        return normal.cdf(tf.constant(self.threshold, dtype=float_type), name=self.__class__.__name__)
|
apache-2.0
| -1,453,852,975,451,257,900
| 41.282353
| 145
| 0.687257
| false
| 4.093394
| false
| false
| false
|
adamchainz/django-mysql
|
src/django_mysql/utils.py
|
1
|
9970
|
import os
import shutil
import subprocess
import time
from collections import defaultdict
from queue import Empty, Queue
from threading import Lock, Thread
from weakref import WeakKeyDictionary

import django
from django.db import DEFAULT_DB_ALIAS
from django.db import connection as default_connection
from django.db import connections
class WeightedAverageRate:
    """
    Adapted from percona-toolkit - provides a weighted average counter to keep
    at a certain rate of activity (row iterations etc.).
    """

    def __init__(self, target_t, weight=0.75):
        """
        target_t - Target time for t in update()
        weight - Weight of previous n/t values
        """
        self.target_t = target_t
        self.weight = weight
        # Exponentially decayed totals of operations and elapsed time.
        self.avg_n = 0.0
        self.avg_t = 0.0

    def update(self, n, t):
        """
        Update weighted average rate. Param n is generic; it's how many of
        whatever the caller is doing (rows, checksums, etc.). Param t is how
        long this n took, in seconds (hi-res or not).

        Parameters:
            n - Number of operations (rows, etc.)
            t - Amount of time in seconds that n took

        Returns:
            n adjusted to meet target_t based on weighted decaying avg rate
        """
        if not (self.avg_n and self.avg_t):
            # First real sample: seed the averages directly.
            self.avg_n = n
            self.avg_t = t
        else:
            # Decay the history, then fold in the new sample.
            self.avg_n = self.avg_n * self.weight + n
            self.avg_t = self.avg_t * self.weight + t
        return int(self.avg_rate * self.target_t)

    @property
    def avg_rate(self):
        # Guard against a zero elapsed time by assuming a tiny duration.
        denominator = self.avg_t or 0.001
        return self.avg_n / denominator
class StopWatch:
    """Context manager that measures the wall-clock duration of a block.

    After the ``with`` block exits, ``start_time``, ``end_time`` and
    ``total_time`` (all in seconds) are available on the instance.
    """

    def __enter__(self):
        # Record the moment the block is entered.
        self.start_time = time.time()
        return self

    def __exit__(self, *exc_details, **kwargs):
        # Record the exit time and derive the elapsed duration.
        end = time.time()
        self.end_time = end
        self.total_time = end - self.start_time
def format_duration(total_seconds):
    """Render a whole-second duration as a compact string like '1h2m3s'.

    Hours appear only when non-zero; minutes appear whenever hours or
    minutes are non-zero; seconds always appear.
    """
    hours, remainder = divmod(total_seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    parts = []
    if hours > 0:
        parts.append("{}h".format(hours))
    if hours or minutes:
        parts.append("{}m".format(minutes))
    parts.append("{}s".format(seconds))
    return "".join(parts)
if django.VERSION >= (3, 0):

    def connection_is_mariadb(connection):
        """Return True if ``connection`` talks to a MariaDB server."""
        # Django 3.0+ exposes the MariaDB flag directly on the connection.
        return connection.vendor == "mysql" and connection.mysql_is_mariadb


else:
    # Cache per-connection results; weak keys let closed connections be
    # garbage-collected without growing the cache.
    _is_mariadb_cache = WeakKeyDictionary()

    def connection_is_mariadb(connection):
        """Return True if ``connection`` talks to a MariaDB server.

        Pre-3.0 fallback: inspects the server version string once per
        connection and caches the answer.
        """
        if connection.vendor != "mysql":
            return False
        if connection is default_connection:
            # Resolve the lazy default-connection proxy to the real object
            # so the WeakKeyDictionary keys stay consistent.
            connection = connections[DEFAULT_DB_ALIAS]
        try:
            return _is_mariadb_cache[connection]
        except KeyError:
            with connection.temporary_connection():
                server_info = connection.connection.get_server_info()
                is_mariadb = "MariaDB" in server_info
                _is_mariadb_cache[connection] = is_mariadb
                return is_mariadb
def settings_to_cmd_args(settings_dict):
    """
    Build the ``mysql`` client command line for a Django DATABASES entry.

    Copied from the Django 1.8 MySQL backend DatabaseClient, where the
    runshell command-line creation has been extracted and made callable.
    Values under OPTIONS take precedence over the top-level settings keys.
    (There seems to be no good way to set sql_mode via the CLI.)
    """
    options = settings_dict["OPTIONS"]
    args = ["mysql"]

    defaults_file = options.get("read_default_file")
    if defaults_file:
        args.append("--defaults-file=%s" % defaults_file)

    user = options.get("user", settings_dict["USER"])
    if user:
        args.append("--user=%s" % user)

    passwd = options.get("passwd", settings_dict["PASSWORD"])
    if passwd:
        args.append("--password=%s" % passwd)

    host = options.get("host", settings_dict["HOST"])
    if host:
        # A slash means the "host" is really a unix socket path.
        if "/" in host:
            args.append("--socket=%s" % host)
        else:
            args.append("--host=%s" % host)

    port = options.get("port", settings_dict["PORT"])
    if port:
        args.append("--port=%s" % port)

    cert = options.get("ssl", {}).get("ca")
    if cert:
        args.append("--ssl-ca=%s" % cert)

    db = options.get("db", settings_dict["NAME"])
    if db:
        args.append(db)
    return args
# Cache of program-name -> availability, so PATH is only searched once per name.
programs_memo = {}


def have_program(program_name):
    """Return True if *program_name* is an executable on the current PATH.

    Results are memoized in the module-level ``programs_memo`` dict so each
    program is looked up at most once per process.
    """
    global programs_memo
    if program_name not in programs_memo:
        # shutil.which is portable (no dependency on an external `which`
        # binary) and avoids spawning a subprocess for every lookup.
        programs_memo[program_name] = shutil.which(program_name) is not None
    return programs_memo[program_name]
def pt_fingerprint(query):
    """
    Takes a query (in a string) and returns its 'fingerprint'

    Delegates to the singleton PTFingerprintThread, which keeps a
    pt-fingerprint subprocess alive between calls to avoid per-query
    process-launch overhead.

    :raises OSError: if the pt-fingerprint program is not installed
    """
    if not have_program("pt-fingerprint"):  # pragma: no cover
        raise OSError("pt-fingerprint doesn't appear to be installed")

    thread = PTFingerprintThread.get_thread()
    # Hand the query to the background thread and block for its answer.
    thread.in_queue.put(query)
    return thread.out_queue.get()
class PTFingerprintThread(Thread):
    """
    Class for a singleton background thread to pass queries to pt-fingerprint
    and get their fingerprints back. This is done because the process launch
    time is relatively expensive and it's useful to be able to fingerprinting
    queries quickly.

    The get_thread() class method returns the singleton thread - either
    instantiating it or returning the existing one.

    The thread launches pt-fingerprint with subprocess and then takes queries
    from an input queue, passes them the subprocess and returns the fingerprint
    to an output queue. If it receives no queries in PROCESS_LIFETIME seconds,
    it closes the subprocess and itself - so you don't have processes hanging
    around.
    """

    # Singleton instance, guarded by life_lock.
    the_thread = None
    life_lock = Lock()

    # Idle time (seconds) after which the thread and subprocess shut down.
    PROCESS_LIFETIME = 60.0  # seconds

    @classmethod
    def get_thread(cls):
        """Return the singleton worker thread, creating and starting it if needed."""
        with cls.life_lock:
            if cls.the_thread is None:
                in_queue = Queue()
                out_queue = Queue()
                thread = cls(in_queue, out_queue)
                thread.daemon = True
                thread.in_queue = in_queue
                thread.out_queue = out_queue
                thread.start()
                cls.the_thread = thread
            return cls.the_thread

    def __init__(self, in_queue, out_queue, **kwargs):
        # in_queue receives query strings; out_queue yields fingerprints.
        self.in_queue = in_queue
        self.out_queue = out_queue
        super().__init__(**kwargs)

    def run(self):
        # pty is unix/linux only
        import pty  # noqa
        # NOTE(review): ``fingerprint_thread`` is declared global but never
        # assigned or read anywhere visible — looks like a leftover.
        global fingerprint_thread
        # A pty is used for stdout so pt-fingerprint line-buffers its output.
        master, slave = pty.openpty()
        proc = subprocess.Popen(
            ["pt-fingerprint"], stdin=subprocess.PIPE, stdout=slave, close_fds=True
        )
        stdin = proc.stdin
        stdout = os.fdopen(master)
        while True:
            try:
                query = self.in_queue.get(timeout=self.PROCESS_LIFETIME)
            except Empty:
                self.life_lock.acquire()
                # We timed out, but there was something put into the queue
                # since
                # NOTE(review): both branches of this handler break; the
                # queue-refilled branch releases the lock and the other
                # holds it for the post-loop cleanup, which releases again.
                # The double-release in the first branch (post-loop
                # life_lock.release() on an unlocked lock) looks suspect —
                # confirm intended shutdown semantics.
                if (
                    self.__class__.the_thread is self and self.in_queue.qsize()
                ):  # pragma: no cover
                    self.life_lock.release()
                    break
                # Die
                break
            # Feed the query (terminated by ';' and newline) and read back
            # exactly one fingerprint line.
            stdin.write(query.encode("utf-8"))
            if not query.endswith(";"):
                stdin.write(b";")
            stdin.write(b"\n")
            stdin.flush()
            fingerprint = stdout.readline()
            self.out_queue.put(fingerprint.strip())
        stdin.close()
        self.__class__.the_thread = None
        self.life_lock.release()
def collapse_spaces(string):
    """Replace newlines with spaces and squeeze runs of spaces down to one.

    Other whitespace (tabs etc.) is left untouched.
    """
    pieces = string.replace("\n", " ").split(" ")
    return " ".join(piece for piece in pieces if piece)
def index_name(model, *field_names, **kwargs):
    """
    Returns the name of the index existing on field_names, or raises KeyError
    if no such index exists.

    :param model: Django model whose table is inspected
    :param field_names: one or more model *field* names (mapped to columns here)
    :param kwargs: only ``using`` is accepted — the database alias to query
    """
    if not len(field_names):
        raise ValueError("At least one field name required")
    # Python 2-compatible keyword-only argument handling.
    using = kwargs.pop("using", DEFAULT_DB_ALIAS)
    if len(kwargs):
        raise ValueError("The only supported keyword argument is 'using'")
    existing_fields = {field.name: field for field in model._meta.fields}
    fields = [existing_fields[name] for name in field_names if name in existing_fields]
    if len(fields) != len(field_names):
        unfound_names = set(field_names) - {field.name for field in fields}
        raise ValueError("Fields do not exist: " + ",".join(unfound_names))
    column_names = tuple(field.column for field in fields)
    list_sql = get_list_sql(column_names)
    with connections[using].cursor() as cursor:
        # All indexes touching any of the columns, with per-index column
        # order preserved via SEQ_IN_INDEX.
        cursor.execute(
            """SELECT `INDEX_NAME`, `SEQ_IN_INDEX`, `COLUMN_NAME`
               FROM INFORMATION_SCHEMA.STATISTICS
               WHERE TABLE_SCHEMA = DATABASE() AND
                     TABLE_NAME = %s AND
                     COLUMN_NAME IN {list_sql}
               ORDER BY `INDEX_NAME`, `SEQ_IN_INDEX` ASC
            """.format(
                list_sql=list_sql
            ),
            (model._meta.db_table,) + column_names,
        )
        # Rebuild each index's ordered column tuple, then invert the mapping
        # so we can look up the index by the exact column sequence.
        indexes = defaultdict(list)
        for index_name, _, column_name in cursor.fetchall():
            indexes[index_name].append(column_name)
    indexes_by_columns = {tuple(v): k for k, v in indexes.items()}
    try:
        return indexes_by_columns[column_names]
    except KeyError:
        raise KeyError("There is no index on (" + ",".join(field_names) + ")")
def get_list_sql(sequence):
    """Return an SQL placeholder tuple for *sequence*, e.g. "(%s,%s,%s)"."""
    placeholders = ",".join("%s" for _ in sequence)
    return "(" + placeholders + ")"
def mysql_connections():
    """Yield ``(alias, connection)`` for every configured MySQL connection.

    The default alias is always tried first; the remaining aliases come
    from a set, so their relative order is unspecified.
    """
    conn_names = [DEFAULT_DB_ALIAS] + list(set(connections) - {DEFAULT_DB_ALIAS})
    for alias in conn_names:
        connection = connections[alias]
        if connection.vendor == "mysql":
            yield alias, connection
|
bsd-3-clause
| -5,315,907,786,015,990,000
| 30.550633
| 87
| 0.590672
| false
| 4.020161
| false
| false
| false
|
th0mmeke/toyworld
|
kinetics_2D.py
|
1
|
7872
|
"""
Created on 22/03/2013
@author: thom
"""
import random
import math
import logging
from rdkit.Chem import AllChem as Chem
from ULPS import Float_t
import config
class Kinetics2D(object):
    """Stateless 2-D kinematics helpers for molecule collision simulations.

    All methods are classmethods. Velocities are (x, y) component pairs and
    energies are scalars.
    """

    @classmethod
    def get_ke(cls, m, x, y):
        """Return kinetic energy 0.5*m*|v|^2 for velocity components (x, y)."""
        return 0.5 * m * (x * x + y * y)

    @classmethod
    def get_speed(cls, x, y):
        """Return the scalar speed for velocity components (x, y)."""
        return math.sqrt(x * x + y * y)

    @classmethod
    def radial_to_xyz(cls, theta=None, r=None):
        """Convert polar (theta, r) to cartesian (x, y).

        Missing components are drawn uniformly at random (theta in
        [0, 2*pi), r in [0, 1)). Always returns a 2-D x,y.
        """
        if theta is None:
            theta = random.uniform(0, 2.0 * math.pi)
        if r is None:
            r = random.uniform(0, 1)
        y = math.sin(theta) * r
        x = math.cos(theta) * r
        return x, y

    @classmethod
    def xyz_to_radial(cls, x, y):
        """Convert cartesian (x, y) to polar. Always returns a 2-D theta,r."""
        r = math.hypot(x, y)
        theta = math.atan2(y, x)
        return theta, r

    @classmethod
    def get_distance(cls, l1, l2):
        """Return the Euclidean distance between points l1 and l2."""
        return math.sqrt(sum([(_l1 - _l2) * (_l1 - _l2) for _l1, _l2 in zip(l1, l2)]))

    @classmethod
    def get_CM_energy(cls, mols):
        """Return KE of Centre of Mass: _ke = 1/2mv^2, where mv for the centre of mass = sum (mi * vi) for all particles i

        :param mols: list of Molecule"""
        total_mass = sum([mol.get_mass() for mol in mols])
        return cls.get_ke(total_mass, *cls.get_CM_velocity(mols))

    @classmethod
    def get_CM_velocity(cls, mols):
        """Return the velocity [vx, vy] of the centre of mass of these particles.

        BUG FIX: the previous code did ``cm_momentum += mol.get_velocity() *
        mol.get_mass()`` on a plain list; ``list += iterable`` *extends* the
        list element-wise instead of adding vectors, after which
        ``cm_momentum / total_mass`` raised TypeError. Momentum components
        are now accumulated explicitly, matching the commented-out reference
        implementation that shipped with the original code.
        """
        total_mass = sum([mol.get_mass() for mol in mols])
        cm_momentum = [0.0, 0.0]
        for mol in mols:
            mass = mol.get_mass()
            velocity = mol.get_velocity()
            cm_momentum[0] += mass * velocity[0]
            cm_momentum[1] += mass * velocity[1]
        CM_velocity = [mv / total_mass for mv in cm_momentum]
        logging.debug("CM velocity = {}".format(CM_velocity))
        return CM_velocity

    @classmethod
    def inelastic_collision(cls, reactant_mols, product_mols, energy_delta):
        """Determine velocities of product molecules following a collision of reactant molecules, for between one and three product molecules.

        Model as a collision, followed by an explosion, meaning that the total momentum of the system is conserved - if two particles, each has equal and opposite momentum in CoM frame
        Assume an impulse, or force, splitting the particles apart, acting equally on each particle
        Then impulse J = mv2-mv1 and so momentum change will be the same for all particles
        Implies that for two particles, equal and opposite mv in CoM frame, and for three particles, mv arranged in equilateral triangle

        Post-conditions:
        1. Sum in_mass = Sum out_mass - although #in_molecules ne #out_molecules
        2. Vector speed and direction of CoM remains constant
        3. in_KE + in_PE + in_IE = Sum out_KE + out_PE + out_IE or in_KE - delta_KE = out_KE

        :param reactant_mols: reactants - must have total KE > 0
        :type reactant_mols: list of Molecule
        :param product_mols: products of reaction - must be 1, 2 or 3 products only
        :type product_mols: list of Molecule
        :param energy_delta: final KE = initial KE - energy_delta
        """

        def total_mv(mv):
            # Component-wise sum over a list of momentum vectors.
            totals = [0, 0]
            for mv_ in mv:
                for dim in range(len(totals)):
                    totals[dim] += mv_[dim]
            return totals

        if len(product_mols) < 1 or len(product_mols) > 3:
            raise ValueError()
        logging.debug("reactant_mols = {}, product_mols = {}".format([Chem.MolToSmiles(mol) for mol in reactant_mols], [Chem.MolToSmiles(mol) for mol in product_mols]))
        in_v = [mol.get_velocity() for mol in reactant_mols]
        in_mass = [mol.get_mass() for mol in reactant_mols]
        in_mv = [[m * v_ for v_ in v] for m, v in zip(in_mass, in_v)]
        in_ke = sum([mol.get_kinetic_energy() for mol in reactant_mols])
        in_ie = sum([mol.get_internal_energy() for mol in reactant_mols])

        # Velocity of centre of mass after collision
        # Momentums add to zero in the CoM frame
        out_mass = [mol.get_mass() for mol in product_mols]
        cm_in_v = cls.get_CM_velocity(reactant_mols)
        cm_in_radial_v = cls.xyz_to_radial(*cm_in_v)

        # Bound energy_of_collision to above zero (rounding errors for small values)
        # consistent sense with that in discover_reaction - final_PE = initial_PE + energy_delta => final_KE = initial_KE - energy_delta
        energy_of_collision = max(0, in_ke + in_ie - energy_delta - cls.get_CM_energy(reactant_mols))
        if energy_of_collision <= 0:
            raise ValueError
        out_v_in_CoM_frame = []
        if len(out_mass) == 1:
            # One out particle is stationary in out_CoM frame
            IE = energy_of_collision  # inelastic collision -> loss of KE -> must go to IE
            out_v_in_CoM_frame.append([0, 0])
        elif len(out_mass) == 2:
            ke_in_CM_frame = random.uniform(0, energy_of_collision)
            IE = energy_of_collision - ke_in_CM_frame
            # Equal and opposite momenta perpendicular to the CoM direction.
            mv = math.sqrt((2.0 * ke_in_CM_frame * out_mass[0] * out_mass[1]) / (out_mass[0] + out_mass[1]))
            out_v_in_CoM_frame.append(cls.radial_to_xyz(cm_in_radial_v[0] + math.pi * 0.5, mv))
            out_v_in_CoM_frame.append(cls.radial_to_xyz(cm_in_radial_v[0] + math.pi * 1.5, mv))
        elif len(out_mass) == 3:
            # Sum of vector momentums = 0, and in centre of momentum frame arranged as equilateral triangle, side mv
            # Must then convert to velocities by dividing by particle mass, which means no longer equilateral...but unimportant, as only needed equilateral to initially arrange
            ke_in_CM_frame = random.uniform(0, energy_of_collision)  # The energy of the collision - over and above the energy of the centre of mass, which is invariant
            IE = energy_of_collision - ke_in_CM_frame
            mv = math.sqrt((2.0 * ke_in_CM_frame * out_mass[0] * out_mass[1] * out_mass[2]) / (out_mass[0] * out_mass[1] + out_mass[1] * out_mass[2] + out_mass[0] * out_mass[2]))
            out_v_in_CoM_frame.append(cls.radial_to_xyz(cm_in_radial_v[0] + math.pi / 3.0, mv))
            out_v_in_CoM_frame.append(cls.radial_to_xyz(cm_in_radial_v[0] - math.pi / 3.0, mv))
            out_v_in_CoM_frame.append(cls.radial_to_xyz(cm_in_radial_v[0] + math.pi, mv))

        # Now convert from momentums to velocities by scaling by 1/mass
        out_v_in_CoM_frame = [[mv_component / mass for mv_component in particle_mv] for particle_mv, mass in zip(out_v_in_CoM_frame, out_mass)]
        # Finally convert back from CoM frame to lab frame
        out_v = [[v_ + cm_v_ for v_, cm_v_ in zip(v, cm_in_v)] for v in out_v_in_CoM_frame]

        #########################
        # Confirm post-conditions
        # 1. Mass
        assert Float_t.almost_equal(sum(in_mass), sum(out_mass))

        # 2. Momentum
        out_mv = [[m * v_ for v_ in v] for m, v in zip(out_mass, out_v)]
        in_mv_total = total_mv(in_mv)
        out_mv_total = total_mv(out_mv)
        logging.debug("IN MV = {}, OUT MV = {}".format(in_mv_total, out_mv_total))
        for in_, out_ in zip(in_mv_total, out_mv_total):
            assert Float_t.almost_equal(in_, out_)

        # 3. Energy
        out_ke = sum([cls.get_ke(m, *v) for m, v in zip(out_mass, out_v)])
        logging.debug("IN_KE + IN_IE = {}+{} = {}, OUT_KE + DELTA + IE = {} + {} + {} = {}".format(in_ke, in_ie, in_ke + in_ie, out_ke, energy_delta, IE, out_ke + energy_delta + IE))
        assert Float_t.almost_equal(in_ke + in_ie, out_ke + energy_delta + IE, max_diff=config.EnergyTolerance)

        return out_v, IE
|
gpl-3.0
| 6,768,914,614,100,645,000
| 43.982857
| 184
| 0.597053
| false
| 3.107777
| false
| false
| false
|
Diviyan-Kalainathan/causal-humans
|
ClusterAnalysis/v_test.py
|
1
|
2510
|
'''
Analyses the clusters and returns v-type of vars
Author : Diviyan Kalainathan
Date : 28/06/2016
DEPRECATED - Use plot-gen/Cluster_extraction instead
'''
import csv,numpy
def v_test(input_data, data_folder, num_clusters, num_vars, list_vars):
    """
    Computes the v-test statistic of each variable for each cluster and
    writes one 'cluster_similarity_<i>.csv' file per cluster.

    :param input_data: Data used to do the clustering(String)
    :param data_folder: Folder where the clustering output is(String)
    :param num_clusters: Number of clusters(int)
    :param num_vars: Number of variables to analyse(int)
    :param list_vars: List of these vars(list[String])
    :return: 0

    BUG FIXES vs. the original:
    - csv.reader yields strings; values are now converted to float before
      numpy.mean/numpy.std (which fail on lists of strings).
    - the cluster-size file handles were never closed; `with` is used.
    - the input file was re-read once per variable; it is now read once.
    """
    # Population-wide mean (column 0) and std (column 1) per variable.
    totaldata = numpy.zeros((num_vars, 2))
    columns = [[] for _ in range(num_vars)]
    with open('input/' + input_data, 'rb') as totalfile:
        datareader = csv.reader(totalfile, delimiter=';', quotechar='|')
        header = next(datareader)  # skip the header row
        for row in datareader:
            for n in range(num_vars):
                columns[n].append(float(row[n]))
    for n in range(num_vars):
        totaldata[n, 0] = numpy.mean(columns[n])
        totaldata[n, 1] = numpy.std(columns[n])

    # Cluster sizes: line count minus the two non-data lines per file.
    cluster_size = numpy.zeros((num_clusters))
    for i in range(num_clusters):
        with open('output/' + data_folder + '/cluster_' + str(i) + '.csv') as size_file:
            cluster_size[i] = len(size_file.readlines()) - 2
    total_size = numpy.sum(cluster_size)

    for num_file in range(num_clusters):
        # Load this cluster's values once, one list per variable.
        cluster_columns = [[] for _ in range(num_vars)]
        with open('output/' + data_folder + '/cluster_' + str(num_file) + '.csv', 'rb') as datafile:
            datareader = csv.reader(datafile, delimiter=';', quotechar='|')
            header = next(datareader)
            for row in datareader:
                for n_var in range(num_vars):
                    cluster_columns[n_var].append(float(row[n_var]))
        with open('output/' + data_folder + '/cluster_similarity_' + str(int(num_file)) + '.csv', 'wb') as outputfile:
            datawriter = csv.writer(outputfile, delimiter=';', quotechar='|',
                                    lineterminator='\n')
            datawriter.writerow(['Var name', 'V-type'])
            for n_var in range(num_vars):
                # v-test: deviation of the cluster mean from the population
                # mean, scaled by the finite-population-corrected std error.
                v_type = ((numpy.mean(cluster_columns[n_var]) - totaldata[n_var, 0]) /
                          numpy.sqrt(((total_size - cluster_size[num_file]) / (total_size - 1)) *
                                     ((totaldata[n_var, 1] ** 2) / cluster_size[num_file])))
                datawriter.writerow([list_vars[n_var], v_type])
    return 0
|
mit
| 1,592,596,920,828,584,400
| 42.293103
| 202
| 0.584064
| false
| 3.632417
| false
| false
| false
|
wateraccounting/wa
|
Collect/GLEAM/DataAccess.py
|
1
|
8456
|
# -*- coding: utf-8 -*-
# -*- coding: utf-8 -*-
"""
Authors: Tim Hessels
UNESCO-IHE 2016
Contact: t.hessels@unesco-ihe.org
Repository: https://github.com/wateraccounting/wa
Module: Collect/GLEAM
"""
# import general python modules
import os
import numpy as np
import pandas as pd
import glob
from joblib import Parallel, delayed
import paramiko
import calendar
from netCDF4 import Dataset
# Water Accounting modules
import wa.General.data_conversions as DC
from wa import WebAccounts
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores, TimeCase):
"""
This function downloads GLEAM ET data
Keyword arguments:
Dir -- 'C:/file/to/path/'
Startdate -- 'yyyy-mm-dd'
Enddate -- 'yyyy-mm-dd'
latlim -- [ymin, ymax] (values must be between -50 and 50)
lonlim -- [xmin, xmax] (values must be between -180 and 180)
cores -- The number of cores used to run the routine. It can be 'False'
to avoid using parallel computing routines.
Waitbar -- 1 (Default) will print a waitbar
"""
# Check start and end date and otherwise set the date
if not Startdate:
Startdate = pd.Timestamp('2003-01-01')
if not Enddate:
Enddate = pd.Timestamp('2015-12-31')
# Make an array of the days of which the ET is taken
YearsDownloadstart = str(Startdate[0:4])
YearsDownloadend = str(Enddate[0:4])
Years = range(int(YearsDownloadstart),int(YearsDownloadend)+1)
# String Parameters
if TimeCase == 'daily':
VarCode = 'ET_GLEAM.V3.1b_mm-day-1_daily'
FTPprefix = 'data/v3.1b/'
TimeFreq = 'D'
Folder_name = 'Daily'
elif TimeCase == 'monthly':
VarCode = 'ET_GLEAM.V3.1b_mm-month-1_monthly'
FTPprefix = 'data/v3.1b/'
TimeFreq = 'M'
Folder_name = 'Monthly'
# Get end of month for Enddate
monthDownloadend = str(Enddate[5:7])
End_month = calendar.monthrange(int(YearsDownloadend),int(monthDownloadend))[1]
Enddate = '%d-%02d-%d' %(int(YearsDownloadend),int(monthDownloadend),int(End_month))
else:
raise KeyError("The input time interval is not supported")
Dates = pd.date_range(Startdate, Enddate, freq = TimeFreq)
# Make directory for the MODIS ET data
output_folder=os.path.join(Dir,'Evaporation', 'GLEAM', Folder_name)
if not os.path.exists(output_folder):
os.makedirs(output_folder)
# Check variables
if latlim[0] < -50 or latlim[1] > 50:
print ('Latitude above 50N or below 50S is not possible.'
' Value set to maximum')
latlim[0] = np.max(latlim[0], -50)
latlim[1] = np.min(lonlim[1], 50)
if lonlim[0] < -180 or lonlim[1] > 180:
print ('Longitude must be between 180E and 180W.'
' Now value is set to maximum')
lonlim[0] = np.max(latlim[0], -180)
lonlim[1] = np.min(lonlim[1], 180)
# Collect the data from the GLEAM webpage and returns the data and lat and long in meters of those tiles
try:
Collect_data(FTPprefix, Years, output_folder, Waitbar)
except:
print "Was not able to download the file"
# Create Waitbar
print '\nProcess the GLEAM data'
if Waitbar == 1:
import wa.Functions.Start.WaitbarConsole as WaitbarConsole
total_amount = len(Dates)
amount = 0
WaitbarConsole.printWaitBar(amount, total_amount, prefix = 'Progress:', suffix = 'Complete', length = 50)
# Pass variables to parallel function and run
args = [output_folder, latlim, lonlim, VarCode, TimeCase]
if not cores:
for Date in Dates:
RetrieveData(Date, args)
if Waitbar == 1:
amount += 1
WaitbarConsole.printWaitBar(amount, total_amount, prefix = 'Progress:', suffix = 'Complete', length = 50)
results = True
else:
results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args)
for Date in Dates)
# Remove all .hdf files
os.chdir(output_folder)
files = glob.glob("*.nc")
for f in files:
os.remove(os.path.join(output_folder, f))
return(results)
def RetrieveData(Date, args):
    """
    This function retrieves GLEAM ET data for a given date from the
    www.gleam.eu server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [output_folder, latlim, lonlim, VarCode, TimeCase] = args

    # Adjust latlim to GLEAM dataset (the latitude axis is stored inverted,
    # hence the sign flip and the swapped order)
    latlim1 = [latlim[1] * -1, latlim[0] * -1]

    # select the spatial dataset: grid indices on the 0.25-degree GLEAM grid
    Ystart = int(np.floor((latlim1[0] + 90) / 0.25))
    Yend = int(np.ceil((latlim1[1] + 90) / 0.25))
    Xstart = int(np.floor((lonlim[0] + 180) / 0.25))
    Xend = int(np.ceil((lonlim[1] + 180) / 0.25))

    Year = Date.year
    Month = Date.month

    # One netCDF file per year, downloaded beforehand by Collect_data
    filename = 'E_' + str(Year) + '_GLEAM_v3.1b.nc'
    local_filename = os.path.join(output_folder, filename)

    f = Dataset(local_filename, mode='r')
    if TimeCase == 'monthly':
        # defines the start and end of the month
        Datesend1 = str(Date)
        Datesend2 = Datesend1.replace(Datesend1[8:10], "01")
        Datesend3 = Datesend2[0:10]
        Datesend4 = Datesend1[0:10]
        Datestart = pd.date_range(Datesend3, Datesend4, freq='MS')

        # determine the DOY-1 and DOYend (those are use to define the temporal boundaries of the yearly data)
        DOY = int(Datestart[0].strftime('%j'))
        DOYend = int(Date.strftime('%j'))
        DOYDownload = DOY - 1
        Day = 1

        Data = f.variables['E'][DOYDownload:DOYend, Xstart:Xend, Ystart:Yend]
        data = np.array(Data)
        f.close()

        # Sum ET data in time and change the no data value into -999
        # NOTE(review): this is the *builtin* sum() with start value 1, which
        # adds 1 to the element-wise daily total; numpy.sum(data, axis=0) was
        # probably intended -- confirm before changing.
        dataSum = sum(data, 1)
        dataSum[dataSum < -100] = -999.000
        dataCor = np.swapaxes(dataSum, 0, 1)

    if TimeCase == 'daily':
        Day = Date.day

        # Define the DOY, DOY-1 is taken from the yearly dataset
        DOY = int(Date.strftime('%j'))
        DOYDownload = DOY - 1

        Data = f.variables['E'][DOYDownload, Xstart:Xend, Ystart:Yend]
        data = np.array(Data)
        f.close()

        # Change the no data value into -999
        data[data < -100] = -999.000
        dataCor = np.swapaxes(data, 0, 1)

    # The Georeference of the map (GDAL-style geotransform)
    geo_in = [lonlim[0], 0.25, 0.0, latlim[1], 0.0, -0.25]

    # Name of the map
    dataset_name = VarCode + '_' + str(Year) + '.' + str(Month).zfill(2) + '.' + str(Day).zfill(2) + '.tif'
    output_file = os.path.join(output_folder, dataset_name)

    # save data as tiff file
    DC.Save_as_tiff(name=output_file, data=dataCor, geo=geo_in, projection="WGS84")

    return True
def Collect_data(FTPprefix,Years,output_folder, Waitbar):
'''
This function downloads all the needed GLEAM files from hydras.ugent.be as a nc file.
Keywords arguments:
FTPprefix -- FTP path to the GLEAM data
Date -- 'yyyy-mm-dd'
output_folder -- 'C:/file/to/path/'
'''
# account of the SFTP server (only password is missing)
server='hydras.ugent.be'
portnumber=2225
username, password = WebAccounts.Accounts(Type='GLEAM')
# Create Waitbar
print '\nDownload GLEAM data'
if Waitbar == 1:
import wa.Functions.Start.WaitbarConsole as WaitbarConsole
total_amount2 = len(Years)
amount2 = 0
WaitbarConsole.printWaitBar(amount2, total_amount2, prefix = 'Progress:', suffix = 'Complete', length = 50)
for year in Years:
directory = os.path.join(FTPprefix, '%d' %year)
ssh=paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(server, port=portnumber, username=username, password=password)
ftp=ssh.open_sftp()
ftp.chdir(directory)
filename='E_' + str(year) + '_GLEAM_v3.1b.nc'
local_filename = os.path.join(output_folder, filename)
if not os.path.exists(local_filename):
ftp.get(filename, local_filename)
if Waitbar == 1:
amount2 += 1
WaitbarConsole.printWaitBar(amount2, total_amount2, prefix = 'Progress:', suffix = 'Complete', length = 50)
ftp.close()
ssh.close()
return()
|
apache-2.0
| 4,973,839,900,364,555,000
| 32.828
| 121
| 0.607498
| false
| 3.391897
| false
| false
| false
|
delacuesta13/Who-am-I
|
itsme/models.py
|
1
|
4585
|
from django.db import models
from django.contrib.auth.models import User
class UserProfile(models.Model):
    """Extra profile data attached one-to-one to a Django auth User."""
    user = models.ForeignKey(User, unique=True)  # one profile per user
    phone = models.CharField(max_length=100, blank=True)
    location = models.CharField(max_length=100, blank=True)
    profession = models.CharField(max_length=100, blank=True)
    about = models.TextField(blank=True)  # quick overview
    resume = models.TextField(blank=True)  # complete overview
    available_for_work = models.BooleanField(default=True)
class Blog(models.Model):
    """Per-user blog settings (site title and tagline)."""
    user = models.ForeignKey(User, unique=True)  # one blog per user
    site_title = models.CharField(max_length=100, blank=True)
    tagline = models.CharField(max_length=100, blank=True)

    def __unicode__(self):
        return self.site_title
class Category(models.Model):
    """A user-owned category, used for either blog posts or work projects
    depending on ``type_category``."""
    user = models.ForeignKey(User)
    name = models.CharField(max_length=100)
    slug = models.SlugField(max_length=100, blank=True)
    description = models.TextField(blank=True)
    # Distinguishes portfolio ('work') categories from blog categories.
    type_category_choices = (
        ('work', 'Work'),
        ('blog', 'Blog'),
    )
    type_category = models.CharField(max_length=16, choices=type_category_choices)

    def __unicode__(self):
        return self.name
class Post(models.Model):
    """A blog post, linked to categories through CategoryRelationships."""
    blog = models.ForeignKey(Blog)
    categories = models.ManyToManyField(Category, through='CategoryRelationships')
    date = models.DateTimeField(auto_now=False, auto_now_add=False)  # publication date
    title = models.TextField(blank=True)
    slug = models.SlugField(max_length=100, unique=True, blank=True)
    content = models.TextField(blank=True)
    status_choices = (
        ('publish', 'Publish'),
        ('draft', 'Draft'),
        ('future', 'Schedule'),
    )
    status = models.CharField(max_length=16, choices=status_choices, default="publish")
    allow_comments = models.BooleanField(default=True)
    created_at = models.DateTimeField(auto_now=False, auto_now_add=True)
    updated_at = models.DateTimeField(null=True, auto_now=True, auto_now_add=False)

    def __unicode__(self):
        return self.title

    def get_status(self):
        """Return a human-readable label for the post's status.

        BUG FIX: the scheduled label was misspelled 'Schudeled'.
        """
        status = self.status.lower()
        if status == 'draft':
            status = status.capitalize()
        elif status == 'publish':
            status = 'Published'
        elif status == 'future':
            status = 'Scheduled'
        return status
class Comment(models.Model):
    """A reader comment on a Post; the author may optionally be a site user."""
    post = models.ForeignKey(Post)
    # Keep the comment if the authoring user account is deleted.
    user = models.ForeignKey(User, null=True, blank=True, on_delete=models.SET_NULL)
    author = models.CharField(max_length=30)
    email = models.EmailField(max_length=100)
    url = models.URLField(blank=True)
    ip = models.IPAddressField(max_length=100)  # submitter's IP address
    date = models.DateTimeField(auto_now=False, auto_now_add=True)
    content = models.TextField()
    is_moderate = models.BooleanField(default=False)  # approved by moderation
    is_safe = models.BooleanField(default=False)  # if True, allow HTML code
class Project(models.Model):
    """A portfolio project, linked to categories through CategoryRelationships."""
    user = models.ForeignKey(User)
    categories = models.ManyToManyField(Category, through='CategoryRelationships')
    name = models.CharField(max_length=100)
    slug = models.SlugField(max_length=100, unique=True, blank=True)
    description = models.TextField()
    site_url = models.URLField(blank=True)
    created_at = models.DateTimeField(auto_now=False, auto_now_add=True)
    updated_at = models.DateTimeField(null=True, auto_now=True, auto_now_add=False)

    # BUG FIX: was defined as ``__unicode_`` (missing trailing underscore),
    # so Django never used it as the text representation.
    def __unicode__(self):
        return self.name
class CategoryRelationships(models.Model):
    """Through-table linking a Category to either a Post or a Project
    (exactly one of the two is expected to be set)."""
    category = models.ForeignKey(Category)
    post = models.ForeignKey(Post, null=True, blank=True)
    project = models.ForeignKey(Project, null=True, blank=True)
class Message(models.Model):
    """A contact-form message sent to a site user."""
    user = models.ForeignKey(User)  # recipient
    date = models.DateTimeField(auto_now=False, auto_now_add=True)
    ip = models.IPAddressField(max_length=100, null=True, blank=True)  # sender's IP
    author = models.CharField(max_length=100, null=True, blank=True)
    email = models.EmailField(max_length=100, null=True, blank=True)
    subject = models.CharField(max_length=100, null=True, blank=True)
    content = models.TextField(null=True, blank=True)
    is_readed = models.BooleanField(default=False)  # read flag
class Upload(models.Model):
    """A file uploaded by a user (path and basic metadata)."""
    user = models.ForeignKey(User)
    path = models.TextField(blank=True)  # storage path of the file
    title = models.TextField(blank=True)
    upload_date = models.DateTimeField(null=True, auto_now=False, auto_now_add=True)
    extension_file = models.CharField(max_length=100, blank=True)  # e.g. 'jpg', 'pdf'
    description = models.TextField(blank=True)
|
gpl-3.0
| -7,769,707,275,793,938,000
| 40.306306
| 87
| 0.68506
| false
| 3.752046
| false
| false
| false
|
steny138/PythonTaipeiOpendata
|
migrations/versions/3974d310ac43_.py
|
1
|
3035
|
"""empty message
Revision ID: 3974d310ac43
Revises: edfc37a36914
Create Date: 2016-11-11 16:39:16.828429
"""
# revision identifiers, used by Alembic.
revision = '3974d310ac43'
down_revision = 'edfc37a36914'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``routes``, ``stops`` and ``users`` tables."""
    ### commands auto generated by Alembic - please adjust! ###
    # Bus routes with first/last bus times for weekdays and holidays.
    op.create_table('routes',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('providerId', sa.Integer(), nullable=True),
    sa.Column('providerName', sa.String(length=120), nullable=True),
    sa.Column('routeName', sa.String(length=20), nullable=True),
    sa.Column('pathAttributeId', sa.Integer(), nullable=True),
    sa.Column('departure', sa.String(length=20), nullable=True),
    sa.Column('destination', sa.String(length=20), nullable=True),
    sa.Column('distance', sa.String(length=20), nullable=True),
    sa.Column('goFirstBusTime', sa.String(length=4), nullable=True),
    sa.Column('backFirstBusTime', sa.String(length=4), nullable=True),
    sa.Column('goLastBusTime', sa.String(length=4), nullable=True),
    sa.Column('backLastBusTime', sa.String(length=4), nullable=True),
    sa.Column('holidayGoFirstBusTime', sa.String(length=4), nullable=True),
    sa.Column('holidayBackFirstBusTime', sa.String(length=4), nullable=True),
    sa.Column('holidayGoLastBusTime', sa.String(length=4), nullable=True),
    sa.Column('holidayBackLastBusTime', sa.String(length=4), nullable=True),
    sa.Column('segmentBuffer', sa.String(length=200), nullable=True),
    sa.Column('ticketPriceDescription', sa.String(length=20), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Stops along a route, ordered by seqNo, with coordinates as strings.
    op.create_table('stops',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('routeId', sa.Integer(), nullable=True),
    sa.Column('routeName', sa.String(length=200), nullable=True),
    sa.Column('seqNo', sa.Integer(), nullable=True),
    sa.Column('longitude', sa.String(length=50), nullable=True),
    sa.Column('latitude', sa.String(length=50), nullable=True),
    sa.Column('goBack', sa.String(length=2), nullable=True),
    sa.Column('address', sa.String(length=200), nullable=True),
    sa.Column('stopLocationId', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Chat users with their last known location and pending command state.
    op.create_table('users',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('chatid', sa.Integer(), nullable=True),
    sa.Column('last_name', sa.String(length=120), nullable=True),
    sa.Column('first_name', sa.String(length=120), nullable=True),
    sa.Column('lat', sa.String(length=50), nullable=True),
    sa.Column('lng', sa.String(length=50), nullable=True),
    sa.Column('cmd', sa.String(length=1000), nullable=True),
    sa.Column('bus_route', sa.String(length=50), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def downgrade():
    """Drop the three tables created by :func:`upgrade`."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('users')
    op.drop_table('stops')
    op.drop_table('routes')
    ### end Alembic commands ###
|
apache-2.0
| 1,569,172,373,200,974,000
| 41.746479
| 77
| 0.685008
| false
| 3.31694
| false
| false
| false
|
mcfongtw/MkConfig
|
mkconfig/core/chain.py
|
1
|
1494
|
import logging
logger = logging.getLogger(__name__)
class ChainOfTransfiguration(object):
    """
    A chain of responsibility implementation that channels through a series
    of transfigurations. One may depend on a previous step with respect to
    the context passed to :meth:`execute`.

    FIX: the class previously declared ``_chain = []`` and ``_context = {}``
    as class-level mutable attributes as well as instance attributes. The
    class-level ones were dead code (always rebound in ``__init__``) and a
    shared-mutable-state hazard, so they have been removed.
    """

    def __init__(self):
        # Instance-level state only.
        self._chain = []
        self._context = {}

    def add(self, transfiguration):
        """
        Add a transfiguration into the chain of execution.

        :param transfiguration: a transfiguration to be added
        """
        self._chain.append(transfiguration)
        logging.getLogger(__name__).debug(
            'Add transfiguration : [%s] to chain', transfiguration.__class__)

    def get(self, index):
        """
        Retrieve a transfiguration in the chain at position [index].

        :param index: index from 0 to size-of-chain
        :return: the transfiguration at chain[index]
        """
        return self._chain[index]

    def size(self):
        """
        Retrieve the # of transfigurations in chain.

        :return: length of chain
        """
        return len(self._chain)

    def execute(self, context=None):
        """
        Perform execution of transfigurations one-by-one in the chain.

        :param context: a map of key-value attributes shared between steps
        """
        for transfiguration in self._chain:
            logging.getLogger(__name__).info(
                "Performing Transfiguration [%s]", transfiguration.__class__)
            transfiguration.perform(context)
|
mit
| -3,838,645,005,129,700,400
| 24.322034
| 110
| 0.609103
| false
| 4.420118
| false
| false
| false
|
tiborsimko/invenio-jsonschemas
|
invenio_jsonschemas/errors.py
|
1
|
1593
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Invenio-JSONSchemas errors."""
from __future__ import absolute_import, print_function
class JSONSchemaError(Exception):
    """Base class for errors in Invenio-JSONSchemas module."""


class JSONSchemaNotFound(JSONSchemaError):
    """Exception raised when a requested JSONSchema is not found."""

    def __init__(self, schema, *args, **kwargs):
        """Constructor.

        :param schema: path of the requested schema which was not found.
        """
        self.schema = schema
        super(JSONSchemaNotFound, self).__init__(
            'Schema "{}" not found'.format(schema), *args, **kwargs
        )


class JSONSchemaDuplicate(JSONSchemaError):
    """Exception raised when multiple schemas match the same path."""

    def __init__(self, schema, first_dir, second_dir, *args, **kwargs):
        """Constructor.

        :param schema: duplicate schema path.
        :param first_dir: first directory where the schema was found.
        :param second_dir: second directory where the schema was found.
        """
        self.schema = schema
        # BUG FIX: ``.format()`` previously bound only to the second operand
        # of the ``+`` string concatenation, so the ``{schema}`` placeholder
        # in the first part was never substituted. Rely on implicit literal
        # concatenation and format the complete message.
        super(JSONSchemaDuplicate, self).__init__(
            'Schema "{schema}" defined in multiple '
            'directories: "{first}" and "{second}"'.format(
                schema=schema,
                first=first_dir,
                second=second_dir),
            *args, **kwargs)
|
mit
| 5,699,083,285,520,204,000
| 31.510204
| 72
| 0.625235
| false
| 4.328804
| false
| false
| false
|
qPCR4vir/orange3
|
Orange/widgets/unsupervised/owpca.py
|
1
|
14413
|
from PyQt4.QtGui import QFormLayout, QColor, QApplication, QLineEdit
from PyQt4.QtCore import Qt, QTimer
import numpy
import pyqtgraph as pg
from Orange.data import Table, Domain, StringVariable
from Orange.data.sql.table import SqlTable, AUTO_DL_LIMIT
from Orange.preprocess import Normalize
from Orange.projection import PCA
from Orange.widgets import widget, gui, settings
try:
from orangecontrib import remote
remotely = True
except ImportError:
remotely = False
class OWPCA(widget.OWWidget):
name = "PCA"
description = "Principal component analysis with a scree-diagram."
icon = "icons/PCA.svg"
priority = 3050
inputs = [("Data", Table, "set_data")]
outputs = [("Transformed data", Table),
("Components", Table),
("PCA", PCA)]
ncomponents = settings.Setting(2)
variance_covered = settings.Setting(100)
batch_size = settings.Setting(100)
address = settings.Setting('')
auto_update = settings.Setting(True)
auto_commit = settings.Setting(True)
normalize = settings.Setting(True)
maxp = settings.Setting(20)
axis_labels = settings.Setting(10)
graph_name = "plot.plotItem"
    def __init__(self):
        """Build the widget UI: component-selection spins, the optional
        remote/incremental-learning controls, plot options, and the scree
        plot itself."""
        super().__init__()
        # Cached data/model state; (re)filled by set_data()/fit().
        self.data = None
        self._pca = None
        self._transformed = None
        self._variance_ratio = None
        self._cumulative = None
        self._line = False
        self._pca_projector = PCA()
        self._pca_projector.component = self.ncomponents
        self._pca_preprocessors = PCA.preprocessors

        # Components Selection
        box = gui.vBox(self.controlArea, "Components Selection")
        form = QFormLayout()
        box.layout().addLayout(form)

        self.components_spin = gui.spin(
            box, self, "ncomponents", 0, 1000,
            callback=self._update_selection_component_spin,
            keyboardTracking=False
        )
        self.components_spin.setSpecialValueText("All")

        self.variance_spin = gui.spin(
            box, self, "variance_covered", 1, 100,
            callback=self._update_selection_variance_spin,
            keyboardTracking=False
        )
        self.variance_spin.setSuffix("%")
        form.addRow("Components:", self.components_spin)
        form.addRow("Variance covered:", self.variance_spin)

        # Incremental learning (shown only when remote computation is available)
        self.sampling_box = gui.vBox(self.controlArea, "Incremental learning")
        self.addresstext = QLineEdit(box)
        self.addresstext.setPlaceholderText('Remote server')
        if self.address:
            self.addresstext.setText(self.address)
        self.sampling_box.layout().addWidget(self.addresstext)
        form = QFormLayout()
        self.sampling_box.layout().addLayout(form)
        self.batch_spin = gui.spin(
            self.sampling_box, self, "batch_size", 50, 100000, step=50,
            keyboardTracking=False)
        form.addRow("Batch size ~ ", self.batch_spin)
        self.start_button = gui.button(
            self.sampling_box, self, "Start remote computation",
            callback=self.start, autoDefault=False,
            tooltip="Start/abort computation on the server")
        self.start_button.setEnabled(False)
        gui.checkBox(self.sampling_box, self, "auto_update",
                     "Periodically fetch model", callback=self.update_model)
        # Poll the remote server every 2 seconds while a computation runs.
        self.__timer = QTimer(self, interval=2000)
        self.__timer.timeout.connect(self.get_model)
        self.sampling_box.setVisible(remotely)

        # Options
        self.options_box = gui.vBox(self.controlArea, "Options")
        gui.checkBox(self.options_box, self, "normalize", "Normalize data",
                     callback=self._update_normalize)
        self.maxp_spin = gui.spin(
            self.options_box, self, "maxp", 1, 100,
            label="Show only first", callback=self._setup_plot,
            keyboardTracking=False
        )
        self.controlArea.layout().addStretch()
        gui.auto_commit(self.controlArea, self, "auto_commit", "Apply",
                        checkbox_label="Apply automatically")

        # Scree plot area
        self.plot = pg.PlotWidget(background="w")
        axis = self.plot.getAxis("bottom")
        axis.setLabel("Principal Components")
        axis = self.plot.getAxis("left")
        axis.setLabel("Proportion of variance")
        self.plot_horlabels = []
        self.plot_horlines = []
        self.plot.getViewBox().setMenuEnabled(False)
        self.plot.getViewBox().setMouseEnabled(False, False)
        self.plot.showGrid(True, True, alpha=0.5)
        self.plot.setRange(xRange=(0.0, 1.0), yRange=(0.0, 1.0))
        self.mainArea.layout().addWidget(self.plot)

        self._update_normalize()
def update_model(self):
self.get_model()
if self.auto_update and self.rpca and not self.rpca.ready():
self.__timer.start(2000)
else:
self.__timer.stop()
    def start(self):
        """Toggle the remote PCA computation: abort it if one is running,
        otherwise start a new one on the configured server."""
        if 'Abort' in self.start_button.text():
            # A computation is running -> abort and stop polling.
            self.rpca.abort()
            self.__timer.stop()
            self.start_button.setText("Start remote computation")
        else:
            self.address = self.addresstext.text()
            with remote.server(self.address):
                from Orange.projection.pca import RemotePCA
                # Cap the iterations at roughly three batch-wise passes.
                maxiter = (1e5 + self.data.approx_len()) / self.batch_size * 3
                self.rpca = RemotePCA(self.data, self.batch_size, int(maxiter))
            self.update_model()
            self.start_button.setText("Abort remote computation")
    def set_data(self, data):
        """Input-signal handler: store the data table and (re)fit the PCA.

        Small SqlTable inputs (below AUTO_DL_LIMIT) are materialized
        locally; larger ones are down-sampled unless remote computation
        is available.
        """
        self.information(0)
        if isinstance(data, SqlTable):
            if data.approx_len() < AUTO_DL_LIMIT:
                data = Table(data)
            elif not remotely:
                self.information(0, "Data has been sampled")
                data_sample = data.sample_time(1, no_cache=True)
                data_sample.download_data(2000, partial=True)
                data = Table(data_sample)
        self.data = data
        self.fit()
    def fit(self):
        """Fit PCA on the current data locally, or — when the data is still
        a (big) SqlTable — expose the remote-computation controls instead."""
        self.clear()
        self.start_button.setEnabled(False)
        if self.data is None:
            return
        data = self.data
        self._transformed = None
        if isinstance(data, SqlTable):  # data was big and remote available
            self.sampling_box.setVisible(True)
            self.start_button.setText("Start remote computation")
            self.start_button.setEnabled(True)
        else:
            self.sampling_box.setVisible(False)
            pca = self._pca_projector(data)
            variance_ratio = pca.explained_variance_ratio_
            cumulative = numpy.cumsum(variance_ratio)
            self.components_spin.setRange(0, len(cumulative))

            self._pca = pca
            self._variance_ratio = variance_ratio
            self._cumulative = cumulative
            self._setup_plot()

            self.unconditional_commit()
def clear(self):
    """Drop all cached PCA results and wipe the scree plot."""
    # Cached model state.
    self._pca = self._transformed = None
    self._variance_ratio = self._cumulative = None
    # Plot overlays.
    self._line = None
    self.plot_horlabels = []
    self.plot_horlines = []
    self.plot.clear()
def get_model(self):
    """Fetch the latest snapshot of the remote PCA job, if any.

    When the job has finished, polling stops and the button is
    relabelled; either way the plot and outputs are refreshed from the
    most recent state.
    """
    if self.rpca is None:
        return
    if self.rpca.ready():
        self.__timer.stop()
        self.start_button.setText("Restart (finished)")
    self._pca = self.rpca.get_state()
    if self._pca is None:
        return
    self._variance_ratio = self._pca.explained_variance_ratio_
    self._cumulative = numpy.cumsum(self._variance_ratio)
    self._setup_plot()
    # Invalidate the cached full transform; commit() recomputes it.
    self._transformed = None
    self.commit()
def _setup_plot(self):
    """Rebuild the scree plot: variance curves, cut line and guides."""
    self.plot.clear()
    explained_ratio = self._variance_ratio
    explained = self._cumulative
    # Show at most `maxp` leading components.
    p = min(len(self._variance_ratio), self.maxp)
    self.plot.plot(numpy.arange(p), explained_ratio[:p],
                   pen=pg.mkPen(QColor(Qt.red), width=2),
                   antialias=True,
                   name="Variance")
    self.plot.plot(numpy.arange(p), explained[:p],
                   pen=pg.mkPen(QColor(Qt.darkYellow), width=2),
                   antialias=True,
                   name="Cumulative Variance")
    # Draggable vertical cut line at the currently selected component.
    cutpos = self._nselected_components() - 1
    self._line = pg.InfiniteLine(
        angle=90, pos=cutpos, movable=True, bounds=(0, p - 1))
    self._line.setCursor(Qt.SizeHorCursor)
    self._line.setPen(pg.mkPen(QColor(Qt.black), width=2))
    self._line.sigPositionChanged.connect(self._on_cut_changed)
    self.plot.addItem(self._line)
    # Dashed horizontal guides plus value labels that track the cut.
    self.plot_horlines = (
        pg.PlotCurveItem(pen=pg.mkPen(QColor(Qt.blue), style=Qt.DashLine)),
        pg.PlotCurveItem(pen=pg.mkPen(QColor(Qt.blue), style=Qt.DashLine)))
    self.plot_horlabels = (
        pg.TextItem(color=QColor(Qt.black), anchor=(1, 0)),
        pg.TextItem(color=QColor(Qt.black), anchor=(1, 1)))
    for item in self.plot_horlabels + self.plot_horlines:
        self.plot.addItem(item)
    self._set_horline_pos()
    self.plot.setRange(xRange=(0.0, p - 1), yRange=(0.0, 1.0))
    self._update_axis()
def _set_horline_pos(self):
cutidx = self.ncomponents - 1
for line, label, curve in zip(self.plot_horlines, self.plot_horlabels,
(self._variance_ratio, self._cumulative)):
y = curve[cutidx]
line.setData([-1, cutidx], 2 * [y])
label.setPos(cutidx, y)
label.setPlainText("{:.2f}".format(y))
def _on_cut_changed(self, line):
    # cut changed by means of a cut line over the scree plot.
    value = int(round(line.value()))
    # Snap the draggable line to whole component positions.
    self._line.setValue(value)
    current = self._nselected_components()
    components = value + 1
    # Keep the special "All" spin value (0) when the line sits on the
    # last component; otherwise mirror the cut in the spin box.
    if not (self.ncomponents == 0 and
            components == len(self._variance_ratio)):
        self.ncomponents = components
    self._set_horline_pos()
    if self._pca is not None:
        self.variance_covered = self._cumulative[components - 1] * 100
    # Only push new outputs when the effective selection changed.
    if current != self._nselected_components():
        self._invalidate_selection()
def _update_selection_component_spin(self):
    """Handle a change of the "ncomponents" spin."""
    # cut changed by "ncomponents" spin.
    if self._pca is None:
        self._invalidate_selection()
        return
    if self.ncomponents == 0:
        # Special "All" value
        cut = len(self._variance_ratio)
    else:
        cut = self.ncomponents
    # Mirror the selection in the variance spin and the plot cut line.
    self.variance_covered = self._cumulative[cut - 1] * 100
    if numpy.floor(self._line.value()) + 1 != cut:
        self._line.setValue(cut - 1)
    self._invalidate_selection()
def _update_selection_variance_spin(self):
# cut changed by "max variance" spin.
if self._pca is None:
return
cut = numpy.searchsorted(self._cumulative,
self.variance_covered / 100.0)
self.ncomponents = cut + 1
if numpy.floor(self._line.value()) + 1 != cut:
self._line.setValue(cut - 1)
self._invalidate_selection()
def _update_normalize(self):
    """Toggle data normalization on the PCA projector and refit."""
    # Keep the original preprocessor list object when normalization is
    # off; only build a new list when Normalize is appended.
    pp = (self._pca_preprocessors + [Normalize()]
          if self.normalize else self._pca_preprocessors)
    self._pca_projector.preprocessors = pp
    self.fit()
    if self.data is None:
        # Nothing was fitted: still force downstream outputs to refresh.
        self._invalidate_selection()
def _nselected_components(self):
"""Return the number of selected components."""
if self._pca is None:
return 0
if self.ncomponents == 0:
# Special "All" value
max_comp = len(self._variance_ratio)
else:
max_comp = self.ncomponents
var_max = self._cumulative[max_comp - 1]
if var_max != numpy.floor(self.variance_covered / 100.0):
cut = max_comp
self.variance_covered = var_max * 100
else:
self.ncomponents = cut = numpy.searchsorted(
self._cumulative, self.variance_covered / 100.0) + 1
return cut
def _invalidate_selection(self):
    # Selection changed: push fresh outputs to downstream widgets.
    self.commit()
def _update_axis(self):
p = min(len(self._variance_ratio), self.maxp)
axis = self.plot.getAxis("bottom")
d = max((p-1)//(self.axis_labels-1), 1)
axis.setTicks([[(i, str(i+1)) for i in range(0, p, d)]])
def commit(self):
    """Send the transformed data, the components table and the projector.

    Outputs are None when no model has been fitted. The full transform
    is cached so changing the number of components only re-slices the
    domain instead of re-projecting the data.
    """
    transformed = components = None
    if self._pca is not None:
        if self._transformed is None:
            # Compute the full transform (all components) only once.
            self._transformed = self._pca(self.data)
        transformed = self._transformed
        # Keep only the selected leading components as attributes.
        domain = Domain(
            transformed.domain.attributes[:self.ncomponents],
            self.data.domain.class_vars,
            self.data.domain.metas
        )
        transformed = transformed.from_table(domain, transformed)
        # One row per selected component, labelled PC1..PCn.
        dom = Domain(self._pca.orig_domain.attributes,
                     metas=[StringVariable(name='component')])
        metas = numpy.array([['PC{}'.format(i + 1)
                              for i in range(self.ncomponents)]],
                            dtype=object).T
        components = Table(dom, self._pca.components_[:self.ncomponents],
                           metas=metas)
        components.name = 'components'
        self._pca_projector.component = self.ncomponents
    self.send("Transformed data", transformed)
    self.send("Components", components)
    self.send("PCA", self._pca_projector)
def send_report(self):
    """Add the selection summary and the scree plot to the report."""
    if self.data is None:
        return
    self.report_items((
        ("Selected components", self.ncomponents),
        ("Explained variance", "{:.3f} %".format(self.variance_covered))
    ))
    self.report_plot()
def main():
    """Manual smoke test: show the widget on a sample dataset."""
    import gc
    app = QApplication([])
    w = OWPCA()
    # data = Table("iris")
    # data = Table("wine")
    data = Table("housing")
    w.set_data(data)
    w.show()
    w.raise_()
    rval = w.exec()
    w.deleteLater()
    del w
    # Let Qt process deferred deletions before collecting garbage.
    app.processEvents()
    gc.collect()
    return rval
if __name__ == "__main__":
    main()
|
bsd-2-clause
| 5,290,078,399,030,746,000
| 34.153659
| 80
| 0.576077
| false
| 3.978195
| false
| false
| false
|
apporc/neutron
|
neutron/db/db_base_plugin_v2.py
|
1
|
64808
|
# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import netaddr
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import uuidutils
from sqlalchemy import and_
from sqlalchemy import event
from neutron._i18n import _, _LE, _LI
from neutron.api.rpc.agentnotifiers import l3_rpc_agent_api
from neutron.api.v2 import attributes
from neutron.callbacks import events
from neutron.callbacks import exceptions
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import constants
from neutron.common import exceptions as n_exc
from neutron.common import ipv6_utils
from neutron.common import utils
from neutron import context as ctx
from neutron.db import api as db_api
from neutron.db import db_base_plugin_common
from neutron.db import ipam_non_pluggable_backend
from neutron.db import ipam_pluggable_backend
from neutron.db import models_v2
from neutron.db import rbac_db_mixin as rbac_mixin
from neutron.db import rbac_db_models as rbac_db
from neutron.db import sqlalchemyutils
from neutron.extensions import l3
from neutron import ipam
from neutron.ipam import subnet_alloc
from neutron import manager
from neutron import neutron_plugin_base_v2
from neutron.notifiers import nova as nova_notifier
from neutron.plugins.common import constants as service_constants
LOG = logging.getLogger(__name__)
# Ports with the following 'device_owner' values will not prevent
# network deletion. If delete_network() finds that all ports on a
# network have these owners, it will explicitly delete each port
# and allow network deletion to continue. Similarly, if delete_subnet()
# finds out that all existing IP Allocations are associated with ports
# with these owners, it will allow subnet deletion to proceed with the
# IP allocations being cleaned up by cascade.
AUTO_DELETE_PORT_OWNERS = [constants.DEVICE_OWNER_DHCP]
# Default DNS domain used when none is configured.
DNS_DOMAIN_DEFAULT = 'openstacklocal.'
# Maximum length of a fully qualified domain name (per RFC 1035).
FQDN_MAX_LEN = 255
def _check_subnet_not_used(context, subnet_id):
    """Notify BEFORE_DELETE subscribers; translate a veto into SubnetInUse."""
    try:
        registry.notify(resources.SUBNET, events.BEFORE_DELETE, None,
                        context=context, subnet_id=subnet_id)
    except exceptions.CallbackFailure as e:
        raise n_exc.SubnetInUse(subnet_id=subnet_id, reason=e)
class NeutronDbPluginV2(db_base_plugin_common.DbBasePluginCommon,
neutron_plugin_base_v2.NeutronPluginBaseV2,
rbac_mixin.RbacPluginMixin):
"""V2 Neutron plugin interface implementation using SQLAlchemy models.
Whenever a non-read call happens the plugin will call an event handler
class method (e.g., network_created()). The result is that this class
can be sub-classed by other classes that add custom behaviors on certain
events.
"""
# This attribute specifies whether the plugin supports or not
# bulk/pagination/sorting operations. Name mangling is used in
# order to ensure it is qualified by class
__native_bulk_support = True
__native_pagination_support = True
__native_sorting_support = True
def __init__(self):
    """Wire up IPAM, optional Nova notifications and RBAC callbacks."""
    self.set_ipam_backend()
    if cfg.CONF.notify_nova_on_port_status_changes:
        # NOTE(arosen) These event listeners are here to hook into when
        # port status changes and notify nova about their change.
        self.nova_notifier = nova_notifier.Notifier()
        event.listen(models_v2.Port, 'after_insert',
                     self.nova_notifier.send_port_status)
        event.listen(models_v2.Port, 'after_update',
                     self.nova_notifier.send_port_status)
        event.listen(models_v2.Port.status, 'set',
                     self.nova_notifier.record_port_status_changed)
    # Vet every network RBAC policy change before it is applied.
    for e in (events.BEFORE_CREATE, events.BEFORE_UPDATE,
              events.BEFORE_DELETE):
        registry.subscribe(self.validate_network_rbac_policy_change,
                           rbac_mixin.RBAC_POLICY, e)
def validate_network_rbac_policy_change(self, resource, event, trigger,
                                        context, object_type, policy,
                                        **kwargs):
    """Validates network RBAC policy changes.
    On creation, verify that the creator is an admin or that it owns the
    network it is sharing.
    On update and delete, make sure the tenant losing access does not have
    resources that depend on that access.
    """
    if object_type != 'network':
        # we only care about network policies
        return
    # The object a policy targets cannot be changed so we can look
    # at the original network for the update event as well.
    net = self._get_network(context, policy['object_id'])
    if event in (events.BEFORE_CREATE, events.BEFORE_UPDATE):
        # we still have to verify that the caller owns the network because
        # _get_network will succeed on a shared network
        if not context.is_admin and net['tenant_id'] != context.tenant_id:
            msg = _("Only admins can manipulate policies on networks "
                    "they do not own.")
            raise n_exc.InvalidInput(error_message=msg)
    # Work out which tenant (if any) is about to lose access.
    tenant_to_check = None
    if event == events.BEFORE_UPDATE:
        new_tenant = kwargs['policy_update']['target_tenant']
        if policy['target_tenant'] != new_tenant:
            tenant_to_check = policy['target_tenant']
    if event == events.BEFORE_DELETE:
        tenant_to_check = policy['target_tenant']
    if tenant_to_check:
        self.ensure_no_tenant_ports_on_network(net['id'], net['tenant_id'],
                                               tenant_to_check)
def ensure_no_tenant_ports_on_network(self, network_id, net_tenant_id,
                                      tenant_id):
    """Raise if revoking RBAC access would strand a tenant's ports.

    :param network_id: network whose sharing entry is being removed
    :param net_tenant_id: owner of the network (always keeps access)
    :param tenant_id: tenant losing access, or '*' for the wildcard entry
    :raises InvalidSharedSetting: if an unauthorized port would remain
    """
    ctx_admin = ctx.get_admin_context()
    rb_model = rbac_db.NetworkRBAC
    other_rbac_entries = self._model_query(ctx_admin, rb_model).filter(
        and_(rb_model.object_id == network_id,
             rb_model.action == 'access_as_shared'))
    ports = self._model_query(ctx_admin, models_v2.Port).filter(
        models_v2.Port.network_id == network_id)
    if tenant_id == '*':
        # for the wildcard we need to get all of the rbac entries to
        # see if any allow the remaining ports on the network.
        other_rbac_entries = other_rbac_entries.filter(
            rb_model.target_tenant != tenant_id)
        # any port with another RBAC entry covering it or one belonging to
        # the same tenant as the network owner is ok
        allowed_tenants = [entry['target_tenant']
                           for entry in other_rbac_entries]
        allowed_tenants.append(net_tenant_id)
        ports = ports.filter(
            ~models_v2.Port.tenant_id.in_(allowed_tenants))
    else:
        # if there is a wildcard rule, we can return early because it
        # allows any ports
        query = other_rbac_entries.filter(rb_model.target_tenant == '*')
        if query.count():
            return
        ports = ports.filter(models_v2.Port.tenant_id == tenant_id)
    if ports.count():
        raise n_exc.InvalidSharedSetting(network=network_id)
def set_ipam_backend(self):
    """Select the pluggable or built-in IPAM backend from config."""
    backend_cls = (ipam_pluggable_backend.IpamPluggableBackend
                   if cfg.CONF.ipam_driver
                   else ipam_non_pluggable_backend.IpamNonPluggableBackend)
    self.ipam = backend_cls()
def _validate_host_route(self, route, ip_version):
    """Validate one host-route dict ('destination' CIDR, 'nexthop' IP).

    :raises InvalidInput: if either field cannot be parsed or its IP
        version does not match *ip_version*.
    """
    try:
        netaddr.IPNetwork(route['destination'])
        netaddr.IPAddress(route['nexthop'])
    # The two original except branches were byte-identical; merged.
    # ValueError can be raised by netaddr.IPAddress.
    except (netaddr.core.AddrFormatError, ValueError):
        err_msg = _("Invalid route: %s") % route
        raise n_exc.InvalidInput(error_message=err_msg)
    self._validate_ip_version(ip_version, route['nexthop'], 'nexthop')
    self._validate_ip_version(ip_version, route['destination'],
                              'destination')
def _validate_shared_update(self, context, id, original, updated):
    """Ensure un-sharing a network does not strand other tenants."""
    # The only case that needs to be validated is when 'shared'
    # goes from True to False
    if updated['shared'] == original.shared or updated['shared']:
        return
    # Router gateway and floating-IP ports are service-owned and do
    # not count against the un-share.
    ports = self._model_query(
        context, models_v2.Port).filter(
            and_(
                models_v2.Port.network_id == id,
                models_v2.Port.device_owner !=
                constants.DEVICE_OWNER_ROUTER_GW,
                models_v2.Port.device_owner !=
                constants.DEVICE_OWNER_FLOATINGIP))
    subnets = self._model_query(
        context, models_v2.Subnet).filter(
            models_v2.Subnet.network_id == id)
    tenant_ids = set([port['tenant_id'] for port in ports] +
                     [subnet['tenant_id'] for subnet in subnets])
    # raise if multiple tenants found or if the only tenant found
    # is not the owner of the network
    if (len(tenant_ids) > 1 or len(tenant_ids) == 1 and
            tenant_ids.pop() != original.tenant_id):
        raise n_exc.InvalidSharedSetting(network=original.name)
def _validate_ipv6_attributes(self, subnet, cur_subnet):
    """Cross-validate ipv6_ra_mode / ipv6_address_mode on a subnet.

    For updates (cur_subnet set) only the DHCP interaction is checked;
    for creations the full mode combination is validated.
    """
    if cur_subnet:
        self._validate_ipv6_update_dhcp(subnet, cur_subnet)
        return
    ra_mode_set = attributes.is_attr_set(subnet.get('ipv6_ra_mode'))
    address_mode_set = attributes.is_attr_set(
        subnet.get('ipv6_address_mode'))
    self._validate_ipv6_dhcp(ra_mode_set, address_mode_set,
                             subnet['enable_dhcp'])
    if ra_mode_set and address_mode_set:
        self._validate_ipv6_combination(subnet['ipv6_ra_mode'],
                                        subnet['ipv6_address_mode'])
    if address_mode_set or ra_mode_set:
        self._validate_eui64_applicable(subnet)
def _validate_eui64_applicable(self, subnet):
    """Require a /64 prefix on auto-addressed (SLAAC/stateless) subnets."""
    # Per RFC 4862, section 5.5.3, prefix length and interface
    # id together should be equal to 128. Currently neutron supports
    # EUI64 interface id only, thus limiting the prefix
    # length to be 64 only.
    if ipv6_utils.is_auto_address_subnet(subnet):
        if netaddr.IPNetwork(subnet['cidr']).prefixlen != 64:
            msg = _('Invalid CIDR %s for IPv6 address mode. '
                    'OpenStack uses the EUI-64 address format, '
                    'which requires the prefix to be /64.')
            raise n_exc.InvalidInput(
                error_message=(msg % subnet['cidr']))
def _validate_ipv6_combination(self, ra_mode, address_mode):
    """Reject subnets whose ra_mode and address_mode disagree."""
    if ra_mode == address_mode:
        return
    msg = _("ipv6_ra_mode set to '%(ra_mode)s' with ipv6_address_mode "
            "set to '%(addr_mode)s' is not valid. "
            "If both attributes are set, they must be the same value"
            ) % {'ra_mode': ra_mode, 'addr_mode': address_mode}
    raise n_exc.InvalidInput(error_message=msg)
def _validate_ipv6_dhcp(self, ra_mode_set, address_mode_set, enable_dhcp):
    """Reject IPv6 modes on subnets where DHCP is disabled."""
    if enable_dhcp or not (ra_mode_set or address_mode_set):
        return
    raise n_exc.InvalidInput(error_message=_(
        "ipv6_ra_mode or ipv6_address_mode cannot be set when "
        "enable_dhcp is set to False."))
def _validate_ipv6_update_dhcp(self, subnet, cur_subnet):
    """Forbid disabling DHCP while IPv6 modes are (or were) set."""
    if ('enable_dhcp' in subnet and not subnet['enable_dhcp']):
        msg = _("Cannot disable enable_dhcp with "
                "ipv6 attributes set")
        # Reject if the update itself sets either IPv6 mode...
        ra_mode_set = attributes.is_attr_set(subnet.get('ipv6_ra_mode'))
        address_mode_set = attributes.is_attr_set(
            subnet.get('ipv6_address_mode'))
        if ra_mode_set or address_mode_set:
            raise n_exc.InvalidInput(error_message=msg)
        # ...or if the existing subnet already has one set.
        old_ra_mode_set = attributes.is_attr_set(
            cur_subnet.get('ipv6_ra_mode'))
        old_address_mode_set = attributes.is_attr_set(
            cur_subnet.get('ipv6_address_mode'))
        if old_ra_mode_set or old_address_mode_set:
            raise n_exc.InvalidInput(error_message=msg)
def _create_bulk(self, resource, context, request_items):
    """Create many <resource> objects inside one transaction.

    Calls self.create_<resource>() for every item in the request; on
    any failure the transaction is rolled back and the exception
    re-raised after logging.
    """
    objects = []
    collection = "%ss" % resource
    items = request_items[collection]
    context.session.begin(subtransactions=True)
    try:
        for item in items:
            obj_creator = getattr(self, 'create_%s' % resource)
            objects.append(obj_creator(context, item))
        context.session.commit()
    except Exception:
        context.session.rollback()
        with excutils.save_and_reraise_exception():
            # NOTE(review): 'item' is unbound here when 'items' is empty
            # and commit() itself raises -- confirm before relying on
            # this log line in that path.
            LOG.error(_LE("An exception occurred while creating "
                          "the %(resource)s:%(item)s"),
                      {'resource': resource, 'item': item})
    return objects
def create_network_bulk(self, context, networks):
    """Create multiple networks in a single transaction."""
    return self._create_bulk('network', context, networks)
def create_network(self, context, network):
    """Handle creation of a single network."""
    # single request processing
    n = network['network']
    # NOTE(jkoelker) Get the tenant_id outside of the session to avoid
    # unneeded db action if the operation raises
    tenant_id = self._get_tenant_id_for_create(context, n)
    with context.session.begin(subtransactions=True):
        args = {'tenant_id': tenant_id,
                'id': n.get('id') or uuidutils.generate_uuid(),
                'name': n['name'],
                'admin_state_up': n['admin_state_up'],
                'mtu': n.get('mtu', constants.DEFAULT_NETWORK_MTU),
                'status': n.get('status', constants.NET_STATUS_ACTIVE)}
        network = models_v2.Network(**args)
        if n['shared']:
            # 'shared' is modelled as a wildcard access_as_shared RBAC
            # entry rather than as a column on the network itself.
            entry = rbac_db.NetworkRBAC(
                network=network, action='access_as_shared',
                target_tenant='*', tenant_id=network['tenant_id'])
            context.session.add(entry)
        context.session.add(network)
    return self._make_network_dict(network, process_extensions=False,
                                   context=context)
def update_network(self, context, id, network):
    """Update a network; maps 'shared' onto its wildcard RBAC entry."""
    n = network['network']
    with context.session.begin(subtransactions=True):
        network = self._get_network(context, id)
        # validate 'shared' parameter
        if 'shared' in n:
            entry = None
            # Locate the wildcard share entry, if one exists.
            for item in network.rbac_entries:
                if (item.action == 'access_as_shared' and
                        item.target_tenant == '*'):
                    entry = item
                    break
            setattr(network, 'shared', True if entry else False)
            self._validate_shared_update(context, id, network, n)
            update_shared = n.pop('shared')
            if update_shared and not entry:
                entry = rbac_db.NetworkRBAC(
                    network=network, action='access_as_shared',
                    target_tenant='*', tenant_id=network['tenant_id'])
                context.session.add(entry)
            elif not update_shared and entry:
                context.session.delete(entry)
                # Refresh so 'shared' is recomputed from rbac_entries.
                context.session.expire(network, ['rbac_entries'])
        network.update(n)
    return self._make_network_dict(network, context=context)
def delete_network(self, context, id):
    """Delete a network after removing auto-deletable ports.

    DHCP ports (AUTO_DELETE_PORT_OWNERS) are removed silently; any
    other remaining port makes the network 'in use'.
    """
    with context.session.begin(subtransactions=True):
        network = self._get_network(context, id)
        context.session.query(models_v2.Port).filter_by(
            network_id=id).filter(
            models_v2.Port.device_owner.
            in_(AUTO_DELETE_PORT_OWNERS)).delete(synchronize_session=False)
        port_in_use = context.session.query(models_v2.Port).filter_by(
            network_id=id).first()
        if port_in_use:
            raise n_exc.NetworkInUse(net_id=id)
        # clean up subnets
        subnets = self._get_subnets_by_network(context, id)
        for subnet in subnets:
            self.delete_subnet(context, subnet['id'])
        context.session.delete(network)
def get_network(self, context, id, fields=None):
    """Return a single network as a dict, optionally field-filtered."""
    return self._make_network_dict(self._get_network(context, id),
                                   fields, context=context)
def get_networks(self, context, filters=None, fields=None,
                 sorts=None, limit=None, marker=None,
                 page_reverse=False):
    """Return networks matching the filters, with paging support."""
    marker_obj = self._get_marker_obj(context, 'network', limit, marker)
    # Bind the request context so extensions see it when building dicts.
    make_network_dict = functools.partial(self._make_network_dict,
                                          context=context)
    return self._get_collection(context, models_v2.Network,
                                make_network_dict,
                                filters=filters, fields=fields,
                                sorts=sorts,
                                limit=limit,
                                marker_obj=marker_obj,
                                page_reverse=page_reverse)
def get_networks_count(self, context, filters=None):
    """Return the number of networks matching the filters."""
    return self._get_collection_count(context, models_v2.Network,
                                      filters=filters)
def create_subnet_bulk(self, context, subnets):
    """Create multiple subnets in a single transaction."""
    return self._create_bulk('subnet', context, subnets)
def _validate_ip_version(self, ip_version, addr, name):
    """Check IP field of a subnet match specified ip version."""
    if netaddr.IPNetwork(addr).version == ip_version:
        return
    msg = _("%(name)s '%(addr)s' does not match "
            "the ip_version '%(ip_version)s'") % {'name': name,
                                                  'addr': addr,
                                                  'ip_version': ip_version}
    raise n_exc.InvalidInput(error_message=msg)
def _validate_subnet(self, context, s, cur_subnet=None):
    """Validate a subnet spec."""
    # This method will validate attributes which may change during
    # create_subnet() and update_subnet().
    # The method requires the subnet spec 's' has 'ip_version' field.
    # If 's' dict does not have 'ip_version' field in an API call
    # (e.g., update_subnet()), you need to set 'ip_version' field
    # before calling this method.
    ip_ver = s['ip_version']
    if attributes.is_attr_set(s.get('cidr')):
        self._validate_ip_version(ip_ver, s['cidr'], 'cidr')
    # TODO(watanabe.isao): After we found a way to avoid the re-sync
    # from the agent side, this restriction could be removed.
    if cur_subnet:
        dhcp_was_enabled = cur_subnet.enable_dhcp
    else:
        dhcp_was_enabled = False
    if s.get('enable_dhcp') and not dhcp_was_enabled:
        subnet_prefixlen = netaddr.IPNetwork(s['cidr']).prefixlen
        error_message = _("Subnet has a prefix length that is "
                          "incompatible with DHCP service enabled.")
        # DHCP needs room for at least one usable address besides
        # network/broadcast/gateway.
        if ((ip_ver == 4 and subnet_prefixlen > 30) or
                (ip_ver == 6 and subnet_prefixlen > 126)):
            raise n_exc.InvalidInput(error_message=error_message)
        net = netaddr.IPNetwork(s['cidr'])
        if net.is_multicast():
            error_message = _("Multicast IP subnet is not supported "
                              "if enable_dhcp is True.")
            raise n_exc.InvalidInput(error_message=error_message)
        elif net.is_loopback():
            error_message = _("Loopback IP subnet is not supported "
                              "if enable_dhcp is True.")
            raise n_exc.InvalidInput(error_message=error_message)
    if attributes.is_attr_set(s.get('gateway_ip')):
        self._validate_ip_version(ip_ver, s['gateway_ip'], 'gateway_ip')
        if (cfg.CONF.force_gateway_on_subnet and
                not ipam.utils.check_gateway_in_subnet(
                    s['cidr'], s['gateway_ip'])):
            error_message = _("Gateway is not valid on subnet")
            raise n_exc.InvalidInput(error_message=error_message)
        # Ensure the gateway IP is not assigned to any port
        # skip this check in case of create (s parameter won't have id)
        # NOTE(salv-orlando): There is slight chance of a race, when
        # a subnet-update and a router-interface-add operation are
        # executed concurrently
        if cur_subnet and not ipv6_utils.is_ipv6_pd_enabled(s):
            alloc_qry = context.session.query(models_v2.IPAllocation)
            allocated = alloc_qry.filter_by(
                ip_address=cur_subnet['gateway_ip'],
                subnet_id=cur_subnet['id']).first()
            if allocated and allocated['port_id']:
                raise n_exc.GatewayIpInUse(
                    ip_address=cur_subnet['gateway_ip'],
                    port_id=allocated['port_id'])
    if attributes.is_attr_set(s.get('dns_nameservers')):
        if len(s['dns_nameservers']) > cfg.CONF.max_dns_nameservers:
            raise n_exc.DNSNameServersExhausted(
                subnet_id=s.get('id', _('new subnet')),
                quota=cfg.CONF.max_dns_nameservers)
        for dns in s['dns_nameservers']:
            try:
                netaddr.IPAddress(dns)
            except Exception:
                raise n_exc.InvalidInput(
                    error_message=(_("Error parsing dns address %s") %
                                   dns))
            self._validate_ip_version(ip_ver, dns, 'dns_nameserver')
    if attributes.is_attr_set(s.get('host_routes')):
        if len(s['host_routes']) > cfg.CONF.max_subnet_host_routes:
            raise n_exc.HostRoutesExhausted(
                subnet_id=s.get('id', _('new subnet')),
                quota=cfg.CONF.max_subnet_host_routes)
        # check if the routes are all valid
        for rt in s['host_routes']:
            self._validate_host_route(rt, ip_ver)
    if ip_ver == 4:
        # IPv6-only modes make no sense on an IPv4 subnet.
        if attributes.is_attr_set(s.get('ipv6_ra_mode')):
            raise n_exc.InvalidInput(
                error_message=(_("ipv6_ra_mode is not valid when "
                                 "ip_version is 4")))
        if attributes.is_attr_set(s.get('ipv6_address_mode')):
            raise n_exc.InvalidInput(
                error_message=(_("ipv6_address_mode is not valid when "
                                 "ip_version is 4")))
    if ip_ver == 6:
        self._validate_ipv6_attributes(s, cur_subnet)
def _validate_subnet_for_pd(self, subnet):
    """Validates that subnet parameters are correct for IPv6 PD"""
    if (subnet.get('ip_version') != constants.IP_VERSION_6):
        reason = _("Prefix Delegation can only be used with IPv6 "
                   "subnets.")
        raise n_exc.BadRequest(resource='subnets', msg=reason)
    # Unset modes are acceptable; they are filled in later.
    mode_list = [constants.IPV6_SLAAC,
                 constants.DHCPV6_STATELESS,
                 attributes.ATTR_NOT_SPECIFIED]
    ra_mode = subnet.get('ipv6_ra_mode')
    if ra_mode not in mode_list:
        reason = _("IPv6 RA Mode must be SLAAC or Stateless for "
                   "Prefix Delegation.")
        raise n_exc.BadRequest(resource='subnets', msg=reason)
    address_mode = subnet.get('ipv6_address_mode')
    if address_mode not in mode_list:
        reason = _("IPv6 Address Mode must be SLAAC or Stateless for "
                   "Prefix Delegation.")
        raise n_exc.BadRequest(resource='subnets', msg=reason)
def _update_router_gw_ports(self, context, network, subnet):
    """Add the new external subnet to gateway ports of attached routers.

    Each router gatewayed on this network gets a fixed IP on the new
    subnet unless it already holds a stateful address of that version.
    """
    l3plugin = manager.NeutronManager.get_service_plugins().get(
        service_constants.L3_ROUTER_NAT)
    if l3plugin:
        gw_ports = self._get_router_gw_ports_by_network(context,
                                                        network['id'])
        router_ids = [p['device_id'] for p in gw_ports]
        ctx_admin = context.elevated()
        ext_subnets_dict = {s['id']: s for s in network['subnets']}
        for id in router_ids:
            router = l3plugin.get_router(ctx_admin, id)
            external_gateway_info = router['external_gateway_info']
            # Get all stateful (i.e. non-SLAAC/DHCPv6-stateless) fixed ips
            fips = [f for f in external_gateway_info['external_fixed_ips']
                    if not ipv6_utils.is_auto_address_subnet(
                        ext_subnets_dict[f['subnet_id']])]
            num_fips = len(fips)
            # Don't add the fixed IP to the port if it already
            # has a stateful fixed IP of the same IP version
            if num_fips > 1:
                continue
            if num_fips == 1 and netaddr.IPAddress(
                    fips[0]['ip_address']).version == subnet['ip_version']:
                continue
            external_gateway_info['external_fixed_ips'].append(
                {'subnet_id': subnet['id']})
            info = {'router': {'external_gateway_info':
                               external_gateway_info}}
            l3plugin.update_router(context, id, info)
def _create_subnet(self, context, subnet, subnetpool_id):
    """Allocate the subnet via IPAM and wire up dependent resources."""
    s = subnet['subnet']
    with context.session.begin(subtransactions=True):
        network = self._get_network(context, s["network_id"])
        subnet, ipam_subnet = self.ipam.allocate_subnet(context,
                                                        network,
                                                        s,
                                                        subnetpool_id)
    # External networks: give existing router gateway ports an IP on
    # the new subnet (outside the allocation transaction).
    if hasattr(network, 'external') and network.external:
        self._update_router_gw_ports(context,
                                     network,
                                     subnet)
    # If this subnet supports auto-addressing, then update any
    # internal ports on the network with addresses for this subnet.
    if ipv6_utils.is_auto_address_subnet(subnet):
        self.ipam.add_auto_addrs_on_network_ports(context, subnet,
                                                  ipam_subnet)
    return self._make_subnet_dict(subnet, context=context)
def _get_subnetpool_id(self, context, subnet):
    """Returns the subnetpool id for this request
    If the pool id was explicitly set in the request then that will be
    returned, even if it is None.
    Otherwise, the default pool for the IP version requested will be
    returned. This will either be a pool id or None (the default for each
    configuration parameter). This implies that the ip version must be
    either set implicitly with a specific cidr or explicitly using
    ip_version attribute.
    :param subnet: The subnet dict from the request
    """
    subnetpool_id = subnet.get('subnetpool_id',
                               attributes.ATTR_NOT_SPECIFIED)
    if subnetpool_id != attributes.ATTR_NOT_SPECIFIED:
        return subnetpool_id
    cidr = subnet.get('cidr')
    if attributes.is_attr_set(cidr):
        ip_version = netaddr.IPNetwork(cidr).version
    else:
        ip_version = subnet.get('ip_version')
        if not attributes.is_attr_set(ip_version):
            msg = _('ip_version must be specified in the absence of '
                    'cidr and subnetpool_id')
            raise n_exc.BadRequest(resource='subnets', msg=msg)
    # Prefix delegation takes precedence over configured default pools.
    if ip_version == 6 and cfg.CONF.ipv6_pd_enabled:
        return constants.IPV6_PD_POOL_ID
    subnetpool = self.get_default_subnetpool(context, ip_version)
    if subnetpool:
        return subnetpool['id']
    # Until the default_subnet_pool config options are removed in the N
    # release, check for them after get_default_subnetpool returns None.
    # TODO(john-davidge): Remove after Mitaka release.
    if ip_version == 4:
        return cfg.CONF.default_ipv4_subnet_pool
    return cfg.CONF.default_ipv6_subnet_pool
def create_subnet(self, context, subnet):
    """Create a subnet, resolving its CIDR from a pool if necessary."""
    s = subnet['subnet']
    cidr = s.get('cidr', attributes.ATTR_NOT_SPECIFIED)
    prefixlen = s.get('prefixlen', attributes.ATTR_NOT_SPECIFIED)
    has_cidr = attributes.is_attr_set(cidr)
    has_prefixlen = attributes.is_attr_set(prefixlen)
    if has_cidr and has_prefixlen:
        msg = _('cidr and prefixlen must not be supplied together')
        raise n_exc.BadRequest(resource='subnets', msg=msg)
    if has_cidr:
        # turn the CIDR into a proper subnet
        net = netaddr.IPNetwork(s['cidr'])
        subnet['subnet']['cidr'] = '%s/%s' % (net.network, net.prefixlen)
    s['tenant_id'] = self._get_tenant_id_for_create(context, s)
    subnetpool_id = self._get_subnetpool_id(context, s)
    if subnetpool_id:
        self.ipam.validate_pools_with_subnetpool(s)
        if subnetpool_id == constants.IPV6_PD_POOL_ID:
            if has_cidr:
                # We do not currently support requesting a specific
                # cidr with IPv6 prefix delegation. Set the subnetpool_id
                # to None and allow the request to continue as normal.
                subnetpool_id = None
                self._validate_subnet(context, s)
            else:
                # Placeholder CIDR until the delegated prefix arrives.
                prefix = constants.PROVISIONAL_IPV6_PD_PREFIX
                subnet['subnet']['cidr'] = prefix
                self._validate_subnet_for_pd(s)
    else:
        if not has_cidr:
            msg = _('A cidr must be specified in the absence of a '
                    'subnet pool')
            raise n_exc.BadRequest(resource='subnets', msg=msg)
        self._validate_subnet(context, s)
    return self._create_subnet(context, subnet, subnetpool_id)
def _update_allocation_pools(self, subnet):
    """Gets new allocation pools and formats them correctly"""
    version = subnet['ip_version']
    pools = self.ipam.generate_pools(subnet['cidr'],
                                     subnet['gateway_ip'])
    return [{'start': str(netaddr.IPAddress(pool.first, version)),
             'end': str(netaddr.IPAddress(pool.last, version))}
            for pool in pools]
    def update_subnet(self, context, id, subnet):
        """Update the subnet with new info.

        The change however will not be realized until the client renew the
        dns lease or we support gratuitous DHCP offers.
        """
        s = subnet['subnet']
        new_cidr = s.get('cidr')
        db_subnet = self._get_subnet(context, id)
        # Fill 'ip_version' and 'allocation_pools' fields with the current
        # value since _validate_subnet() expects subnet spec has 'ip_version'
        # and 'allocation_pools' fields.
        s['ip_version'] = db_subnet.ip_version
        s['cidr'] = db_subnet.cidr
        s['id'] = db_subnet.id
        s['tenant_id'] = db_subnet.tenant_id
        s['subnetpool_id'] = db_subnet.subnetpool_id
        self._validate_subnet(context, s, cur_subnet=db_subnet)
        db_pools = [netaddr.IPRange(p['first_ip'], p['last_ip'])
                    for p in db_subnet.allocation_pools]
        update_ports_needed = False
        if new_cidr and ipv6_utils.is_ipv6_pd_enabled(s):
            # This is an ipv6 prefix delegation-enabled subnet being given an
            # updated cidr by the process_prefix_update RPC
            s['cidr'] = new_cidr
            update_ports_needed = True
            net = netaddr.IPNetwork(s['cidr'], s['ip_version'])
            # Update gateway_ip and allocation pools based on new cidr
            s['gateway_ip'] = utils.get_first_host_ip(net, s['ip_version'])
            s['allocation_pools'] = self._update_allocation_pools(s)
        range_pools = None
        if s.get('allocation_pools') is not None:
            # Convert allocation pools to IPRange to simplify future checks
            range_pools = self.ipam.pools_to_ip_range(s['allocation_pools'])
            self.ipam.validate_allocation_pools(range_pools, s['cidr'])
            s['allocation_pools'] = range_pools
        # If either gateway_ip or allocation_pools were specified
        gateway_ip = s.get('gateway_ip', db_subnet.gateway_ip)
        gateway_ip_changed = gateway_ip != db_subnet.gateway_ip
        if gateway_ip_changed or s.get('allocation_pools') is not None:
            pools = range_pools if range_pools is not None else db_pools
            if gateway_ip:
                self.ipam.validate_gw_out_of_pools(gateway_ip, pools)
        if gateway_ip_changed:
            # Provide pre-update notification not to break plugins that don't
            # support gateway ip change
            kwargs = {'context': context, 'subnet_id': id,
                      'network_id': db_subnet.network_id}
            registry.notify(resources.SUBNET_GATEWAY, events.BEFORE_UPDATE,
                            self, **kwargs)
        with context.session.begin(subtransactions=True):
            subnet, changes = self.ipam.update_db_subnet(context, id, s,
                                                         db_pools)
            result = self._make_subnet_dict(subnet, context=context)
            # Keep up with fields that changed
            result.update(changes)
        if update_ports_needed:
            # Find ports that have not yet been updated
            # with an IP address by Prefix Delegation, and update them
            ports = self.get_ports(context)
            routers = []
            for port in ports:
                fixed_ips = []
                new_port = {'port': port}
                for ip in port['fixed_ips']:
                    if ip['subnet_id'] == s['id']:
                        fixed_ip = {'subnet_id': s['id']}
                        if "router_interface" in port['device_owner']:
                            routers.append(port['device_id'])
                            # Router interfaces get the subnet's gateway IP.
                            fixed_ip['ip_address'] = s['gateway_ip']
                        fixed_ips.append(fixed_ip)
                if fixed_ips:
                    new_port['port']['fixed_ips'] = fixed_ips
                    self.update_port(context, port['id'], new_port)
            # Send router_update to l3_agent
            if routers:
                l3_rpc_notifier = l3_rpc_agent_api.L3AgentNotifyAPI()
                l3_rpc_notifier.routers_updated(context, routers)
        if gateway_ip_changed:
            kwargs = {'context': context, 'subnet_id': id,
                      'network_id': db_subnet.network_id}
            registry.notify(resources.SUBNET_GATEWAY, events.AFTER_UPDATE,
                            self, **kwargs)
        return result
def _subnet_check_ip_allocations(self, context, subnet_id):
return (context.session.query(models_v2.IPAllocation).
filter_by(subnet_id=subnet_id).join(models_v2.Port).first())
def _subnet_get_user_allocation(self, context, subnet_id):
"""Check if there are any user ports on subnet and return first."""
# need to join with ports table as IPAllocation's port
# is not joined eagerly and thus producing query which yields
# incorrect results
return (context.session.query(models_v2.IPAllocation).
filter_by(subnet_id=subnet_id).join(models_v2.Port).
filter(~models_v2.Port.device_owner.
in_(AUTO_DELETE_PORT_OWNERS)).first())
def _subnet_check_ip_allocations_internal_router_ports(self, context,
subnet_id):
# Do not delete the subnet if IP allocations for internal
# router ports still exist
allocs = context.session.query(models_v2.IPAllocation).filter_by(
subnet_id=subnet_id).join(models_v2.Port).filter(
models_v2.Port.device_owner.in_(
constants.ROUTER_INTERFACE_OWNERS)
).first()
if allocs:
LOG.debug("Subnet %s still has internal router ports, "
"cannot delete", subnet_id)
raise n_exc.SubnetInUse(subnet_id=id)
    def delete_subnet(self, context, id):
        """Delete a subnet after verifying nothing still depends on it.

        Auto-deletable (network-owned) ports lose their allocations; any
        remaining user allocation aborts the delete with SubnetInUse.
        """
        with context.session.begin(subtransactions=True):
            subnet = self._get_subnet(context, id)
            # Make sure the subnet isn't used by other resources
            _check_subnet_not_used(context, id)
            # Delete all network owned ports
            qry_network_ports = (
                context.session.query(models_v2.IPAllocation).
                filter_by(subnet_id=subnet['id']).
                join(models_v2.Port))
            # Remove network owned ports, and delete IP allocations
            # for IPv6 addresses which were automatically generated
            # via SLAAC
            is_auto_addr_subnet = ipv6_utils.is_auto_address_subnet(subnet)
            if is_auto_addr_subnet:
                self._subnet_check_ip_allocations_internal_router_ports(
                    context, id)
            else:
                qry_network_ports = (
                    qry_network_ports.filter(models_v2.Port.device_owner.
                                             in_(AUTO_DELETE_PORT_OWNERS)))
            network_ports = qry_network_ports.all()
            if network_ports:
                for port in network_ports:
                    context.session.delete(port)
            # Check if there are more IP allocations, unless
            # is_auto_address_subnet is True. In that case the check is
            # unnecessary. This additional check not only would be wasteful
            # for this class of subnet, but is also error-prone since when
            # the isolation level is set to READ COMMITTED allocations made
            # concurrently will be returned by this query
            if not is_auto_addr_subnet:
                alloc = self._subnet_check_ip_allocations(context, id)
                if alloc:
                    LOG.info(_LI("Found port (%(port_id)s, %(ip)s) having IP "
                                 "allocation on subnet "
                                 "%(subnet)s, cannot delete"),
                             {'ip': alloc.ip_address,
                              'port_id': alloc.port_id,
                              'subnet': id})
                    raise n_exc.SubnetInUse(subnet_id=id)
            context.session.delete(subnet)
            # Delete related ipam subnet manually,
            # since there is no FK relationship
            self.ipam.delete_subnet(context, id)
def get_subnet(self, context, id, fields=None):
subnet = self._get_subnet(context, id)
return self._make_subnet_dict(subnet, fields, context=context)
    def get_subnets(self, context, filters=None, fields=None,
                    sorts=None, limit=None, marker=None,
                    page_reverse=False):
        # Thin delegation to the shared subnet-listing helper.
        return self._get_subnets(context, filters, fields, sorts, limit,
                                 marker, page_reverse)
    def get_subnets_count(self, context, filters=None):
        # Count matching subnets without building result dicts.
        return self._get_collection_count(context, models_v2.Subnet,
                                          filters=filters)
def get_subnets_by_network(self, context, network_id):
return [self._make_subnet_dict(subnet_db) for subnet_db in
self._get_subnets_by_network(context, network_id)]
def _create_subnetpool_prefix(self, context, cidr, subnetpool_id):
prefix_args = {'cidr': cidr, 'subnetpool_id': subnetpool_id}
subnetpool_prefix = models_v2.SubnetPoolPrefix(**prefix_args)
context.session.add(subnetpool_prefix)
    def _validate_address_scope_id(self, context, address_scope_id,
                                   subnetpool_id, sp_prefixes):
        """Validate the address scope before associating.

        Subnetpool can associate with an address scope if

        - the tenant user is the owner of both the subnetpool and
          address scope
        - the admin is associating the subnetpool with the shared
          address scope
        - there is no prefix conflict with the existing subnetpools
          associated with the address scope.
        """
        # Unset scope: nothing to validate.
        if not attributes.is_attr_set(address_scope_id):
            return
        if not self.is_address_scope_owned_by_tenant(context,
                                                     address_scope_id):
            raise n_exc.IllegalSubnetPoolAssociationToAddressScope(
                subnetpool_id=subnetpool_id, address_scope_id=address_scope_id)
        subnetpools = self._get_subnetpools_by_address_scope_id(
            context, address_scope_id)
        # Reject any prefix overlap with other pools already in the scope;
        # the pool being validated is skipped so updates don't self-conflict.
        new_set = netaddr.IPSet(sp_prefixes)
        for sp in subnetpools:
            if sp.id == subnetpool_id:
                continue
            sp_set = netaddr.IPSet([prefix['cidr'] for prefix in sp.prefixes])
            if sp_set.intersection(new_set):
                raise n_exc.AddressScopePrefixConflict()
def _check_subnetpool_update_allowed(self, context, subnetpool_id,
address_scope_id):
"""Check if the subnetpool can be updated or not.
If the subnetpool is associated to a shared address scope not owned
by the tenant, then the subnetpool cannot be updated.
"""
if not self.is_address_scope_owned_by_tenant(context,
address_scope_id):
msg = _("subnetpool %(subnetpool_id)s cannot be updated when"
" associated with shared address scope "
"%(address_scope_id)s") % {
'subnetpool_id': subnetpool_id,
'address_scope_id': address_scope_id}
raise n_exc.IllegalSubnetPoolUpdate(reason=msg)
def _check_default_subnetpool_exists(self, context, ip_version):
"""Check if a default already exists for the given IP version.
There can only be one default subnetpool for each IP family. Raise an
InvalidInput error if a default has already been set.
"""
if self.get_default_subnetpool(context, ip_version):
msg = _("A default subnetpool for this IP family has already "
"been set. Only one default may exist per IP family")
raise n_exc.InvalidInput(error_message=msg)
def create_subnetpool(self, context, subnetpool):
"""Create a subnetpool"""
sp = subnetpool['subnetpool']
sp_reader = subnet_alloc.SubnetPoolReader(sp)
if sp_reader.address_scope_id is attributes.ATTR_NOT_SPECIFIED:
sp_reader.address_scope_id = None
if sp_reader.is_default:
self._check_default_subnetpool_exists(context,
sp_reader.ip_version)
self._validate_address_scope_id(context, sp_reader.address_scope_id,
id, sp_reader.prefixes)
tenant_id = self._get_tenant_id_for_create(context, sp)
with context.session.begin(subtransactions=True):
pool_args = {'tenant_id': tenant_id,
'id': sp_reader.id,
'name': sp_reader.name,
'ip_version': sp_reader.ip_version,
'default_prefixlen':
sp_reader.default_prefixlen,
'min_prefixlen': sp_reader.min_prefixlen,
'max_prefixlen': sp_reader.max_prefixlen,
'is_default': sp_reader.is_default,
'shared': sp_reader.shared,
'default_quota': sp_reader.default_quota,
'address_scope_id': sp_reader.address_scope_id}
subnetpool = models_v2.SubnetPool(**pool_args)
context.session.add(subnetpool)
for prefix in sp_reader.prefixes:
self._create_subnetpool_prefix(context,
prefix,
subnetpool.id)
return self._make_subnetpool_dict(subnetpool)
def _update_subnetpool_prefixes(self, context, prefix_list, id):
with context.session.begin(subtransactions=True):
context.session.query(models_v2.SubnetPoolPrefix).filter_by(
subnetpool_id=id).delete()
for prefix in prefix_list:
model_prefix = models_v2.SubnetPoolPrefix(cidr=prefix,
subnetpool_id=id)
context.session.add(model_prefix)
    def _updated_subnetpool_dict(self, model, new_pool):
        """Merge *new_pool* request data onto the existing *model*.

        Prefixes may only grow: the original prefix set must be a subset of
        the requested one.  Returns the merged dict with a compacted prefix
        list; unspecified attributes fall back to the model's values.
        """
        updated = {}
        new_prefixes = new_pool.get('prefixes', attributes.ATTR_NOT_SPECIFIED)
        orig_prefixes = [str(x.cidr) for x in model['prefixes']]
        if new_prefixes is not attributes.ATTR_NOT_SPECIFIED:
            orig_set = netaddr.IPSet(orig_prefixes)
            new_set = netaddr.IPSet(new_prefixes)
            if not orig_set.issubset(new_set):
                msg = _("Existing prefixes must be "
                        "a subset of the new prefixes")
                raise n_exc.IllegalSubnetPoolPrefixUpdate(msg=msg)
            # Collapse adjacent/overlapping CIDRs into a minimal set.
            new_set.compact()
            updated['prefixes'] = [str(x.cidr) for x in new_set.iter_cidrs()]
        else:
            updated['prefixes'] = orig_prefixes
        for key in ['id', 'name', 'ip_version', 'min_prefixlen',
                    'max_prefixlen', 'default_prefixlen', 'is_default',
                    'shared', 'default_quota', 'address_scope_id']:
            self._write_key(key, updated, model, new_pool)
        return updated
def _write_key(self, key, update, orig, new_dict):
new_val = new_dict.get(key, attributes.ATTR_NOT_SPECIFIED)
if new_val is not attributes.ATTR_NOT_SPECIFIED:
update[key] = new_dict[key]
else:
update[key] = orig[key]
def update_subnetpool(self, context, id, subnetpool):
"""Update a subnetpool"""
new_sp = subnetpool['subnetpool']
with context.session.begin(subtransactions=True):
orig_sp = self._get_subnetpool(context, id)
updated = self._updated_subnetpool_dict(orig_sp, new_sp)
updated['tenant_id'] = orig_sp.tenant_id
reader = subnet_alloc.SubnetPoolReader(updated)
if reader.is_default and not orig_sp.is_default:
self._check_default_subnetpool_exists(context,
reader.ip_version)
if orig_sp.address_scope_id:
self._check_subnetpool_update_allowed(context, id,
orig_sp.address_scope_id)
self._validate_address_scope_id(context, reader.address_scope_id,
id, reader.prefixes)
orig_sp.update(self._filter_non_model_columns(
reader.subnetpool,
models_v2.SubnetPool))
self._update_subnetpool_prefixes(context,
reader.prefixes,
id)
for key in ['min_prefixlen', 'max_prefixlen', 'default_prefixlen']:
updated['key'] = str(updated[key])
return updated
def get_subnetpool(self, context, id, fields=None):
"""Retrieve a subnetpool."""
subnetpool = self._get_subnetpool(context, id)
return self._make_subnetpool_dict(subnetpool, fields)
def get_subnetpools(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
"""Retrieve list of subnetpools."""
marker_obj = self._get_marker_obj(context, 'subnetpool', limit, marker)
collection = self._get_collection(context, models_v2.SubnetPool,
self._make_subnetpool_dict,
filters=filters, fields=fields,
sorts=sorts,
limit=limit,
marker_obj=marker_obj,
page_reverse=page_reverse)
return collection
def get_default_subnetpool(self, context, ip_version):
"""Retrieve the default subnetpool for the given IP version."""
filters = {'is_default': [True],
'ip_version': [ip_version]}
subnetpool = self.get_subnetpools(context, filters=filters)
if subnetpool:
return subnetpool[0]
def delete_subnetpool(self, context, id):
"""Delete a subnetpool."""
with context.session.begin(subtransactions=True):
subnetpool = self._get_subnetpool(context, id)
subnets = self._get_subnets_by_subnetpool(context, id)
if subnets:
reason = _("Subnet pool has existing allocations")
raise n_exc.SubnetPoolDeleteError(reason=reason)
context.session.delete(subnetpool)
def _check_mac_addr_update(self, context, port, new_mac, device_owner):
if (device_owner and
device_owner.startswith(constants.DEVICE_OWNER_NETWORK_PREFIX)):
raise n_exc.UnsupportedPortDeviceOwner(
op=_("mac address update"), port_id=id,
device_owner=device_owner)
    def create_port_bulk(self, context, ports):
        # Delegate to the generic bulk-create helper with resource 'port'.
        return self._create_bulk('port', context, ports)
def _get_dns_domain(self):
if not cfg.CONF.dns_domain:
return ''
if cfg.CONF.dns_domain.endswith('.'):
return cfg.CONF.dns_domain
return '%s.' % cfg.CONF.dns_domain
def _get_request_dns_name(self, port):
dns_domain = self._get_dns_domain()
if ((dns_domain and dns_domain != DNS_DOMAIN_DEFAULT)):
return port.get('dns_name', '')
return ''
def _get_dns_names_for_port(self, context, ips, request_dns_name):
dns_assignment = []
dns_domain = self._get_dns_domain()
if request_dns_name:
request_fqdn = request_dns_name
if not request_dns_name.endswith('.'):
request_fqdn = '%s.%s' % (request_dns_name, dns_domain)
for ip in ips:
if request_dns_name:
hostname = request_dns_name
fqdn = request_fqdn
else:
hostname = 'host-%s' % ip['ip_address'].replace(
'.', '-').replace(':', '-')
fqdn = hostname
if dns_domain:
fqdn = '%s.%s' % (hostname, dns_domain)
dns_assignment.append({'ip_address': ip['ip_address'],
'hostname': hostname,
'fqdn': fqdn})
return dns_assignment
    def _create_port_with_mac(self, context, network_id, port_data,
                              mac_address):
        """Insert a Port row with the given MAC address.

        :raises: n_exc.MacAddressInUse if the (network, MAC) pair already
            exists (surfaces as a DB duplicate-entry error).
        """
        try:
            # since this method could either be used within or outside the
            # transaction, use convenience method to avoid passing a flag
            with db_api.autonested_transaction(context.session):
                db_port = models_v2.Port(mac_address=mac_address, **port_data)
                context.session.add(db_port)
                return db_port
        except db_exc.DBDuplicateEntry:
            raise n_exc.MacAddressInUse(net_id=network_id, mac=mac_address)
def _create_port(self, context, network_id, port_data):
max_retries = cfg.CONF.mac_generation_retries
for i in range(max_retries):
mac = self._generate_mac()
try:
return self._create_port_with_mac(
context, network_id, port_data, mac)
except n_exc.MacAddressInUse:
LOG.debug('Generated mac %(mac_address)s exists on '
'network %(network_id)s',
{'mac_address': mac, 'network_id': network_id})
LOG.error(_LE("Unable to generate mac address after %s attempts"),
max_retries)
raise n_exc.MacAddressGenerationFailure(net_id=network_id)
    def create_port(self, context, port):
        """Create a port, allocating a MAC address and fixed IPs as needed."""
        p = port['port']
        port_id = p.get('id') or uuidutils.generate_uuid()
        network_id = p['network_id']
        # NOTE(jkoelker) Get the tenant_id outside of the session to avoid
        # unneeded db action if the operation raises
        tenant_id = self._get_tenant_id_for_create(context, p)
        if p.get('device_owner'):
            self._enforce_device_owner_not_router_intf_or_device_id(
                context, p.get('device_owner'), p.get('device_id'), tenant_id)
        port_data = dict(tenant_id=tenant_id,
                         name=p['name'],
                         id=port_id,
                         network_id=network_id,
                         admin_state_up=p['admin_state_up'],
                         status=p.get('status', constants.PORT_STATUS_ACTIVE),
                         device_id=p['device_id'],
                         device_owner=p['device_owner'])
        if 'dns_name' in p:
            request_dns_name = self._get_request_dns_name(p)
            port_data['dns_name'] = request_dns_name
        with context.session.begin(subtransactions=True):
            # Ensure that the network exists.
            self._get_network(context, network_id)
            # Create the port
            if p['mac_address'] is attributes.ATTR_NOT_SPECIFIED:
                db_port = self._create_port(context, network_id, port_data)
                p['mac_address'] = db_port['mac_address']
            else:
                db_port = self._create_port_with_mac(
                    context, network_id, port_data, p['mac_address'])
            ips = self.ipam.allocate_ips_for_port_and_store(context, port,
                                                            port_id)
            if 'dns_name' in p:
                dns_assignment = []
                if ips:
                    dns_assignment = self._get_dns_names_for_port(
                        context, ips, request_dns_name)
        if 'dns_name' in p:
            db_port['dns_assignment'] = dns_assignment
        return self._make_port_dict(db_port, process_extensions=False)
    def _validate_port_for_update(self, context, db_port, new_port, new_mac):
        """Validate device-owner/device-id and MAC changes on a port update."""
        changed_owner = 'device_owner' in new_port
        current_owner = (new_port.get('device_owner') or
                         db_port['device_owner'])
        changed_device_id = new_port.get('device_id') != db_port['device_id']
        current_device_id = new_port.get('device_id') or db_port['device_id']
        # NOTE: precedence is (current_owner and changed_device_id) or
        # changed_owner -- any owner change is always re-validated.
        if current_owner and changed_device_id or changed_owner:
            self._enforce_device_owner_not_router_intf_or_device_id(
                context, current_owner, current_device_id,
                db_port['tenant_id'])
        if new_mac and new_mac != db_port['mac_address']:
            self._check_mac_addr_update(context, db_port,
                                        new_mac, current_owner)
def _get_dns_names_for_updated_port(self, context, original_ips,
original_dns_name, request_dns_name,
changes):
if changes.original or changes.add or changes.remove:
return self._get_dns_names_for_port(
context, changes.original + changes.add,
request_dns_name or original_dns_name)
if original_ips:
return self._get_dns_names_for_port(
context, original_ips,
request_dns_name or original_dns_name)
return []
    def update_port(self, context, id, port):
        """Update a port's attributes, fixed IPs and (optionally) its MAC."""
        new_port = port['port']
        with context.session.begin(subtransactions=True):
            port = self._get_port(context, id)
            if 'dns-integration' in self.supported_extension_aliases:
                # Capture the pre-update state needed to rebuild DNS names.
                original_ips = self._make_fixed_ip_dict(port['fixed_ips'])
                original_dns_name = port.get('dns_name', '')
                request_dns_name = self._get_request_dns_name(new_port)
                if not request_dns_name:
                    new_port['dns_name'] = ''
            new_mac = new_port.get('mac_address')
            self._validate_port_for_update(context, port, new_port, new_mac)
            changes = self.ipam.update_port_with_ips(context, port,
                                                     new_port, new_mac)
            if 'dns-integration' in self.supported_extension_aliases:
                dns_assignment = self._get_dns_names_for_updated_port(
                    context, original_ips, original_dns_name,
                    request_dns_name, changes)
        result = self._make_port_dict(port)
        # Keep up with fields that changed
        if changes.original or changes.add or changes.remove:
            result['fixed_ips'] = self._make_fixed_ip_dict(
                changes.original + changes.add)
        if 'dns-integration' in self.supported_extension_aliases:
            result['dns_assignment'] = dns_assignment
        return result
    def delete_port(self, context, id):
        # All allocation/port cleanup is delegated to the IPAM backend,
        # wrapped in a (sub)transaction for atomicity.
        with context.session.begin(subtransactions=True):
            self.ipam.delete_port(context, id)
def delete_ports_by_device_id(self, context, device_id, network_id=None):
query = (context.session.query(models_v2.Port.id)
.enable_eagerloads(False)
.filter(models_v2.Port.device_id == device_id))
if network_id:
query = query.filter(models_v2.Port.network_id == network_id)
port_ids = [p[0] for p in query]
for port_id in port_ids:
try:
self.delete_port(context, port_id)
except n_exc.PortNotFound:
# Don't raise if something else concurrently deleted the port
LOG.debug("Ignoring PortNotFound when deleting port '%s'. "
"The port has already been deleted.",
port_id)
def _get_dns_name_for_port_get(self, context, port):
if port['fixed_ips']:
return self._get_dns_names_for_port(
context, port['fixed_ips'],
port['dns_name'])
return []
def get_port(self, context, id, fields=None):
port = self._get_port(context, id)
if (('dns-integration' in self.supported_extension_aliases and
'dns_name' in port)):
port['dns_assignment'] = self._get_dns_name_for_port_get(context,
port)
return self._make_port_dict(port, fields)
    def _get_ports_query(self, context, filters=None, sorts=None, limit=None,
                         marker_obj=None, page_reverse=False):
        """Build the (optionally filtered/sorted/paginated) ports query.

        'fixed_ips' filters (ip_address / subnet_id) require a join with
        IPAllocation and are handled separately from the generic column
        filters.
        """
        Port = models_v2.Port
        IPAllocation = models_v2.IPAllocation
        if not filters:
            filters = {}
        query = self._model_query(context, Port)
        fixed_ips = filters.pop('fixed_ips', {})
        ip_addresses = fixed_ips.get('ip_address')
        subnet_ids = fixed_ips.get('subnet_id')
        if ip_addresses or subnet_ids:
            query = query.join(Port.fixed_ips)
            if ip_addresses:
                query = query.filter(IPAllocation.ip_address.in_(ip_addresses))
            if subnet_ids:
                query = query.filter(IPAllocation.subnet_id.in_(subnet_ids))
        query = self._apply_filters_to_query(query, Port, filters, context)
        if limit and page_reverse and sorts:
            # Invert the sort order; get_ports() reverses the rows back.
            sorts = [(s[0], not s[1]) for s in sorts]
        query = sqlalchemyutils.paginate_query(query, Port, limit,
                                               sorts, marker_obj)
        return query
    def get_ports(self, context, filters=None, fields=None,
                  sorts=None, limit=None, marker=None,
                  page_reverse=False):
        """List ports, attaching dns_assignment when DNS is supported."""
        marker_obj = self._get_marker_obj(context, 'port', limit, marker)
        query = self._get_ports_query(context, filters=filters,
                                      sorts=sorts, limit=limit,
                                      marker_obj=marker_obj,
                                      page_reverse=page_reverse)
        items = []
        for c in query:
            if (('dns-integration' in self.supported_extension_aliases and
                 'dns_name' in c)):
                c['dns_assignment'] = self._get_dns_name_for_port_get(context,
                                                                      c)
            items.append(self._make_port_dict(c, fields))
        if limit and page_reverse:
            # The query was sorted in inverse order; restore caller order.
            items.reverse()
        return items
    def get_ports_count(self, context, filters=None):
        # Count without materializing port dicts; reuses the filtered query.
        return self._get_ports_query(context, filters).count()
    def _enforce_device_owner_not_router_intf_or_device_id(self, context,
                                                           device_owner,
                                                           device_id,
                                                           tenant_id):
        """Prevent tenants from replacing the device id of router ports with
        a router uuid belonging to another tenant.
        """
        if device_owner not in constants.ROUTER_INTERFACE_OWNERS:
            return
        if not context.is_admin:
            # check to make sure device_id does not match another tenants
            # router.
            if device_id:
                if hasattr(self, 'get_router'):
                    try:
                        ctx_admin = context.elevated()
                        router = self.get_router(ctx_admin, device_id)
                    except l3.RouterNotFound:
                        return
                else:
                    # Core plugin without L3: ask the L3 service plugin.
                    l3plugin = (
                        manager.NeutronManager.get_service_plugins().get(
                            service_constants.L3_ROUTER_NAT))
                    if l3plugin:
                        try:
                            ctx_admin = context.elevated()
                            router = l3plugin.get_router(ctx_admin,
                                                         device_id)
                        except l3.RouterNotFound:
                            return
                    else:
                        # raise as extension doesn't support L3 anyways.
                        raise n_exc.DeviceIDNotOwnedByTenant(
                            device_id=device_id)
                if tenant_id != router['tenant_id']:
                    raise n_exc.DeviceIDNotOwnedByTenant(device_id=device_id)
|
apache-2.0
| -2,228,443,469,237,160,700
| 46.201748
| 79
| 0.556737
| false
| 4.275498
| false
| false
| false
|
tmtowtdi/django
|
mysite/polls/admin.py
|
1
|
1401
|
from django.contrib import admin
from polls.models import Question, Choice
class ChoiceInLine( admin.TabularInline ):
    """Tabular inline allowing Choices to be edited on the Question form."""
    model = Choice
    ### Number of extra blank Choice forms shown by default.
    extra = 3
class QuestionAdmin( admin.ModelAdmin ):
    """Admin options controlling how Question objects are listed and edited."""
    ### By default, the Question object's str() is displayed on the "list of
    ### questions" page. Tell it to display a little more data.
    list_display = ( 'question_text', 'pub_date', 'was_published_recently' )
    ### This adds a sidebar div to the right that lets the user filter the
    ### displayed questions - only show those published today, this week, this
    ### month, etc.
    list_filter = [ 'pub_date' ]
    ### Adds a search box up top, searching over question_text.
    search_fields = [ 'question_text' ]
    ### The questions displayed will be automatically paginated, by default
    ### 100 per page. We can change that number per page to whatever we want.
    list_per_page = 50
    ### See polls/models.py for some settings on how we're controlling display
    ### and sorting of the was_published_recently column.
    ### Re-order the fields as they display on the admin page, adding a
    ### fieldset; the 'collapse' class renders the date section collapsed.
    fieldsets = [
        (None, { 'fields': ['question_text'] }),
        ('Date Information', { 'fields': ['pub_date' ], 'classes': ['collapse'] }),
    ]
    ### Edit related Choice objects inline on the Question page.
    inlines = [ ChoiceInLine ]
admin.site.register( Question, QuestionAdmin )
|
artistic-2.0
| -2,186,924,659,745,510,700
| 33.170732
| 91
| 0.630978
| false
| 4.132743
| false
| false
| false
|
bsipocz/astropy
|
astropy/utils/misc.py
|
1
|
40318
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
A "grab bag" of relatively small general-purpose utilities that don't have
a clear module/package to live in.
"""
import abc
import copy
import contextlib
import difflib
import inspect
import json
import os
import signal
import sys
import traceback
import unicodedata
import locale
import threading
import re
from itertools import zip_longest
from contextlib import contextmanager
from collections import defaultdict, OrderedDict
from astropy.utils.decorators import deprecated
# Names exported via ``from astropy.utils.misc import *``.
__all__ = ['isiterable', 'silence', 'format_exception', 'NumpyRNGContext',
           'find_api_page', 'is_path_hidden', 'walk_skip_hidden',
           'JsonCustomEncoder', 'indent', 'InheritDocstrings',
           'OrderedDescriptor', 'OrderedDescriptorContainer', 'set_locale',
           'ShapedLikeNDArray', 'check_broadcast', 'IncompatibleShapeError',
           'dtype_bytes_or_chars']
def isiterable(obj):
    """Returns `True` if the given object is iterable."""
    try:
        iter(obj)
    except TypeError:
        return False
    return True
def indent(s, shift=1, width=4):
    """Indent a block of text.  The indentation is applied to each line.

    Parameters
    ----------
    s : str
        The text to indent.
    shift : int
        Number of indentation levels to apply.
    width : int
        Number of spaces per indentation level.

    Returns
    -------
    str
        The indented text.  Empty lines stay empty (no trailing spaces).
    """
    indented = '\n'.join(' ' * (width * shift) + l if l else ''
                         for l in s.splitlines())
    # Preserve a trailing newline if the input had one.  Using endswith()
    # also fixes an IndexError the previous ``s[-1]`` check raised for ''.
    if s.endswith('\n'):
        indented += '\n'

    return indented
class _DummyFile:
"""A noop writeable object."""
def write(self, s):
pass
@contextlib.contextmanager
def silence():
    """A context manager that silences sys.stdout and sys.stderr."""
    old_stdout = sys.stdout
    old_stderr = sys.stderr
    sys.stdout = _DummyFile()
    sys.stderr = _DummyFile()
    try:
        yield
    finally:
        # Restore the real streams even when the managed block raises;
        # previously an exception left the dummy streams installed.
        sys.stdout = old_stdout
        sys.stderr = old_stderr
def format_exception(msg, *args, **kwargs):
    """
    Given an exception message string, uses new-style formatting arguments
    ``{filename}``, ``{lineno}``, ``{func}`` and/or ``{text}`` to fill in
    information about the exception that occurred.  For example:

        try:
            1/0
        except ZeroDivisionError:
            raise ZeroDivisionError(
                format_exception('A divide by zero occurred in {filename} '
                                 'at line {lineno} of function {func}.'))

    Any additional positional or keyword arguments passed to this function are
    also used to format the message.

    .. note::
        This uses `sys.exc_info` to gather up the information needed to fill
        in the formatting arguments. Since `sys.exc_info` is not carried
        outside a handled exception, it's not wise to use this
        outside of an ``except`` clause - if it is, this will substitute
        '<unknown>' for the 4 formatting arguments.
    """
    # Only the innermost traceback entry of the active exception is used.
    tb = traceback.extract_tb(sys.exc_info()[2], limit=1)
    if len(tb) > 0:
        filename, lineno, func, text = tb[0]
    else:
        filename = lineno = func = text = '<unknown>'
    return msg.format(*args, filename=filename, lineno=lineno, func=func,
                      text=text, **kwargs)
class NumpyRNGContext:
    """
    A context manager (for use with the ``with`` statement) that will seed the
    numpy random number generator (RNG) to a specific value, and then restore
    the RNG state back to whatever it was before.

    This is primarily intended for use in the astropy testing suite, but it
    may be useful in ensuring reproducibility of Monte Carlo simulations in a
    science context.

    Parameters
    ----------
    seed : int
        The value to use to seed the numpy RNG

    Examples
    --------
    A typical use case might be::

        with NumpyRNGContext(<some seed value you pick>):
            from numpy import random

            randarr = random.randn(100)
            ... run your test using `randarr` ...

        # Any code using numpy.random at this indent level will act just as
        # it would have if it had been before the with statement - e.g.
        # whatever the default seed is.
    """

    def __init__(self, seed):
        self.seed = seed

    def __enter__(self):
        import numpy.random

        # Remember the global RNG state so it can be restored on exit.
        self.startstate = numpy.random.get_state()
        numpy.random.seed(self.seed)

    def __exit__(self, exc_type, exc_value, traceback):
        import numpy.random

        numpy.random.set_state(self.startstate)
def find_api_page(obj, version=None, openinbrowser=True, timeout=None):
    """
    Determines the URL of the API page for the specified object, and
    optionally open that page in a web browser.

    .. note::
        You must be connected to the internet for this to function even if
        ``openinbrowser`` is `False`, unless you provide a local version of
        the documentation to ``version`` (e.g., ``file:///path/to/docs``).

    Parameters
    ----------
    obj
        The object to open the docs for or its fully-qualified name
        (as a str).
    version : str
        The doc version - either a version number like '0.1', 'dev' for
        the development/latest docs, or a URL to point to a specific
        location that should be the *base* of the documentation. Defaults to
        latest if you are on aren't on a release, otherwise, the version you
        are on.
    openinbrowser : bool
        If `True`, the `webbrowser` package will be used to open the doc
        page in a new web browser window.
    timeout : number, optional
        The number of seconds to wait before timing-out the query to
        the astropy documentation.  If not given, the default python
        stdlib timeout will be used.

    Returns
    -------
    url : str
        The loaded URL

    Raises
    ------
    ValueError
        If the documentation can't be found
    """
    import webbrowser
    import urllib.request
    from zlib import decompress

    # Resolve ``obj`` to a fully-qualified dotted name.
    if (not isinstance(obj, str) and
        hasattr(obj, '__module__') and
        hasattr(obj, '__name__')):
        obj = obj.__module__ + '.' + obj.__name__
    elif inspect.ismodule(obj):
        obj = obj.__name__
    if version is None:
        from astropy import version
        if version.release:
            version = 'v' + version.version
        else:
            version = 'dev'
    # ``version`` may be a full base URL, 'dev'/'latest', or a release tag.
    if '://' in version:
        if version.endswith('index.html'):
            baseurl = version[:-10]
        elif version.endswith('/'):
            baseurl = version
        else:
            baseurl = version + '/'
    elif version == 'dev' or version == 'latest':
        baseurl = 'http://devdocs.astropy.org/'
    else:
        baseurl = f'https://docs.astropy.org/en/{version}/'
    # Custom request headers; see
    # https://github.com/astropy/astropy/issues/8990
    req = urllib.request.Request(
        baseurl + 'objects.inv', headers={'User-Agent': f'Astropy/{version}'})
    if timeout is None:
        uf = urllib.request.urlopen(req)
    else:
        uf = urllib.request.urlopen(req, timeout=timeout)
    try:
        oiread = uf.read()
        # need to first read/remove the first four lines, which have info before
        # the compressed section with the actual object inventory
        idx = -1
        headerlines = []
        for _ in range(4):
            oldidx = idx
            idx = oiread.index(b'\n', oldidx + 1)
            headerlines.append(oiread[(oldidx+1):idx].decode('utf-8'))
        # intersphinx version line, project name, and project version
        ivers, proj, vers, compr = headerlines
        if 'The remainder of this file is compressed using zlib' not in compr:
            # Bug fix: the two string fragments previously concatenated
            # without a separating space ("...seem to bethe usual...").
            raise ValueError('The file downloaded from {} does not seem to '
                             'be the usual Sphinx objects.inv format. Maybe '
                             'it has changed?'.format(baseurl + 'objects.inv'))
        compressed = oiread[(idx+1):]
    finally:
        uf.close()
    # Each inventory line: "<name> <domain:role> <prio> <uri> <dispname>";
    # a trailing '$' in the uri is shorthand for the object name itself.
    decompressed = decompress(compressed).decode('utf-8')
    resurl = None
    for l in decompressed.strip().splitlines():
        ls = l.split()
        name = ls[0]
        loc = ls[3]
        if loc.endswith('$'):
            loc = loc[:-1] + name
        if name == obj:
            resurl = baseurl + loc
            break
    if resurl is None:
        raise ValueError(f'Could not find the docs for the object {obj}')
    elif openinbrowser:
        webbrowser.open(resurl)
    return resurl
def signal_number_to_name(signum):
    """
    Return the name of the OS signal with number ``signum``.

    Signal numbers and names are platform specific, so the reverse
    mapping is rebuilt from the stdlib `signal` module's namespace on
    each call.  If ``signum`` is not a known signal number,
    ``'UNKNOWN'`` is returned.
    """
    # Invert signal's SIG* attributes into a number -> name lookup.
    # Filtering only on the 'SIG' prefix (as the original does) also
    # picks up SIG_IGN/SIG_DFL; later duplicates overwrite earlier ones.
    number_to_name = {number: name
                      for name, number in signal.__dict__.items()
                      if name.startswith('SIG')}
    return number_to_name.get(signum, 'UNKNOWN')
if sys.platform == 'win32':
    import ctypes

    def _has_hidden_attribute(filepath):
        """
        Return `True` if *filepath* carries the MS-Windows "hidden"
        file attribute.  Based on a post here:
        https://stackoverflow.com/questions/284115/cross-platform-hidden-file-detection
        """
        # The Win32 wide-char API only accepts text paths.
        if isinstance(filepath, bytes):
            filepath = filepath.decode(sys.getfilesystemencoding())
        try:
            attrs = ctypes.windll.kernel32.GetFileAttributesW(filepath)
        except AttributeError:
            # ctypes.windll / kernel32 unavailable; treat as not hidden.
            return False
        # FILE_ATTRIBUTE_HIDDEN is bit 0x2; GetFileAttributesW yields -1
        # (INVALID_FILE_ATTRIBUTES) when the call fails.
        return attrs != -1 and bool(attrs & 2)
else:
    def _has_hidden_attribute(filepath):
        # Only MS-Windows has a "hidden" file attribute; elsewhere no
        # path is hidden by attribute (dot-files are handled separately).
        return False
def is_path_hidden(filepath):
    """
    Determine if a given file or directory is hidden.

    Parameters
    ----------
    filepath : str or bytes
        The path to a file or directory.

    Returns
    -------
    hidden : bool
        `True` if the file is hidden: its base name starts with a dot,
        or (on MS-Windows) it carries the hidden file attribute.
    """
    basename = os.path.basename(os.path.abspath(filepath))
    # Match the dot literal to the path's type (bytes vs. str).
    dot = b'.' if isinstance(basename, bytes) else '.'
    return basename.startswith(dot) or _has_hidden_attribute(filepath)
def walk_skip_hidden(top, onerror=None, followlinks=False):
    """
    A wrapper for `os.walk` that skips hidden files and directories.

    This function does not have the parameter ``topdown`` from
    `os.walk`: the directories must always be recursed top-down when
    using this function.

    See also
    --------
    os.walk : For a description of the parameters
    """
    walker = os.walk(top, topdown=True, onerror=onerror,
                     followlinks=followlinks)
    for root, dirs, files in walker:
        # os.walk only honors pruning when ``dirs`` is mutated in place,
        # hence the slice assignments rather than rebinding the names.
        visible_dirs = [d for d in dirs if not is_path_hidden(d)]
        visible_files = [f for f in files if not is_path_hidden(f)]
        dirs[:] = visible_dirs
        files[:] = visible_files
        yield root, dirs, files
class JsonCustomEncoder(json.JSONEncoder):
    """Support for data types that JSON default encoder
    does not do.

    This includes:

        * Numpy array or number
        * Complex number
        * Set
        * Bytes
        * astropy.UnitBase
        * astropy.Quantity

    Examples
    --------
    >>> import json
    >>> import numpy as np
    >>> from astropy.utils.misc import JsonCustomEncoder
    >>> json.dumps(np.arange(3), cls=JsonCustomEncoder)
    '[0, 1, 2]'
    """

    def default(self, obj):
        # Imported lazily so merely importing this module does not pull
        # in units/numpy.
        from astropy import units as u
        import numpy as np
        if isinstance(obj, u.Quantity):
            return dict(value=obj.value, unit=obj.unit.to_string())
        if isinstance(obj, (np.number, np.ndarray)):
            return obj.tolist()
        elif isinstance(obj, complex):
            return [obj.real, obj.imag]
        elif isinstance(obj, set):
            return list(obj)
        elif isinstance(obj, bytes):  # pragma: py3
            return obj.decode()
        elif isinstance(obj, (u.UnitBase, u.FunctionUnitBase)):
            if obj == u.dimensionless_unscaled:
                # Bug fix: previously the marker string was assigned to
                # ``obj`` and execution fell through to
                # ``JSONEncoder.default`` below, which unconditionally
                # raises TypeError.  Return the marker directly so the
                # dimensionless unit actually serializes.
                return 'dimensionless_unit'
            else:
                return obj.to_string()

        # Anything unhandled: let the base class raise TypeError.
        return json.JSONEncoder.default(self, obj)
def strip_accents(s):
    """
    Remove accents from a Unicode string.

    This helps with matching "ångström" to "angstrom", for example.
    """
    # Decompose to NFD so every accent becomes a separate combining
    # character (category 'Mn'), then drop those combining marks.
    decomposed = unicodedata.normalize('NFD', s)
    return ''.join(ch for ch in decomposed
                   if unicodedata.category(ch) != 'Mn')
def did_you_mean(s, candidates, n=3, cutoff=0.8, fix=None):
    """
    When a string isn't found in a set of candidates, we can be nice
    to provide a list of alternatives in the exception.  This
    convenience function helps to format that part of the exception.

    Parameters
    ----------
    s : str
    candidates : sequence of str or dict of str keys
    n : int
        The maximum number of results to include.  See
        `difflib.get_close_matches`.
    cutoff : float
        In the range [0, 1]. Possibilities that don't score at least
        that similar to word are ignored.  See
        `difflib.get_close_matches`.
    fix : callable
        A callable to modify the results after matching.  It should
        take a single string and return a sequence of strings
        containing the fixed matches.

    Returns
    -------
    message : str
        Returns the string "Did you mean X, Y, or Z?", or the empty
        string if no alternatives were found.
    """
    if isinstance(s, str):
        s = strip_accents(s)
    folded = s.lower()

    # Group every capitalization variant under its lower-case spelling.
    by_lower = {}
    for candidate in candidates:
        by_lower.setdefault(candidate.lower(), []).append(candidate)

    # Heuristic: first try "singularizing" the word; otherwise fall back
    # to difflib's fuzzy matching against the lower-case names.
    if folded.endswith('s') and folded[:-1] in by_lower:
        close = [folded[:-1]]
    else:
        close = difflib.get_close_matches(folded, by_lower, n=n,
                                          cutoff=cutoff)

    if not close:
        return ''

    # Expand each lower-case hit back to all of its original spellings.
    suggestions = set()
    for hit in close:
        suggestions.update(by_lower[hit])

    if fix is not None:
        fixed = []
        for suggestion in suggestions:
            fixed.extend(fix(suggestion))
        suggestions = fixed

    suggestions = sorted(set(suggestions))
    if len(suggestions) == 1:
        listing = suggestions[0]
    else:
        listing = (', '.join(suggestions[:-1]) + ' or ' +
                   suggestions[-1])
    return f'Did you mean {listing}?'
@deprecated('4.0', alternative='Sphinx>=1.7 automatically inherits docstring')
class InheritDocstrings(type):
    """
    This metaclass makes methods of a class automatically have their
    docstrings filled in from the methods they override in the base
    class.

    If the class uses multiple inheritance, the docstring will be
    chosen from the first class in the bases list, in the same way as
    methods are normally resolved in Python.  If this results in
    selecting the wrong docstring, the docstring will need to be
    explicitly included on the method.

    For example::

        >>> import warnings
        >>> from astropy.utils.misc import InheritDocstrings
        >>> with warnings.catch_warnings():
        ...     # Ignore deprecation warning
        ...     warnings.simplefilter('ignore')
        ...     class A(metaclass=InheritDocstrings):
        ...         def wiggle(self):
        ...             "Wiggle the thingamajig"
        ...             pass
        ...     class B(A):
        ...         def wiggle(self):
        ...             pass
        >>> B.wiggle.__doc__
        u'Wiggle the thingamajig'
    """

    def __init__(cls, name, bases, dct):
        def is_public_member(key):
            # "Public" means either a dunder name (e.g. ``__call__``,
            # length > 4 excludes bare ``__``) or any name not starting
            # with an underscore.
            return (
                (key.startswith('__') and key.endswith('__')
                 and len(key) > 4) or
                not key.startswith('_'))

        # For each public function/descriptor defined directly in this
        # class body without a docstring, copy the docstring of the
        # same-named attribute from the first base class in the MRO that
        # defines one.
        for key, val in dct.items():
            if ((inspect.isfunction(val) or inspect.isdatadescriptor(val)) and
                    is_public_member(key) and
                    val.__doc__ is None):
                for base in cls.__mro__[1:]:
                    super_method = getattr(base, key, None)
                    if super_method is not None:
                        val.__doc__ = super_method.__doc__
                        break

        super().__init__(name, bases, dct)
class OrderedDescriptor(metaclass=abc.ABCMeta):
    """
    Base class for descriptors whose order in the class body should be
    preserved.  Intended for use in concert with the
    `OrderedDescriptorContainer` metaclass.

    Subclasses of `OrderedDescriptor` must define a value for a class attribute
    called ``_class_attribute_``.  This is the name of a class attribute on the
    *container* class for these descriptors, which will be set to an
    `~collections.OrderedDict` at class creation time.  This
    `~collections.OrderedDict` will contain a mapping of all class attributes
    that were assigned instances of the `OrderedDescriptor` subclass, to the
    instances themselves.  See the documentation for
    `OrderedDescriptorContainer` for a concrete example.

    Optionally, subclasses of `OrderedDescriptor` may define a value for a
    class attribute called ``_name_attribute_``.  This should be the name of
    an attribute on instances of the subclass.  When specified, during
    creation of a class containing these descriptors, the name attribute on
    each instance will be set to the name of the class attribute it was
    assigned to on the class.

    .. note::

        Although this class is intended for use with *descriptors* (i.e.
        classes that define any of the ``__get__``, ``__set__``, or
        ``__delete__`` magic methods), this base class is not itself a
        descriptor, and technically this could be used for classes that are
        not descriptors too.  However, use with descriptors is the original
        intended purpose.
    """

    # This id increments for each OrderedDescriptor instance created, so they
    # are always ordered in the order they were created.  Class bodies are
    # guaranteed to be executed from top to bottom.  Not sure if this is
    # thread-safe though.
    _nextid = 1

    @property
    @abc.abstractmethod
    def _class_attribute_(self):
        """
        Subclasses should define this attribute to the name of an attribute on
        classes containing this subclass.  That attribute will contain the mapping
        of all instances of that `OrderedDescriptor` subclass defined in the class
        body.  If the same descriptor needs to be used with different classes,
        each with different names of this attribute, multiple subclasses will be
        needed.
        """

    _name_attribute_ = None
    """
    Subclasses may optionally define this attribute to specify the name of an
    attribute on instances of the class that should be filled with the
    instance's attribute name at class creation time.
    """

    def __init__(self, *args, **kwargs):
        # The _nextid attribute is shared across all subclasses so that
        # different subclasses of OrderedDescriptors can be sorted correctly
        # between themselves
        # (``__order`` is name-mangled to ``_OrderedDescriptor__order``,
        # so subclass attributes cannot accidentally shadow it.)
        self.__order = OrderedDescriptor._nextid
        OrderedDescriptor._nextid += 1
        super().__init__()

    def __lt__(self, other):
        """
        Defined for convenient sorting of `OrderedDescriptor` instances, which
        are defined to sort in their creation order.
        """
        # Only instances that both went through __init__ (and hence have
        # ``__order``) can be compared; anything else is NotImplemented.
        if (isinstance(self, OrderedDescriptor) and
                isinstance(other, OrderedDescriptor)):
            try:
                return self.__order < other.__order
            except AttributeError:
                raise RuntimeError(
                    'Could not determine ordering for {} and {}; at least '
                    'one of them is not calling super().__init__ in its '
                    '__init__.'.format(self, other))
        else:
            return NotImplemented
class OrderedDescriptorContainer(type):
    """
    Classes should use this metaclass if they wish to use `OrderedDescriptor`
    attributes, which are class attributes that "remember" the order in which
    they were defined in the class body.

    Every subclass of `OrderedDescriptor` has an attribute called
    ``_class_attribute_``.  For example, if we have

    .. code:: python

        class ExampleDecorator(OrderedDescriptor):
            _class_attribute_ = '_examples_'

    Then when a class with the `OrderedDescriptorContainer` metaclass is
    created, it will automatically be assigned a class attribute ``_examples_``
    referencing an `~collections.OrderedDict` containing all instances of
    ``ExampleDecorator`` defined in the class body, mapped to by the names of
    the attributes they were assigned to.

    When subclassing a class with this metaclass, the descriptor dict (i.e.
    ``_examples_`` in the above example) will *not* contain descriptors
    inherited from the base class.  That is, this only works by default with
    descriptors explicitly defined in the class body.  However, the subclass
    *may* define an attribute ``_inherit_descriptors_`` which lists
    `OrderedDescriptor` classes that *should* be added from base classes.
    See the examples section below for an example of this.

    Examples
    --------

    >>> from astropy.utils import OrderedDescriptor, OrderedDescriptorContainer
    >>> class TypedAttribute(OrderedDescriptor):
    ...     \"\"\"
    ...     Attributes that may only be assigned objects of a specific type,
    ...     or subclasses thereof.  For some reason we care about their order.
    ...     \"\"\"
    ...
    ...     _class_attribute_ = 'typed_attributes'
    ...     _name_attribute_ = 'name'
    ...     # A default name so that instances not attached to a class can
    ...     # still be repr'd; useful for debugging
    ...     name = '<unbound>'
    ...
    ...     def __init__(self, type):
    ...         # Make sure not to forget to call the super __init__
    ...         super().__init__()
    ...         self.type = type
    ...
    ...     def __get__(self, obj, objtype=None):
    ...         if obj is None:
    ...             return self
    ...         if self.name in obj.__dict__:
    ...             return obj.__dict__[self.name]
    ...         else:
    ...             raise AttributeError(self.name)
    ...
    ...     def __set__(self, obj, value):
    ...         if not isinstance(value, self.type):
    ...             raise ValueError('{0}.{1} must be of type {2!r}'.format(
    ...                 obj.__class__.__name__, self.name, self.type))
    ...         obj.__dict__[self.name] = value
    ...
    ...     def __delete__(self, obj):
    ...         if self.name in obj.__dict__:
    ...             del obj.__dict__[self.name]
    ...         else:
    ...             raise AttributeError(self.name)
    ...
    ...     def __repr__(self):
    ...         if isinstance(self.type, tuple) and len(self.type) > 1:
    ...             typestr = '({0})'.format(
    ...                 ', '.join(t.__name__ for t in self.type))
    ...         else:
    ...             typestr = self.type.__name__
    ...         return '<{0}(name={1}, type={2})>'.format(
    ...             self.__class__.__name__, self.name, typestr)
    ...

    Now let's create an example class that uses this ``TypedAttribute``::

        >>> class Point2D(metaclass=OrderedDescriptorContainer):
        ...     x = TypedAttribute((float, int))
        ...     y = TypedAttribute((float, int))
        ...
        ...     def __init__(self, x, y):
        ...         self.x, self.y = x, y
        ...
        >>> p1 = Point2D(1.0, 2.0)
        >>> p1.x
        1.0
        >>> p1.y
        2.0
        >>> p2 = Point2D('a', 'b')  # doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ...
        ValueError: Point2D.x must be of type (float, int)

    We see that ``TypedAttribute`` works more or less as advertised, but
    there's nothing special about that.  Let's see what
    `OrderedDescriptorContainer` did for us::

        >>> Point2D.typed_attributes
        OrderedDict([('x', <TypedAttribute(name=x, type=(float, int))>),
        ('y', <TypedAttribute(name=y, type=(float, int))>)])

    If we create a subclass, it does *not* by default add inherited descriptors
    to ``typed_attributes``::

        >>> class Point3D(Point2D):
        ...     z = TypedAttribute((float, int))
        ...
        >>> Point3D.typed_attributes
        OrderedDict([('z', <TypedAttribute(name=z, type=(float, int))>)])

    However, if we specify ``_inherit_descriptors_`` from ``Point2D`` then
    it will do so::

        >>> class Point3D(Point2D):
        ...     _inherit_descriptors_ = (TypedAttribute,)
        ...     z = TypedAttribute((float, int))
        ...
        >>> Point3D.typed_attributes
        OrderedDict([('x', <TypedAttribute(name=x, type=(float, int))>),
        ('y', <TypedAttribute(name=y, type=(float, int))>),
        ('z', <TypedAttribute(name=z, type=(float, int))>)])

    .. note::

        Hopefully it is clear from these examples that this construction
        also allows a class of type `OrderedDescriptorContainer` to use
        multiple different `OrderedDescriptor` classes simultaneously.
    """

    _inherit_descriptors_ = ()

    def __init__(cls, cls_name, bases, members):
        # Maps each OrderedDescriptor "base" class (the one defining
        # _class_attribute_) to the (instance, attribute-name) pairs found.
        descriptors = defaultdict(list)
        seen = set()
        inherit_descriptors = ()
        # Cache: descriptor class -> class in its MRO that defines
        # _class_attribute_ directly.
        descr_bases = {}

        for mro_cls in cls.__mro__:
            for name, obj in mro_cls.__dict__.items():
                if name in seen:
                    # Checks if we've already seen an attribute of the given
                    # name (if so it will override anything of the same name in
                    # any base class)
                    continue

                seen.add(name)

                if (not isinstance(obj, OrderedDescriptor) or
                        (inherit_descriptors and
                         not isinstance(obj, inherit_descriptors))):
                    # The second condition applies when checking any
                    # subclasses, to see if we can inherit any descriptors of
                    # the given type from subclasses (by default inheritance is
                    # disabled unless the class has _inherit_descriptors_
                    # defined)
                    continue

                if obj._name_attribute_ is not None:
                    setattr(obj, obj._name_attribute_, name)

                # Don't just use the descriptor's class directly; instead go
                # through its MRO and find the class on which _class_attribute_
                # is defined directly.  This way subclasses of some
                # OrderedDescriptor *may* override _class_attribute_ and have
                # its own _class_attribute_, but by default all subclasses of
                # some OrderedDescriptor are still grouped together
                # TODO: It might be worth clarifying this in the docs
                if obj.__class__ not in descr_bases:
                    for obj_cls_base in obj.__class__.__mro__:
                        if '_class_attribute_' in obj_cls_base.__dict__:
                            descr_bases[obj.__class__] = obj_cls_base
                            descriptors[obj_cls_base].append((obj, name))
                            break
                else:
                    # Make sure to put obj first for sorting purposes
                    obj_cls_base = descr_bases[obj.__class__]
                    descriptors[obj_cls_base].append((obj, name))

            if not getattr(mro_cls, '_inherit_descriptors_', False):
                # If _inherit_descriptors_ is undefined then we don't inherit
                # any OrderedDescriptors from any of the base classes, and
                # there's no reason to continue through the MRO
                break
            else:
                inherit_descriptors = mro_cls._inherit_descriptors_

        # Sort each group by creation order (OrderedDescriptor.__lt__) and
        # publish it on the new class under the group's _class_attribute_.
        for descriptor_cls, instances in descriptors.items():
            instances.sort()
            instances = OrderedDict((key, value) for value, key in instances)
            setattr(cls, descriptor_cls._class_attribute_, instances)

        super(OrderedDescriptorContainer, cls).__init__(cls_name, bases,
                                                        members)
def get_parameters(members):
    """
    Looks for ordered descriptors in a class definition and
    copies such definitions in two new class attributes,
    one being a dictionary of these objects keyed by their
    attribute names, and the other a simple list of those names.
    """
    # NOTE(review): only ``_parameters_`` (the dict) is actually set
    # below; the "simple list of those names" mentioned above is not
    # created — confirm whether the docstring or the code is out of date.
    pdict = OrderedDict()
    for name, obj in members.items():
        if (not isinstance(obj, OrderedDescriptor)):
            continue
        if obj._name_attribute_ is not None:
            # NOTE(review): unlike OrderedDescriptorContainer, which does
            # ``setattr(obj, obj._name_attribute_, name)``, this always
            # assigns to the literal attribute ``_name`` — confirm the
            # asymmetry is intentional.
            setattr(obj, '_name', name)
        pdict[name] = obj

    # members['_parameter_vals_'] = pdict
    members['_parameters_'] = pdict
LOCALE_LOCK = threading.Lock()


@contextmanager
def set_locale(name):
    """
    Context manager to temporarily set the locale to ``name``.

    An example is setting locale to "C" so that the C strtod()
    function will use "." as the decimal point to enable consistent
    numerical string parsing.

    Note that one cannot nest multiple set_locale() context manager
    statements as this causes a threading lock.

    This code taken from https://stackoverflow.com/questions/18593661/how-do-i-strftime-a-date-object-in-a-different-locale.

    Parameters
    ==========
    name : str
        Locale name, e.g. "C" or "fr_FR".
    """
    name = str(name)

    with LOCALE_LOCK:
        previous = locale.setlocale(locale.LC_ALL)
        if previous == name:
            # Already in the requested locale; nothing to change or restore.
            yield
            return
        try:
            locale.setlocale(locale.LC_ALL, name)
            yield
        finally:
            # Always restore the caller's locale, even on error.
            locale.setlocale(locale.LC_ALL, previous)
class ShapedLikeNDArray(metaclass=abc.ABCMeta):
    """Mixin class to provide shape-changing methods.

    The class proper is assumed to have some underlying data, which are arrays
    or array-like structures. It must define a ``shape`` property, which gives
    the shape of those data, as well as an ``_apply`` method that creates a new
    instance in which a `~numpy.ndarray` method has been applied to those.

    Furthermore, for consistency with `~numpy.ndarray`, it is recommended to
    define a setter for the ``shape`` property, which, like the
    `~numpy.ndarray.shape` property allows in-place reshaping the internal data
    (and, unlike the ``reshape`` method raises an exception if this is not
    possible).

    This class also defines default implementations for ``ndim`` and ``size``
    properties, calculating those from the ``shape``.  These can be overridden
    by subclasses if there are faster ways to obtain those numbers.
    """

    # Note to developers: if new methods are added here, be sure to check that
    # they work properly with the classes that use this, such as Time and
    # BaseRepresentation, i.e., look at their ``_apply`` methods and add
    # relevant tests.  This is particularly important for methods that imply
    # copies rather than views of data (see the special-case treatment of
    # 'flatten' in Time).

    @property
    @abc.abstractmethod
    def shape(self):
        """The shape of the instance and underlying arrays."""

    @abc.abstractmethod
    def _apply(method, *args, **kwargs):
        # NOTE(review): declared without ``self`` — it is abstract and never
        # invoked on this base class; subclasses define it as a normal
        # instance method.
        """Create a new instance, with ``method`` applied to underlying data.

        The method is any of the shape-changing methods for `~numpy.ndarray`
        (``reshape``, ``swapaxes``, etc.), as well as those picking particular
        elements (``__getitem__``, ``take``, etc.). It will be applied to the
        underlying arrays (e.g., ``jd1`` and ``jd2`` in `~astropy.time.Time`),
        with the results used to create a new instance.

        Parameters
        ----------
        method : str
            Method to be applied to the instance's internal data arrays.
        args : tuple
            Any positional arguments for ``method``.
        kwargs : dict
            Any keyword arguments for ``method``.
        """

    @property
    def ndim(self):
        """The number of dimensions of the instance and underlying arrays."""
        return len(self.shape)

    @property
    def size(self):
        """The size of the object, as calculated from its shape."""
        # Product of all dimension lengths; () (scalar) yields 1,
        # matching numpy semantics.
        size = 1
        for sh in self.shape:
            size *= sh
        return size

    @property
    def isscalar(self):
        # A scalar has the empty tuple as its shape, like a 0-d ndarray.
        return self.shape == ()

    def __len__(self):
        if self.isscalar:
            raise TypeError("Scalar {!r} object has no len()"
                            .format(self.__class__.__name__))
        return self.shape[0]

    def __bool__(self):
        """Any instance should evaluate to True, except when it is empty."""
        return self.size > 0

    def __getitem__(self, item):
        try:
            return self._apply('__getitem__', item)
        except IndexError:
            # Translate numpy's IndexError on a 0-d array into the
            # TypeError Python raises for unsubscriptable objects.
            if self.isscalar:
                raise TypeError('scalar {!r} object is not subscriptable.'
                                .format(self.__class__.__name__))
            else:
                raise

    def __iter__(self):
        if self.isscalar:
            raise TypeError('scalar {!r} object is not iterable.'
                            .format(self.__class__.__name__))

        # We cannot just write a generator here, since then the above error
        # would only be raised once we try to use the iterator, rather than
        # upon its definition using iter(self).
        def self_iter():
            for idx in range(len(self)):
                yield self[idx]

        return self_iter()

    def copy(self, *args, **kwargs):
        """Return an instance containing copies of the internal data.

        Parameters are as for :meth:`~numpy.ndarray.copy`.
        """
        return self._apply('copy', *args, **kwargs)

    def reshape(self, *args, **kwargs):
        """Returns an instance containing the same data with a new shape.

        Parameters are as for :meth:`~numpy.ndarray.reshape`.  Note that it is
        not always possible to change the shape of an array without copying the
        data (see :func:`~numpy.reshape` documentation). If you want an error
        to be raise if the data is copied, you should assign the new shape to
        the shape attribute (note: this may not be implemented for all classes
        using ``ShapedLikeNDArray``).
        """
        return self._apply('reshape', *args, **kwargs)

    def ravel(self, *args, **kwargs):
        """Return an instance with the array collapsed into one dimension.

        Parameters are as for :meth:`~numpy.ndarray.ravel`.  Note that it is
        not always possible to unravel an array without copying the data.
        If you want an error to be raise if the data is copied, you should
        should assign shape ``(-1,)`` to the shape attribute.
        """
        return self._apply('ravel', *args, **kwargs)

    def flatten(self, *args, **kwargs):
        """Return a copy with the array collapsed into one dimension.

        Parameters are as for :meth:`~numpy.ndarray.flatten`.
        """
        return self._apply('flatten', *args, **kwargs)

    def transpose(self, *args, **kwargs):
        """Return an instance with the data transposed.

        Parameters are as for :meth:`~numpy.ndarray.transpose`.  All internal
        data are views of the data of the original.
        """
        return self._apply('transpose', *args, **kwargs)

    @property
    def T(self):
        """Return an instance with the data transposed.

        Parameters are as for :attr:`~numpy.ndarray.T`.  All internal
        data are views of the data of the original.
        """
        # As with ndarray.T, transposing a 0-d or 1-d instance is a no-op.
        if self.ndim < 2:
            return self
        else:
            return self.transpose()

    def swapaxes(self, *args, **kwargs):
        """Return an instance with the given axes interchanged.

        Parameters are as for :meth:`~numpy.ndarray.swapaxes`:
        ``axis1, axis2``.  All internal data are views of the data of the
        original.
        """
        return self._apply('swapaxes', *args, **kwargs)

    def diagonal(self, *args, **kwargs):
        """Return an instance with the specified diagonals.

        Parameters are as for :meth:`~numpy.ndarray.diagonal`.  All internal
        data are views of the data of the original.
        """
        return self._apply('diagonal', *args, **kwargs)

    def squeeze(self, *args, **kwargs):
        """Return an instance with single-dimensional shape entries removed

        Parameters are as for :meth:`~numpy.ndarray.squeeze`.  All internal
        data are views of the data of the original.
        """
        return self._apply('squeeze', *args, **kwargs)

    def take(self, indices, axis=None, mode='raise'):
        """Return a new instance formed from the elements at the given indices.

        Parameters are as for :meth:`~numpy.ndarray.take`, except that,
        obviously, no output array can be given.
        """
        return self._apply('take', indices, axis=axis, mode=mode)
class IncompatibleShapeError(ValueError):
    """
    Raised by `check_broadcast` when two shapes cannot be broadcast
    against each other.

    Parameters
    ----------
    shape_a, shape_b : tuple
        The two incompatible shapes.
    shape_a_idx, shape_b_idx : int
        The positions of those shapes in the argument list passed to
        `check_broadcast`.
    """

    def __init__(self, shape_a, shape_a_idx, shape_b, shape_b_idx):
        super().__init__(shape_a, shape_a_idx, shape_b, shape_b_idx)
        # Expose the offending shapes/indices as attributes so callers
        # don't have to unpack ``self.args`` positionally.
        self.shape_a = shape_a
        self.shape_a_idx = shape_a_idx
        self.shape_b = shape_b
        self.shape_b_idx = shape_b_idx
def check_broadcast(*shapes):
    """
    Determines whether two or more Numpy arrays can be broadcast with each
    other based on their shape tuple alone.

    Parameters
    ----------
    *shapes : tuple
        All shapes to include in the comparison.  If only one shape is given it
        is passed through unmodified.  If no shapes are given returns an empty
        `tuple`.

    Returns
    -------
    broadcast : `tuple`
        If all shapes are mutually broadcastable, returns a tuple of the full
        broadcast shape.
    """
    # Trivial cases: nothing to compare.
    if not shapes:
        return ()
    if len(shapes) == 1:
        return shapes[0]

    result = []
    # Walk the dimensions from the trailing end, padding shorter shapes
    # with 1s, exactly as numpy broadcasting does.
    trailing_dims = zip_longest(*(reversed(shape) for shape in shapes),
                                fillvalue=1)
    for dims in trailing_dims:
        size = 1
        size_idx = None
        for idx, dim in enumerate(dims):
            if dim == 1:
                # A length-1 dimension broadcasts against anything.
                continue
            if size == 1:
                # First dimension of size greater than 1 sets the target.
                size = dim
                size_idx = idx
            elif dim != size:
                raise IncompatibleShapeError(
                    shapes[size_idx], size_idx, shapes[idx], idx)

        result.append(size)

    # Undo the trailing-end traversal order.
    return tuple(reversed(result))
def dtype_bytes_or_chars(dtype):
    """
    Parse the number out of a dtype.str value like '<U5' or '<f8'.

    See #5819 for discussion on the need for this function for getting
    the number of characters corresponding to a string dtype.

    Parameters
    ----------
    dtype : numpy dtype object
        Input dtype

    Returns
    -------
    bytes_or_chars : int or None
        Bytes (for numeric types) or characters (for string types), or
        `None` if the dtype string carries no trailing number (e.g.
        object dtypes).
    """
    # dtype.str looks like '<f8' or '<U5'; the trailing digits encode
    # the item size in bytes (numeric types) or characters (unicode).
    # (The previous docstring said "Bits", which was wrong: '<f8' is a
    # 64-bit float and this returns 8.)
    match = re.search(r'(\d+)$', dtype.str)
    return int(match.group(1)) if match else None
def pizza():  # pragma: no cover
    """
    Open browser loaded with pizza options near you.

    *Disclaimers: Payments not included. Astropy is not
    responsible for any liability from using this function.*

    .. note:: Accuracy depends on your browser settings.
    """
    # Imported locally so the module has no hard dependency on a
    # browser environment.
    import webbrowser

    search_url = 'https://www.google.com/search?q=pizza+near+me'
    webbrowser.open(search_url)
|
bsd-3-clause
| -8,344,829,424,143,748,000
| 33.369991
| 124
| 0.597331
| false
| 4.412389
| false
| false
| false
|
CoreSecurity/pysap
|
pysap/utils/fields.py
|
1
|
12226
|
# ===========
# pysap - Python library for crafting SAP's network protocols packets
#
# SECUREAUTH LABS. Copyright (C) 2021 SecureAuth Corporation. All rights reserved.
#
# The library was designed and developed by Martin Gallo from
# the SecureAuth's Innovation Labs team.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# ==============
# Standard imports
import struct
from datetime import datetime
# External imports
from scapy.config import conf
from scapy.packet import Packet
from scapy.asn1fields import (ASN1F_CHOICE, ASN1F_field, ASN1_Error, ASN1F_badsequence, BER_Decoding_Error)
from scapy.volatile import (RandNum, RandTermString, RandBin)
from scapy.fields import (MultiEnumField, StrLenField, Field, StrFixedLenField, StrField, PacketListField, LongField)
def saptimestamp_to_datetime(timestamp):
    """Converts a timestamp in "SAP format" to a datetime object. Time zone
    looks to be fixed at GMT+1."""
    # SAP keeps the timestamp in the low 32 bits, offset by 10^9 seconds
    # relative to the Unix epoch.
    epoch_seconds = (int(timestamp) & 0xFFFFFFFF) + 1000000000
    return datetime.utcfromtimestamp(epoch_seconds)
class PacketNoPadded(Packet):
    """Regular scapy packet with no padding.
    """

    def extract_padding(self, s):
        # Report no padding and hand every trailing byte back as payload,
        # so following layers dissect it instead of it becoming Padding.
        return '', s
class RandByteReduced(RandNum):
    """RandByte that only returns random values between 0 and x2a. Used while
    performing some fuzz to reduce the test cases space.
    """

    def __init__(self):
        # Cap the fuzzed byte at 0x2a to shrink the search space.
        RandNum.__init__(self, 0, 0x2a)
class ByteMultiEnumKeysField(MultiEnumField):
    """MultiEnumField that picks a reduced number of values. Used for fuzzing
    Byte fields with reduced number of values.
    """

    def randval(self):
        # Draw fuzz values from the reduced 0..0x2a range instead of the
        # full byte range.
        return RandByteReduced()
class MutablePacketField(StrLenField):
    """Packet field that mutates the class according to a list of evaluators.
    The evaluators are run against the packet and given to a class getter.

    If the class can't be found, the field is treated as a StrLenField.
    """
    __slots__ = ["length_from", "evaluators", "_get_class"]

    def __init__(self, name, default, length_from, get_class, evaluators=None):
        """
        :param length_from: function to obtain the field length
        :type length_from: C{callable}

        :param get_class: function to obtain the class
        :type get_class: C{callable}

        :param evaluators: evaluators
        :type evaluators: ``list`` of C{callable}
        """
        StrLenField.__init__(self, name, default, length_from=length_from)
        self.evaluators = evaluators or []
        self._get_class = get_class

    def get_class(self, pkt):
        # Run the evaluators on the actual packet
        values = [evaluator(pkt) for evaluator in self.evaluators]
        # Return the class using the function provided
        return self._get_class(pkt, *values)

    def i2m(self, pkt, i):
        # Internal value -> machine (wire) representation.
        cls = self.get_class(pkt)
        if cls is not None:
            # Serialize the packet object as a raw string
            # (Python-2 str == bytes semantics).
            return str(i)
        else:
            return StrLenField.i2m(self, pkt, i)

    def m2i(self, pkt, m):
        # Machine (wire) representation -> internal value.
        cls = self.get_class(pkt)
        if cls is not None:
            # Dissect the raw bytes with the resolved packet class.
            return cls(m)
        else:
            return StrLenField.m2i(self, pkt, m)
class StrNullFixedLenField(StrFixedLenField):
    """Packet field that has a fixed length and is conditionally null-terminated.
    """
    __slots__ = ["length_from", "max_length", "null_terminated"]

    def __init__(self, name, default, length=None, length_from=None, max_length=None, null_terminated=None):
        # ``null_terminated`` is a predicate evaluated against the packet;
        # by default the field is always treated as null-terminated.
        if null_terminated:
            self.null_terminated = null_terminated
        else:
            self.null_terminated = lambda pkt: True
        # Upper bound used only when generating random (fuzz) values.
        self.max_length = max_length or 200
        StrFixedLenField.__init__(self, name, default, length=length, length_from=length_from)

    def i2repr(self, pkt, v):
        if self.null_terminated(pkt):
            if type(v) is str:
                # Hide trailing NUL padding in the display.
                v = v.rstrip("\0")
            return repr(v)
        return StrFixedLenField.i2repr(self, pkt, v)

    def getfield(self, pkt, s):
        if self.null_terminated(pkt):
            # Consume the full field length but drop the final NUL byte.
            l = self.length_from(pkt) - 1
            return s[l + 1:], self.m2i(pkt, s[:l])
        return StrFixedLenField.getfield(self, pkt, s)

    def addfield(self, pkt, s, val):
        if self.null_terminated(pkt):
            # Pack into length-1 bytes and append the NUL terminator.
            l = self.length_from(pkt) - 1
            return s + struct.pack("%is" % l, self.i2m(pkt, val)) + "\x00"
        return StrFixedLenField.addfield(self, pkt, s, val)

    def randval(self):
        # NOTE(review): ``self.null_terminated`` is a callable here, so this
        # truthiness test is always True and the StrFixedLenField.randval
        # fallback below is unreachable — possibly it was meant to be
        # *called*.  Confirm intended behavior.
        if self.null_terminated:
            try:
                l = self.length_from(None) - 1
            except:  # length_from may not accept a None packet
                # Fall back to a random-length, NUL-terminated value.
                l = RandTermString(RandNum(0, self.max_length), "\x00")
            return RandBin(l)
        return StrFixedLenField.randval(self)
class StrFixedLenPaddedField(StrFixedLenField):
    """Packet field that has a fixed length and is padded with a
    given character.
    """
    __slots__ = ["length_from", "padd"]

    def __init__(self, name, default, length=None, length_from=None, padd=" "):
        StrFixedLenField.__init__(self, name, default, length, length_from)
        # Character used to right-pad values shorter than the field length.
        self.padd = padd

    def getfield(self, pkt, s):
        l = self.length_from(pkt)
        return s[l:], self.m2i(pkt, s[:l])

    def addfield(self, pkt, s, val):
        l = self.length_from(pkt)
        # Append l padding chars; StrFixedLenField.addfield then packs to
        # exactly l bytes, so short values end up padded and long ones
        # truncated.
        val += self.padd * l
        return StrFixedLenField.addfield(self, pkt, s, val)
class StrNullFixedLenPaddedField(StrFixedLenField):
    """Packet field that has a fixed length and is padded with a
    given character and null terminated.
    """
    __slots__ = ["length_from", "padd"]

    def __init__(self, name, default, length=None, length_from=None, padd=" "):
        StrFixedLenField.__init__(self, name, default, length, length_from)
        # Character used to right-pad values shorter than the field length.
        self.padd = padd

    def getfield(self, pkt, s):
        l = self.length_from(pkt)
        # Stop at the first NUL if it falls inside the field.
        lz = s.find("\x00")
        # NOTE(review): if no NUL is present, ``find`` returns -1, which is
        # < l and makes ``s[:lz]`` silently drop the last byte — confirm
        # inputs are always NUL-terminated.
        if lz < l:
            return s[l + 1:], self.m2i(pkt, s[:lz])
        return s[l + 1:], self.m2i(pkt, s[:l])

    def addfield(self, pkt, s, val):
        l = self.length_from(pkt)
        # Append padding; StrFixedLenField.addfield packs to exactly l bytes.
        val += self.padd * l
        return StrFixedLenField.addfield(self, pkt, s, val)
class IntToStrField(Field):
    """Custom field from int to str values, with a variable length
    """
    __slots__ = ["length", "format"]

    def __init__(self, name, default, length=11):
        """Initialize the field with a variable length. The 'machine'
        representation is a string field and the 'internal' repr.
        is a numeric value.
        """
        Field.__init__(self, name, default, "%ds" % length)
        # Stores the length of the field
        self.length = length
        # Stores the conversion format between representations
        self.format = "%" + "%d" % length + "d"

    def m2i(self, pkt, x):
        # Wire bytes -> internal value (kept as a string).
        return str(x)

    def i2m(self, pkt, x):
        # Internal value -> fixed-width, right-aligned decimal string.
        return self.format % int(x)

    def i2count(self, pkt, x):
        return x
class StrEncodedPaddedField(StrField):
    """String field stored in a given text encoding and terminated on the
    wire by a padding character (``padd``).
    """
    __slots__ = ["remain", "encoding", "padd"]

    def __init__(self, name, default, encoding="utf-16", padd="\x0c",
                 fmt="H", remain=0):
        StrField.__init__(self, name, default, fmt, remain)
        self.encoding = encoding
        self.padd = padd

    def h2i(self, pkt, x):
        """Human to internal: encode non-empty values."""
        return x.encode(self.encoding) if x else x

    def i2h(self, pkt, x):
        """Internal to human: decode non-empty values."""
        return x.decode(self.encoding) if x else x

    def addfield(self, pkt, s, val):
        """Serialize the value followed by the padding terminator."""
        return s + self.i2m(pkt, val) + self.padd

    def getfield(self, pkt, s):
        """Consume up to (and including) the first padding character."""
        end = s.find(self.padd)
        if end < 0:
            # No terminator found: treat the whole buffer as trailing data.
            return "", s
        return s[end + 1:], self.m2i(pkt, s[:end])
class PacketListStopField(PacketListField):
    """Custom field that contains a list of packets until a 'stop' condition is met.
    """
    __slots__ = ["count_from", "length_from", "stop"]

    def __init__(self, name, default, cls, count_from=None, length_from=None, stop=None):
        PacketListField.__init__(self, name, default, cls, count_from=count_from, length_from=length_from)
        # Predicate called with each dissected packet; a truthy result
        # ends the list early.
        self.stop = stop

    def getfield(self, pkt, s):
        """Dissect packets from ``s`` until the bytes/count budget is
        exhausted or the stop predicate fires."""
        # c: remaining packet-count budget; l: byte-length budget.
        c = l = None
        if self.length_from is not None:
            l = self.length_from(pkt)
        elif self.count_from is not None:
            c = self.count_from(pkt)

        lst = []
        ret = ""
        remain = s
        if l is not None:
            # Split off bytes beyond the declared length; they are handed
            # back untouched as part of the remainder.
            remain, ret = s[:l], s[l:]
        while remain:
            if c is not None:
                if c <= 0:
                    break
                c -= 1
            try:
                p = self.m2i(pkt, remain)
            except Exception:
                if conf.debug_dissector:
                    raise
                # Dissection failed: keep the raw bytes as an opaque layer.
                p = conf.raw_layer(load=remain)
                remain = ""
            else:
                if conf.padding_layer in p:
                    # Bytes past the dissected packet appear as padding;
                    # detach it and continue parsing from there.
                    pad = p[conf.padding_layer]
                    remain = pad.load
                    del (pad.underlayer.payload)
                else:
                    remain = ""
            lst.append(p)
            # Evaluate the stop condition
            if self.stop and self.stop(p):
                break
        return remain + ret, lst
class AdjustableFieldLenField(Field):
    """Length field encoded as a single byte for small values and, for
    values above 0xf0, as a 0xff marker byte followed by a big-endian
    16-bit value.
    """
    # NOTE(review): "count_of" and "adjust" are declared but never set or
    # used in this class -- confirm whether a subclass relies on them.
    __slots__ = ["length_of", "count_of", "adjust"]

    def __init__(self, name, default, length_of=None):
        Field.__init__(self, name, default, ">H")
        self.length_of = length_of

    def i2m(self, pkt, x):
        # When no explicit value is given, derive it from the length of the
        # field referenced by length_of.
        if x is None:
            fld, fval = pkt.getfield_and_val(self.length_of)
            x = fld.i2len(pkt, fval)
        return x

    def addfield(self, pkt, s, val):
        i2m = self.i2m(pkt, val)
        fmt = "B"
        padd = ""
        # Values above 0xf0 switch to the escaped 2-byte form, so a
        # one-byte encoding can never collide with the 0xff marker.
        if i2m > 0xf0:
            fmt = ">H"
            padd = struct.pack("B", 0xff)
        return s + padd + struct.pack(fmt, i2m)

    def getfield(self, pkt, s):
        # A leading 0xff marker announces the 2-byte big-endian form.
        if struct.unpack("B", s[:1])[0] == 0xff:
            return s[3:], self.m2i(pkt, struct.unpack(">H", s[1:3])[0])
        else:
            return s[1:], self.m2i(pkt, struct.unpack("B", s[:1])[0])
class ASN1F_CHOICE_SAFE(ASN1F_CHOICE):
    """ASN1F_CHOICE variant that tries each candidate type in turn and
    keeps the first one that decodes, instead of dispatching on tags.
    """

    def __init__(self, name, default, *args, **kwargs):
        if "implicit_tag" in kwargs:
            err_msg = "ASN1F_CHOICE has been called with an implicit_tag"
            raise ASN1_Error(err_msg)
        self.implicit_tag = None
        # Only these keyword arguments are honoured; anything else is
        # ignored.
        for kwarg in ["context", "explicit_tag"]:
            if kwarg in kwargs:
                setattr(self, kwarg, kwargs[kwarg])
            else:
                setattr(self, kwarg, None)
        # Deliberately bypasses ASN1F_CHOICE.__init__ and calls the base
        # ASN1F_field initializer: the tag-dispatch tables built by the
        # parent are not needed for the try-each-choice strategy in m2i.
        ASN1F_field.__init__(self, name, None, context=self.context,
                             explicit_tag=self.explicit_tag)
        self.default = default
        self.current_choice = None
        self.choices = args

    def m2i(self, pkt, s):
        """Try to safely extract an ASN1_Packet from the choices list.

        :raise ASN1_Error: if unable to parse the packet using any of the
            given choices
        """
        if len(s) == 0:
            raise ASN1_Error("ASN1F_CHOICE: got empty string")
        for choice in self.choices:
            try:
                return self.extract_packet(choice, s)
            except (ASN1_Error, ASN1F_badsequence, BER_Decoding_Error):
                # Wrong candidate -- try the next one.
                pass
        raise ASN1_Error
class TimestampField(LongField):
    """Long field rendered as a human-readable UTC timestamp."""

    def i2h(self, pkt, x):
        """Format the epoch value ``x`` as 'YYYY-MM-DD HH:MM:SS UTC'."""
        return datetime.utcfromtimestamp(x).strftime("%Y-%m-%d %H:%M:%S UTC")
class LESignedByteField(Field):
    """Signed 8-bit integer field (struct format "<b")."""
    def __init__(self, name, default):
        Field.__init__(self, name, default, "<b")
class LESignedShortField(Field):
    """Little-endian signed 16-bit integer field (struct format "<h")."""
    def __init__(self, name, default):
        Field.__init__(self, name, default, "<h")
class LESignedLongField(Field):
    """Little-endian signed 64-bit integer field (struct format "<q")."""
    def __init__(self, name, default):
        Field.__init__(self, name, default, "<q")
|
gpl-2.0
| 3,712,130,716,518,683,000
| 31.515957
| 117
| 0.586128
| false
| 3.613952
| false
| false
| false
|
mbiokyle29/pipelines
|
EBseq/ebseq_extras.py
|
1
|
3684
|
import os.path
import re
# don't use slots since we only have a few of these guys
class _sampleRec():
def __init__(self, name, mean, std, condition):
self.name = name
self.mean = int(mean)
self.std = int(std)
self.condition = int(condition)
class EbseqExtras():
def __init__(self, log):
self.log = log
self.samples = []
self.conditions = {}
def read_configuration(self, conf):
if os.path.isfile(conf):
try:
with open(conf, "r") as fh:
for line in fh:
self._build_rec(line)
except IOError as e:
self.log.error("IOError thrown trying to read %s conf file, perhap permissions?", conf)
raise SystemExit
else:
self.log.error("It appears %s does not exist", conf)
raise SystemExit
def _build_rec(self, line):
# <sample><frag-mean><frag-sd><cond>
rec = _sampleRec(*line.split("\t"))
self.samples.append(rec)
if rec.condition in self.conditions:
self.conditions[rec.condition].append(rec)
else:
self.conditions[rec.condition] = [rec]
def gen_fastq_list(self):
results = []
for sample in self.samples:
results.append(sample.name)
return results
def gen_sample_list(self):
sample_str = ""
for cond in sorted(self.conditions.keys()):
for rec in self.conditions[cond]:
name = re.sub(r"\.fastq", ".genes.results", rec.name)
sample_str += name+" "
return sample_str.rstrip()
def get_mean_length(self, file):
base = os.path.splitext(file)[0]
for sample in self.samples:
sample_base = os.path.splitext(sample.name)[0
]
if base == sample_base:
return sample.mean
# if it wasnt found
raise SystemError
def gen_cond_string(self):
# if conditions has {1}:[2], {2}:[2], {3}:[2]
# we want 2,2,2
cond_str = ""
for condition in sorted(self.conditions.keys()):
cond_str += str(len(self.conditions[condition]))+","
return cond_str.rstrip(",")
def report_error(self, message):
# Create a text/plain message
email_body = []
email_body.append("Hello, Kyle\n")
email_body.append("Pipeline failed with the following error: ")
email_body.append(message)
# grab the log file name from the log
# we add the file handler first
# so its here
log_file = self.log.handlers[0].baseFilename
email_body.append("\n#######################################################")
email_body.append("# PIPELINE LOG #")
email_body.append("#######################################################")
with open(log_file, "r") as log:
for line in log:
email_body.append(line.rstrip())
msg = MIMEText("\n".join(email_body))
# header stuff
# no one else cares but me!
root = "root@alpha-helix.oncology.wisc.edu"
me = "mbio.kyle@gmail.com"
subject = "RSEM/EBseq pipeline failure report: {}".format(time.strftime("%d/%m/%Y"))
msg['Subject'] = subject
msg['From'] = root
msg['To'] = me
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(root, [me], msg.as_string())
s.quit()
|
mit
| -1,907,952,357,245,468,400
| 29.708333
| 103
| 0.51683
| false
| 4.070718
| false
| false
| false
|
taigaio/taiga-back
|
taiga/export_import/management/commands/dump_project.py
|
1
|
2981
|
# -*- coding: utf-8 -*-
# Copyright (C) 2014-present Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.management.base import BaseCommand, CommandError
from taiga.projects.models import Project
from taiga.export_import.services import render_project
import os
import gzip
class Command(BaseCommand):
    help = "Export projects to a json file"

    def add_arguments(self, parser):
        """Register the positional project slugs and the output options."""
        parser.add_argument("project_slugs",
                            nargs="+",
                            help="<project_slug project_slug ...>")
        parser.add_argument("-d", "--dst_dir",
                            action="store",
                            dest="dst_dir",
                            default="./",
                            metavar="DIR",
                            help="Directory to save the json files. ('./' by default)")
        parser.add_argument("-f", "--format",
                            action="store",
                            dest="format",
                            default="plain",
                            metavar="[plain|gzip]",
                            help="Format to the output file plain json or gzipped json. ('plain' by default)")

    def handle(self, *args, **options):
        """Dump each requested project to <dst_dir>/<slug>.json(.gz)."""
        dst_dir = options["dst_dir"]
        if not os.path.exists(dst_dir):
            raise CommandError("Directory {} does not exist.".format(dst_dir))
        if not os.path.isdir(dst_dir):
            raise CommandError("'{}' must be a directory, not a file.".format(dst_dir))

        for project_slug in options["project_slugs"]:
            try:
                project = Project.objects.get(slug=project_slug)
            except Project.DoesNotExist:
                raise CommandError("Project '{}' does not exist".format(project_slug))

            # Choose plain or gzipped output; both paths render the same data.
            if options["format"] == "gzip":
                dst_file = os.path.join(dst_dir, "{}.json.gz".format(project_slug))
                dst_handle = gzip.GzipFile(dst_file, "wb")
            else:
                dst_file = os.path.join(dst_dir, "{}.json".format(project_slug))
                dst_handle = open(dst_file, "wb")
            with dst_handle as f:
                render_project(project, f)

            print("-> Generate dump of project '{}' in '{}'".format(project.name, dst_file))
|
agpl-3.0
| 5,321,887,911,599,760,000
| 39.283784
| 110
| 0.571956
| false
| 4.429421
| false
| false
| false
|
GoogleCloudPlatform/PerfKitBenchmarker
|
perfkitbenchmarker/linux_packages/maven.py
|
1
|
4623
|
# Copyright 2020 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
"""Module containing maven installation functions."""
import os
import posixpath
from absl import flags
from perfkitbenchmarker import data
from perfkitbenchmarker import linux_packages
from six.moves.urllib.parse import urlparse
flags.DEFINE_string('maven_version', '3.6.3',
                    'The version of maven')
flags.DEFINE_string('maven_mirror_url', None,
                    'If specified, this URL will be used as a Maven mirror')
FLAGS = flags.FLAGS

# Download URL template: {0} is the major version, {1} the full version.
MVN_URL = 'https://archive.apache.org/dist/maven/maven-{0}/{1}/binaries/apache-maven-{1}-bin.tar.gz'
# Directory on the target VM where maven is unpacked.
MVN_DIR = posixpath.join(linux_packages.INSTALL_DIR, 'maven')
# Profile script that exports the maven environment variables below.
MVN_ENV_PATH = '/etc/profile.d/maven.sh'

# Template for the contents of MVN_ENV_PATH.
MVN_ENV = '''
export JAVA_HOME={java_home}
export M2_HOME={maven_home}
export MAVEN_HOME={maven_home}
export PATH={maven_home}/bin:$PATH
'''

PACKAGE_NAME = 'maven'
# sha256 checksums of the known release tarballs.
PREPROVISIONED_DATA = {
    'apache-maven-{0}-bin.tar.gz'.format('3.6.1'):
        '2528c35a99c30f8940cc599ba15d34359d58bec57af58c1075519b8cd33b69e7',
    'apache-maven-{0}-bin.tar.gz'.format('3.6.3'):
        '26ad91d751b3a9a53087aefa743f4e16a17741d3915b219cf74112bf87a438c5'
}
# Download URLs for the tarballs above.
PACKAGE_DATA_URL = {
    'apache-maven-{0}-bin.tar.gz'.format('3.6.1'): MVN_URL.format('3', '3.6.1'),
    'apache-maven-{0}-bin.tar.gz'.format('3.6.3'): MVN_URL.format('3', '3.6.3')
}
def GetRunCommand(arguments):
  """Return the shell command that runs ``mvn`` with the given arguments.

  The maven environment file is sourced first, and Java proxy system
  properties are appended when the PKB --http_proxy / --https_proxy flags
  were provided.
  """
  command = 'source {} && mvn {}'.format(MVN_ENV_PATH, arguments)
  for flag_name, scheme in (('http_proxy', 'http'), ('https_proxy', 'https')):
    if FLAGS[flag_name].present:
      parsed_url = urlparse(getattr(FLAGS, flag_name))
      command += ' -D{0}.proxyHost={1} -D{0}.proxyPort={2}'.format(
          scheme, parsed_url.hostname, parsed_url.port)
  return command
def _GetJavaHome(vm):
out, _ = vm.RemoteCommand("java -XshowSettings:properties 2>&1 > /dev/null "
"| awk '/java.home/{print $3}'")
out = out.strip()
if '/jre' in out:
return out[:out.index('/jre')]
else:
return out
def AptInstall(vm):
  """Installs maven on a Debian-based VM."""
  _Install(vm)
def YumInstall(vm):
  """Installs maven on a RHEL-based VM."""
  # 'which' is not preinstalled on minimal RHEL images.
  vm.InstallPackages('which')
  _Install(vm)
def _Install(vm):
  """Install maven package.

  Downloads the release tarball (via preprovisioned data when available),
  unpacks it into MVN_DIR, writes the environment profile script, and
  optionally renders a settings.xml pointing at a configured mirror.
  """
  vm.Install('openjdk')
  vm.Install('curl')
  # Download and extract maven
  maven_full_ver = FLAGS.maven_version
  maven_major_ver = maven_full_ver[:maven_full_ver.index('.')]
  maven_url = MVN_URL.format(maven_major_ver, maven_full_ver)
  maven_tar = maven_url.split('/')[-1]
  # will only work with preprovision_ignore_checksum
  if maven_tar not in PREPROVISIONED_DATA:
    PREPROVISIONED_DATA[maven_tar] = ''
    PACKAGE_DATA_URL[maven_tar] = maven_url
  maven_remote_path = posixpath.join(linux_packages.INSTALL_DIR, maven_tar)
  vm.InstallPreprovisionedPackageData(PACKAGE_NAME, [maven_tar],
                                      linux_packages.INSTALL_DIR)
  # Unpack into MVN_DIR, dropping the apache-maven-<version> top directory.
  vm.RemoteCommand(('mkdir -p {0} && '
                    'tar -C {0} --strip-components=1 -xzf {1}').format(
                        MVN_DIR, maven_remote_path))
  java_home = _GetJavaHome(vm)
  # Set env variables for maven
  maven_env = MVN_ENV.format(java_home=java_home, maven_home=MVN_DIR)
  cmd = 'echo "{0}" | sudo tee -a {1}'.format(maven_env, MVN_ENV_PATH)
  vm.RemoteCommand(cmd)
  if FLAGS.maven_mirror_url:
    # Render a settings.xml that points maven at the configured mirror.
    settings_local_path = data.ResourcePath(os.path.join(
        'maven', 'settings.xml.j2'))
    settings_remote_path = '~/.m2/settings.xml'
    context = {
        'maven_mirror_url': FLAGS.maven_mirror_url
    }
    vm.RemoteCommand('mkdir -p ~/.m2')
    vm.RenderTemplate(settings_local_path, settings_remote_path, context)
def Uninstall(vm):
  """Removes maven, its install directory, and its profile script."""
  vm.Uninstall('openjdk')
  vm.RemoteCommand('rm -rf {0}'.format(MVN_DIR), ignore_failure=True)
  vm.RemoteCommand('sudo rm -f {0}'.format(MVN_ENV_PATH), ignore_failure=True)
|
apache-2.0
| -5,603,522,190,831,945,000
| 34.022727
| 100
| 0.679645
| false
| 3.039448
| false
| false
| false
|
mindbody/API-Examples
|
SDKs/Python/swagger_client/models/time_clock_report.py
|
1
|
7634
|
# coding: utf-8
"""
MINDBODY Public API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.time_card_event import TimeCardEvent # noqa: F401,E501
class TimeClockReport(object):
    """Swagger-generated model for a staff member's time-clock report.

    NOTE: This class is auto generated by the swagger code generator
    program; keep edits consistent with the generator's conventions.
    """
    # Maps attribute name -> attribute type.
    swagger_types = {
        'staff_id': 'int',
        'task': 'str',
        'hourly_rate': 'float',
        'total_hours': 'float',
        'total_pay': 'float',
        'time_cards': 'list[TimeCardEvent]'
    }

    # Maps attribute name -> JSON key in the API definition.
    attribute_map = {
        'staff_id': 'StaffId',
        'task': 'Task',
        'hourly_rate': 'HourlyRate',
        'total_hours': 'TotalHours',
        'total_pay': 'TotalPay',
        'time_cards': 'TimeCards'
    }

    def __init__(self, staff_id=None, task=None, hourly_rate=None, total_hours=None, total_pay=None, time_cards=None):  # noqa: E501
        """TimeClockReport - a model defined in Swagger"""  # noqa: E501
        self._staff_id = None
        self._task = None
        self._hourly_rate = None
        self._total_hours = None
        self._total_pay = None
        self._time_cards = None
        self.discriminator = None
        # Route every provided value through its property setter, exactly
        # like the generated per-attribute assignments would.
        provided = (('staff_id', staff_id), ('task', task),
                    ('hourly_rate', hourly_rate), ('total_hours', total_hours),
                    ('total_pay', total_pay), ('time_cards', time_cards))
        for attr_name, value in provided:
            if value is not None:
                setattr(self, attr_name, value)

    @property
    def staff_id(self):
        """The ID of the requested staff member.

        :rtype: int
        """
        return self._staff_id

    @staff_id.setter
    def staff_id(self, staff_id):
        self._staff_id = staff_id

    @property
    def task(self):
        """The staff member's job title.

        :rtype: str
        """
        return self._task

    @task.setter
    def task(self, task):
        self._task = task

    @property
    def hourly_rate(self):
        """The hourly rate the business pays for this job.

        :rtype: float
        """
        return self._hourly_rate

    @hourly_rate.setter
    def hourly_rate(self, hourly_rate):
        self._hourly_rate = hourly_rate

    @property
    def total_hours(self):
        """The sum of the hours worked by the staff member in this report.

        :rtype: float
        """
        return self._total_hours

    @total_hours.setter
    def total_hours(self, total_hours):
        self._total_hours = total_hours

    @property
    def total_pay(self):
        """The total amount earned by the staff member for this report.

        :rtype: float
        """
        return self._total_pay

    @total_pay.setter
    def total_pay(self, total_pay):
        self._total_pay = total_pay

    @property
    def time_cards(self):
        """Information about when a staff member began and ended a task.

        :rtype: list[TimeCardEvent]
        """
        return self._time_cards

    @time_cards.setter
    def time_cards(self, time_cards):
        self._time_cards = time_cards

    def to_dict(self):
        """Returns the model properties as a dict"""
        def _convert(value):
            # Recursively convert nested models inside lists and dicts.
            if isinstance(value, list):
                return [item.to_dict() if hasattr(item, "to_dict") else item
                        for item in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {k: (v.to_dict() if hasattr(v, "to_dict") else v)
                        for k, v in value.items()}
            return value

        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            result[attr] = _convert(getattr(self, attr))
        if issubclass(TimeClockReport, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return (isinstance(other, TimeClockReport)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
bsd-2-clause
| -8,113,522,800,906,684,000
| 28.459459
| 132
| 0.573788
| false
| 3.898825
| false
| false
| false
|
SymbiFlow/sv-tests
|
tools/runners/Yosys.py
|
1
|
1388
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 The SymbiFlow Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
import os
from BaseRunner import BaseRunner
class Yosys(BaseRunner):
    """Test runner wrapper around the Yosys synthesis tool."""

    def __init__(self):
        super().__init__("yosys", "yosys")
        self.url = "http://www.clifford.at/yosys/"

    def prepare_run_cb(self, tmp_dir, params):
        """Write a Yosys script plus a shell wrapper and set self.cmd."""
        wrapper_path = os.path.join(tmp_dir, "run.sh")
        script_path = os.path.join(tmp_dir, 'scr.ys')

        inc = "".join(f' -I {incdir}' for incdir in params['incdirs'])
        defs = "".join(f' -D {define}' for define in params['defines'])

        # One read_verilog line per source file, all sharing the same
        # include and define options.
        with open(script_path, 'w') as f:
            for svf in params['files']:
                f.write(f'read_verilog -sv {inc} {defs} {svf}\n')

        # Shell wrapper: echo the script into the log, then run yosys on it.
        with open(wrapper_path, 'w') as f:
            f.write('set -x\n')
            f.write(f'cat {script_path}\n')
            f.write(f'{self.executable} -Q -T {script_path}\n')

        self.cmd = ['sh', wrapper_path]

    def get_version_cmd(self):
        """Command line that makes yosys print its version."""
        return [self.executable, "-V"]

    def get_version(self):
        """Return '<name> <version-number>' parsed from the tool output."""
        raw = super().get_version()
        return " ".join([self.name, raw.split()[1]])
isc
| -7,568,493,084,167,110,000
| 24.703704
| 65
| 0.54755
| false
| 3.242991
| false
| false
| false
|
fedora-conary/conary
|
conary/trovetup.py
|
1
|
7716
|
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from conary.deps import deps
from conary import errors
from conary import versions
from conary.lib.compat import namedtuple as _namedtuple
class TroveSpec(_namedtuple('TroveSpec', 'name version flavor')):
    """
    A trove spec is a partial trove specification. It contains an optional
    name, an optional version specification, and an optional flavor.

    The version specification may be a full version, a branch, a label,
    a revision or partial revision, or a label plus a revision or partial
    revision.
    """
    __slots__ = ()

    def __new__(cls, name, version=None, flavor=None,
                allowEmptyName=True, withFrozenFlavor=False):
        """
        @param name: the input string or tuple
        @type name: string or tuple
        @param version: optional version, if version not included in name
        @type version: string
        @param flavor: optional flavor, if flavor not included in name
        @type flavor: string, or frozen flavor if C{withFrozenFlavor} is True.
        @param allowEmptyName: if set, will accept an empty string and some
        other variations.
        @type allowEmptyName: bool
        @param withFrozenFlavor: if set, will accept a frozen flavor
        @type withFrozenFlavor: bool
        @raise errors.TroveSpecError: Raised if the input string is not
        a valid TroveSpec
        """
        if isinstance(name, (tuple, list)):
            # TroveSpec(sometuple)
            name, version, flavor = name
        elif version is None and flavor is None:
            # TroveSpec('a=b[c]')
            return cls.fromString(name, allowEmptyName=allowEmptyName,
                                  withFrozenFlavor=withFrozenFlavor)

        # TroveSpec(name, version, flavor)
        if isinstance(flavor, basestring):
            flavor = cls._thawFlavor(flavor, withFrozenFlavor)
        return tuple.__new__(cls, (name, version, flavor))

    def __repr__(self):
        return 'TroveSpec(%r)' % (self.asString(True),)

    def asString(self, withTimestamp=False):
        """Render the spec as 'name[=version][[flavor]]'."""
        if self.version is not None:
            version = '=' + self.version
        else:
            version = ''
        if self.flavor is not None:
            flavor = '[' + str(self.flavor) + ']'
        else:
            flavor = ''
        return ''.join((self.name, version, flavor))
    __str__ = asString

    @staticmethod
    def _thawFlavor(flavor, withFrozenFlavor):
        # Frozen flavors use a different serialization than parseable ones.
        if withFrozenFlavor:
            return deps.ThawFlavor(flavor)
        return deps.parseFlavor(flavor)

    @classmethod
    def fromString(cls, specStr, allowEmptyName=True, withFrozenFlavor=False):
        """Parse a 'name[=version][[flavor]]' string into a TroveSpec."""
        origSpecStr = specStr
        # CNY-3219: strip leading and trailing whitespaces around job
        # specification
        specStr = specStr.strip()

        # A trailing '[flavor]' part (the '[' must not be at position 0).
        if specStr.find('[') > 0 and specStr[-1] == ']':
            specStr = specStr[:-1]
            l = specStr.split('[')
            if len(l) != 2:
                raise errors.TroveSpecError(origSpecStr, "bad flavor spec")
            specStr, flavorSpec = l
            flavor = cls._thawFlavor(flavorSpec, withFrozenFlavor)
            if flavor is None:
                raise errors.TroveSpecError(origSpecStr, "bad flavor spec")
        else:
            flavor = None

        # Optional '=version' part.
        if specStr.find("=") >= 0:
            l = specStr.split("=")
            if len(l) != 2:
                raise errors.TroveSpecError(origSpecStr, "Too many ='s")
            name, versionSpec = l
        else:
            name = specStr
            versionSpec = None
        if not name and not allowEmptyName:
            raise errors.TroveSpecError(origSpecStr, 'Trove name is required')

        return tuple.__new__(cls, (name, versionSpec, flavor))
class TroveTuple(_namedtuple('TroveTuple', 'name version flavor')):
    """
    A trove tuple is a (name, version, flavor) tuple that uniquely identifies a
    single trove. It is always an exact reference.

    For a partial specification, see L{TroveSpec}.
    """
    # NOTE to future developers: if a version of TroveTuple with timestampless
    # versions becomes useful, subclass it instead of kludging this one to
    # support both. You should really never be in a situation where you don't
    # know whether your version has timestamps!
    __slots__ = ()
    hasTimestamp = True
    _thawVerFunc = staticmethod(versions.ThawVersion)
    _thawFlavFunc = staticmethod(deps.parseFlavor)

    def __new__(cls, name, version=None, flavor=None):
        if isinstance(name, (tuple, list)):
            # TroveTuple(sometuple)
            name, version, flavor = name
        elif version is None and flavor is None:
            # TroveTuple('a=b[c]')
            return cls.fromString(name)

        # TroveTuple(name, version, flavor)
        if isinstance(version, basestring):
            version = cls._thawVerFunc(version)
        if isinstance(flavor, basestring):
            flavor = cls._thawFlavFunc(flavor)
        return tuple.__new__(cls, (name, version, flavor))

    def __repr__(self):
        return 'TroveTuple(%r)' % (self.asString(True),)

    def asString(self, withTimestamp=False):
        """Render as 'name=version[flavor]'; the version is frozen (with
        timestamps) when withTimestamp is set."""
        if withTimestamp:
            ver = self.version.freeze()
        else:
            ver = self.version.asString()
        return '%s=%s[%s]' % (self.name, ver, self.flavor)
    __str__ = asString

    @classmethod
    def fromString(cls, ttstr, withFrozenFlavor=False):
        """Parse 'name=version[flavor]'; exactly one '=' and at most one
        '[...]' at the very end are accepted.

        NOTE(review): withFrozenFlavor is accepted but never used -- the
        flavor is always thawed via _thawFlavFunc in __new__; confirm
        whether frozen-flavor input was meant to be supported here.
        """
        try:
            ttstr = _cast(ttstr)
        except UnicodeEncodeError:
            raise errors.ParseError("Trove tuple must be ASCII safe")

        # Structural validation on counts before locating the separators.
        equals = ttstr.count('=')
        left = ttstr.count('[')
        right = ttstr.count(']')
        if equals != 1 or left not in (0, 1) or right != left:
            raise errors.ParseError("Not a valid trove tuple")

        equals = ttstr.find('=')
        left = ttstr.find('[')
        right = ttstr.find(']')
        name = ttstr[:equals]
        if left < 0:
            # No flavor.
            assert right < 0
            left = right = len(ttstr)
        elif right != len(ttstr) - 1:
            # The ']' must be the very last character.
            raise errors.ParseError("Not a valid trove tuple")

        version = ttstr[equals + 1 : left]
        flavor = ttstr[left + 1 : right]
        if not version:
            raise errors.ParseError("Not a valid trove tuple")
        return cls(name, version, flavor)
class JobSpec(_namedtuple('JobSpec', 'name old new')):
    """
    A job spec holds a single update request, including a name, optional old
    version and flavor, and optional new version and flavor.
    """
    # 'old'/'new' presumably hold (version, flavor) pairs per the docstring
    # -- confirm at call sites.
    __slots__ = ()

    # TODO: Parsers, stringifiers, etc.
class JobTuple(_namedtuple('JobTuple', 'name old new absolute')):
    """
    A job tuple represents a single trove job, consisting of a name, old
    version and flavor, new version and flavor, and a flag indicating whether
    the job is absolute.
    """
    __slots__ = ()

    # TODO: Parsers, stringifiers, etc.
def _cast(val):
    """Return C{val.encode('ascii')} when C{val} is a unicode string; any
    other value is passed through unchanged.
    """
    if not isinstance(val, unicode):
        return val
    return val.encode('ascii')
|
apache-2.0
| -8,419,735,200,402,609,000
| 34.232877
| 79
| 0.616252
| false
| 4.123998
| false
| false
| false
|
allancaffee/scaly-mongo
|
scalymongo/structure_walker.py
|
1
|
4037
|
"""
Structure Walker
================
A utility used to aid in structure validation.
"""
from inspect import isclass
from scalymongo.errors import ValidationError
class StructureWalker(object):
    """A helper class to recurse a :class:`dict`-like object in accordance with
    a structure.

    :param field_validator: a function called as ``field_validator(path,
        value, type_)`` for each leaf field encountered during the walk.
    """

    def __init__(self, field_validator):
        self.field_validator = field_validator

    def walk_dict(self, body, structure, path=None):
        """Validate a dictionary in accordance with `structure`.

        A :class:`ValidationError` is raised if any fields in `body` are
        not present in `structure`.
        """
        _check_for_unknown_fields(body, structure, path)

        for field, sub_structure in structure.iteritems():
            if isclass(field):
                field_type = field
                # For structures like {<TYPE>: {<STRUCT>}} iterate values
                # in the body with keys of <TYPE> and verify each against
                # <STRUCT>.
                for key, value in body.iteritems():
                    if isinstance(key, field_type):
                        self._recurse_or_validate_field(
                            value, sub_structure, _join(path, key))

            if field in body:
                self._recurse_or_validate_field(
                    body[field], sub_structure, _join(path, field))

    def _recurse_or_validate_field(self, value, sub_structure, path):
        # Recurse into list and dict structures; validate leaf values.
        if isinstance(sub_structure, list):
            # Only single-element list structures are supported: the one
            # element describes the type of every list item.
            assert len(sub_structure) == 1
            if isinstance(value, dict):
                # If the structure is a dict this is fine so long as all of the
                # keys are integers or the positional operator (`$`). This
                # happens with the $set update modifier since we expand
                # {'foo.0.bar': 1} to {'foo': {'0': {'bar': 1}}}
                for key, value in value.iteritems():
                    assert key.isdigit() or key == '$'
                    self._recurse_or_validate_field(
                        value, sub_structure[0], _join(path, key))
            else:
                # Validate each value in the list against the specified content
                # type.
                for i, value in enumerate(value):
                    self._recurse_or_validate_field(
                        value, sub_structure[0], _join(path, i))
            return

        if isinstance(sub_structure, dict):
            self.walk_dict(value, sub_structure, path)
            return

        self.field_validator(path, value, sub_structure)
def _check_for_unknown_fields(body, structure, path):
"""Check `body` for any keys not present in `structure`.
This only checks the first level of keys. Any keys from :class:`dict`s in
the `body`\ 's values will not be checked.
"""
type_keys = tuple([key for key in structure if isclass(key)])
existing_fields = set([key for key in body if not isclass(key)])
unknown_fields = existing_fields.difference(structure.keys())
# If there are valid types for a key filter out unknown fields that match a
# type.
if type_keys:
unknown_fields = [key for key in unknown_fields
if not isinstance(key, type_keys)]
if unknown_fields:
unknown_fields = ', '.join([repr(field) for field in unknown_fields])
if path:
err = ('Encountered field(s), in subdocument at {0},'
' not present in structure: {1}'.format(
path, unknown_fields))
else:
err = 'Encountered field(s) not present in structure: {0}'.format(
unknown_fields)
raise ValidationError(err)
def _join(head, tail):
"""Join `head` and `tail` with a dot.
If head is ``None`` only `tail` is returned.
"""
if head is None:
return tail
return '{0}.{1}'.format(head, tail)
|
bsd-3-clause
| 7,673,683,350,043,057,000
| 34.725664
| 79
| 0.567005
| false
| 4.34086
| false
| false
| false
|
FedoraScientific/salome-smesh
|
src/Tools/blocFissure/gmu/rotTrans.py
|
1
|
2187
|
# -*- coding: utf-8 -*-
import logging
from geomsmesh import geompy
import math
from triedreBase import triedreBase
# Reference frame built once at import time: the origin plus the OX, OY
# and OZ axis objects used by the geometry operators below.
O, OX, OY, OZ = triedreBase()
# -----------------------------------------------------------------------------
# --- operateur de rotation translation d'un objet centré à l'origine
def rotTrans(objet, orientation, point, normal, trace = False):
  """
  Move an object centred at the origin, by rotation then translation, onto a
  point of the surface of the sound part in which the defect is inserted.
  @param objet : original object centred at the origin (geomObject)
  @param orientation : rotation of the original object around OX (degrees)
  @param point : the point that will become the centre of the moved object
    (geomObject), generally on the surface of the sound part
  @param normal : normal to the surface of the sound part at the central
    point (geomObject)
  @param trace : if True, publish the intermediate shapes in the study
  @return trans : transformed object (geomObject)
  """
  logging.info("start")
  # Project the normal onto the XY plane to recover its azimuth angle alpha.
  planXY = geompy.MakePlaneLCS(None, 2000, 1)
  projXY = geompy.MakeProjection(normal, planXY)
  [v1,v2] = geompy.ExtractShapes(projXY, geompy.ShapeType["VERTEX"], False)
  xyz1 = geompy.PointCoordinates(v1)
  xyz2 = geompy.PointCoordinates(v2)
  x = xyz2[0] - xyz1[0]
  y = xyz2[1] - xyz1[1]
  # Hoisted: the XY norm was computed twice in the original.
  # NOTE(review): a perfectly vertical normal gives norme == 0 and would
  # raise ZeroDivisionError here, as in the original — confirm callers
  # never pass one.
  norme = math.sqrt(x*x + y*y)
  sinalpha = y / norme
  cosalpha = x / norme
  alpha = math.asin(sinalpha)
  if cosalpha < 0:
    alpha = math.pi -alpha
  # Inclination beta of the normal with respect to OZ; flip it when the
  # normal points downwards (negative Z component).
  beta = geompy.GetAngleRadians(OZ, normal)
  [v1,v2] = geompy.ExtractShapes(normal, geompy.ShapeType["VERTEX"], False)
  xyz1 = geompy.PointCoordinates(v1)
  xyz2 = geompy.PointCoordinates(v2)
  z = xyz2[2] - xyz1[2]
  if z < 0:
    beta = math.pi -beta
  # Apply the orientation around OX, then the azimuth around OZ, then the
  # tilt around the rotated OY axis.
  rot0 = geompy.MakeRotation(objet, OX, orientation*math.pi/180.0)
  rot1 = geompy.MakeRotation(rot0, OZ, alpha)
  axe2 = geompy.MakeRotation(OY, OZ, alpha)
  rot2 = geompy.MakeRotation(rot1, axe2, beta -math.pi/2.)
  # Bug fix: logging.debug("alpha", alpha) treats "alpha" as the format
  # string and alpha as a %-argument with no placeholder, which makes the
  # logging module report "not all arguments converted" at emit time.
  logging.debug("alpha %s", alpha)
  logging.debug("beta %s", beta)
  if trace:
    geompy.addToStudy( rot1, 'rot1' )
    geompy.addToStudy( axe2, 'axe2' )
    geompy.addToStudy( rot2, 'rot2' )
  # Finally translate the rotated object so its centre lands on `point`.
  xyz = geompy.PointCoordinates(point)
  trans = geompy.MakeTranslation(rot2, xyz[0], xyz[1], xyz[2])
  return trans
|
lgpl-2.1
| 1,145,417,926,614,904,200
| 37.035088
| 121
| 0.677122
| false
| 2.837696
| false
| false
| false
|
Twi/amaya
|
amaya/base.py
|
1
|
8526
|
from exceptions import ConnectionError
from ircmess import IRCLine
from select import select
import socket
import ssl
class IRCBot:
    """
    An IRCBot is a class that maintains a connection with a remote IRC server
    and keeps track of channel members, information about the remote server,
    and other things that the protocol gives that users might find useful.
    """

    def __init__(self, host, port, ssl=False, nick="AmayaTest1", user="amaya",
                 gecos="Amaya 0.1", netname="ExampleNet", nickservpass=None,
                 encoding="UTF-8", sasl=False, debug=False, autojoin=None):
        """
        Args: remote host to connect to, port number to connect to

        Keyword args:
        - ssl: Whether or not to use SSL for the connection
        - nick: nickname of bot
        - user: ident the bot uses
        - gecos: real name of the bot
        - netname: Name of the network you're connecting to
        - nickservpass: Password to use for authentication
        - encoding: Character encoding to use
        - sasl: Whether or not to attempt SASL authentication
        - debug: print raw traffic to stdout when True
        - autojoin: list of channels to join after registration
        """

        # Lots of variables, no way around this.
        self.link = socket.socket()
        self.link.connect((host, port))

        self.__buf = ""
        self.host = host
        self.ssl = ssl
        self.nick = nick
        self.user = user
        self.gecos = gecos
        self.netname = netname
        self.nickservpass = nickservpass
        self.encoding = encoding
        self.sasl = sasl
        self.debug = debug
        # Bug fix: the original always assigned [] here, silently discarding
        # the autojoin argument. Copy the caller's list (None means "none")
        # so instances never share a mutable default.
        self.autojoin = list(autojoin) if autojoin else []
        # Set by change_nick() before a deliberate NICK request; on_NICK()
        # uses it to distinguish our own changes from server-forced ones.
        # Bug fix: previously never initialized, so an unexpected server
        # NICK raised AttributeError.
        self.expecting_nickchange = False

        self.servername = ""
        self.ircdver = ""
        self.snomask = ""
        self.loggedinas = ""

        self.ircdumodes = []
        self.umodes = []

        self.channels = {}
        self.clients = {} # XXX: Is this a good idea?
        self.isupport = {}

        if self.ssl:
            # Bug fix: the parameter `ssl` shadows the ssl module inside
            # __init__ (so the original called .wrap_socket on a bool), and
            # the wrapped socket was discarded. Re-import under another name
            # and keep the wrapped socket.
            import ssl as _ssl
            self.link = _ssl.wrap_socket(self.link)

        # Get a list of IRCv3 CAPs
        self.send_line("CAP LS")

        # Register with the remote server
        self.send_line("NICK %s" % self.nick)
        self.send_line("USER {0} {0} {0} :{1}".format(user, gecos))

    def send_line(self, line):
        """
        Takes in a raw line and sends it to the server. Don't use this without
        good reason.
        """
        # Bug fix: the original tested the unbound name `debug` (NameError).
        if self.debug:
            print(">>>", line)

        # NOTE(review): the wire encoding is hard-coded to UTF-8 even though
        # an `encoding` parameter is stored on the instance — confirm whether
        # self.encoding was meant to be used here.
        self.link.send(bytes("%s\r\n" % line, "UTF-8"))

    # The following functions are high level binds to common IRC client commands

    def join(self, channel):
        """
        Join a channel and set up the appropriate data structures.
        """
        # Channel names are case-insensitive; key the table on uppercase.
        self.channels[channel.upper()] = {}
        self.send_line("JOIN %s" % channel)

    def part(self, channel, reason="Leaving"):
        """
        Leave a channel and forget about it.
        """
        del self.channels[channel.upper()]
        self.send_line("PART %s :%s" % (channel, reason))

    def message_like(self, kind, target, message):
        """
        NOTICE and PRIVMSG are pretty similar commands. Handle both of them
        the same.
        """
        # An empty trailing parameter is not valid; send a single space.
        if message == "":
            message = " "
        self.send_line("%s %s :%s" % (kind, target, message))

    def notice(self, target, message):
        """
        Sends a NOTICE to someone. Please use this over PRIVMSG. Other bots
        will not loop.
        """
        self.message_like("NOTICE", target, message)

    def privmsg(self, target, message):
        """
        Sends a PRIVMSG to someone.
        """
        self.message_like("PRIVMSG", target, message)

    def ping(self, message="Amaya"):
        """
        Send a PING to the remote server.
        """
        # Bug fix: the original format string was "PING :%" (incomplete
        # conversion specifier), which raises ValueError.
        self.send_line("PING :%s" % message)

    def change_nick(self, nickname):
        """
        Request to change nickname
        """
        self.expecting_nickchange = True
        self.send_line("NICK %s" % nickname)

    # Now is select() baggage and the line scraper

    def process(self):
        """
        Call this function when you have data on the socket.
        """
        tbuf = self.link.recv(2048)
        tbuf = self.__buf + tbuf.decode('UTF-8')

        lines = tbuf.split("\r\n")

        # The last element may be a partial line; keep it buffered until the
        # rest arrives.
        self.__buf = lines[-1]
        lines = lines[:-1]

        for line in lines:
            self.process_line(line)

    def process_line(self, line):
        """
        Take a single line of traffic and process it.
        """
        # Bug fix: the original tested the unbound name `debug` (NameError).
        if self.debug:
            print("<<<", line)

        line = IRCLine(line)

        if line.verb == "PING":
            self.send_line("PONG :%s" % line.args[-1])

        # Dispatch to on_<VERB> handlers defined below, if present.
        if hasattr(self, "on_%s" % line.verb):
            func = getattr(self, "on_%s" % line.verb)
            func(line)

    # Base implementation of protocol verbs
    # Numerics should be first and in numerical order

    def on_001(self, line):
        """
        RPL_WELCOME: This numeric is shown on registration. It shows the network
        name.
        """
        self.netname = line.args[-1].split()[3]
        self.ping()

    def on_004(self, line):
        """
        RPL_MYINFO: This numeric shows the server name, ircd type and version,
        as well as user and modes it supports.
        """
        self.servername = line.args[0]
        self.ircdver = line.args[1]

        # Not scraping CMODES out here, 005 gives me a better place to find
        # what has what syntax
        self.ircdumodes = line.args[3]

        # Apparently people care about +B that it's worth just setting it if
        # available and not worrying about accidentally breaking some weird
        # bot rule.
        if "B" in self.ircdumodes:
            self.send_line("MODE %s +B" % self.nick)

    def on_005(self, line):
        """
        RPL_ISUPPORT: Shows things that the server you are connected to supports.
        This includes the list of prefixes and in some cases their meaning.

        RPL_ISUPPORT strings vary from server to server, so best effort will be
        made to support the most common ones, as well as the ones that the testnet
        supports.
        """
        isupport = line.args[1:]

        for supp in isupport:
            # Tokens are either bare flags ("EXCEPTS") or KEY=VALUE pairs.
            supp = supp.split("=")
            if len(supp) == 1:
                self.isupport[supp[0]] = None
            else:
                self.isupport[supp[0]] = supp[1]

    def on_376(self, line):
        """
        RPL_ENDMOTD: Sent once the server finishes its motd. Usually, this is
        when channel joining is safe. But we are smarter than that, sometimes
        servers don't have an MOTD.
        """
        pass

    def on_433(self, line):
        """
        ERR_NICKINUSE: Sent from the server when a client tries to use a
        nickname that another client is using. We should append an underscore
        to our nick and request nickchange to that.
        """
        self.nick += "_"
        self.change_nick(self.nick)

    def on_900(self, line):
        """
        RPL_LOGGEDIN: Sent when the ircd logs you in via services sucessfully.
        Some IRC daemons send this twice when you authenticate with sasl, but
        other irc daemons only send this once.
        """
        pass

    # Put named verbs past here

    def on_CAP(self, line):
        # Reply to the CAP LS sent during registration by requesting the
        # capabilities we understand.
        if line.args[1] == "LS":
            for cap in line.args[-1].split():
                if cap == "sasl":
                    if self.sasl:
                        self.send_line("AUTHENTICATE PLAIN")
                elif cap == "account-notify":
                    self.send_line("CAP REQ account-notify")
                elif cap == "multi-prefix":
                    self.send_line("CAP REQ multi-prefix")

            # With SASL pending, CAP END is deferred until auth completes.
            if not self.sasl:
                self.send_line("CAP END")

    def on_ERROR(self, line):
        """
        ERROR is sent when the ircd kills off the connection forcibly.
        This should error out with something spectacular.
        """
        raise ConnectionError(line.args[-1])

    def on_NICK(self, line):
        """
        The server changed our nickname. If we are not expecting this, change
        nickname back.
        """
        if not self.expecting_nickchange:
            self.change_nick(self.nick)
        else:
            self.nick = line.args[-1]

    def on_PONG(self, line):
        """
        The server replied to our PING message.
        """
        # First PONG from the server after registration: safe to autojoin.
        if line.source == self.servername:
            if len(self.channels) == 0:
                for channel in self.autojoin:
                    self.join(channel)
|
mit
| 6,726,665,231,286,973,000
| 27.51505
| 82
| 0.554891
| false
| 3.985975
| false
| false
| false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.