text stringlengths 4 1.02M | meta dict |
|---|---|
import os
import sys

# Make the in-tree mbuild package importable when run from the tests dir.
sys.path = ['..'] + sys.path
import mbuild

# Parse standard mbuild command-line options, then force 8 parallel jobs.
env = mbuild.env_t()
env.parse_args()
env['jobs'] = 8

work_queue = mbuild.work_queue_t(env['jobs'])

# One trivial command; subs is empty, so the %-substitution is a no-op
# (kept to mirror the pattern used by the larger tests).
all_cmds = ['python -c "print(2+2)"']
subs = {}
command_list = []
for cmd in all_cmds:
    cmd = cmd % (subs)
    mbuild.msgb('ADDING', cmd)
    c = mbuild.command_t(cmd, output_file_name="foo")
    work_queue.add(c)
    command_list.append(cmd)

phase = "BUILD"
okay = work_queue.build()
if not okay:
    mbuild.die("[%s] failed. dying..." % phase)
mbuild.msgb(phase, "succeeded")
| {
"content_hash": "5e3537d621f7f51bd52cf403fc81914a",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 53,
"avg_line_length": 21.64,
"alnum_prop": 0.6229205175600739,
"repo_name": "intelxed/mbuild",
"id": "980da3cf3a27ceb57b8346e7025ff9d75e2eb615",
"size": "1204",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/3.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2062"
},
{
"name": "Python",
"bytes": "347111"
},
{
"name": "Shell",
"bytes": "546"
}
],
"symlink_target": ""
} |
"""
The MIT License (MIT)
Copyright (c) <2016> <Larry McCaig (aka: Larz60+ aka: Larz60p)>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import json
class MakeTkArgsDb:
    """Build a JSON database describing the standard Tk widget options.

    Each entry maps a Tk option name to its command-line switch, its X
    resource database class, a prose description (taken from the Tk man
    pages), the widgets/methods the option is valid for, and its value
    type.  Instantiating the class immediately writes the table to
    ``tkinterCommandArgumnts.json`` in the current working directory.
    """

    def __init__(self):
        """Populate ``self.commands`` and dump it to the JSON file."""
        self.commands = {
            'activebackground': {
                'command_line': '-activebackground',
                'database_class': 'Foreground',
                'description': 'Specifies background color to use when drawing active elements. '
                               'An element (a widget or portion of a widget) is active if the '
                               'mouse cursor is positioned over the element and pressing a '
                               'mouse button will cause some action to occur. If strict Motif '
                               'compliance has been requested by setting the tk_strictMotif '
                               'variable, this option will normally be ignored; the normal '
                               'background color will be used instead. For some elements on '
                               'Windows and Macintosh systems, the active color will only be '
                               'used while mouse button 1 is pressed over the element.',
                'valid_for': [
                    'Misc.tk_setPalette', 'Button', 'Checkbutton', 'Label', 'Menu',
                    'Radiobutton', 'Scale', 'Scrollbar', 'Spinbox',
                ],
                'value_type': 'color'
            },
            'activeforeground': {
                'command_line': '-activeforeground',
                'database_class': 'Background',
                'description': 'Specifies foreground color to use when drawing active elements. '
                               'See above for definition of active elements.',
                'valid_for': [
                    'Misc.tk_setPalette', 'Button', 'Checkbutton', 'Label', 'Menu', 'Radiobutton'
                ],
                'value_type': 'color'
            },
            'anchor': {
                'command_line': '-anchor',
                'database_class': 'Anchor',
                'description': 'Specifies how the information in a widget (e.g. text or a bitmap) '
                               'is to be displayed in the widget. Must be one of the values n, '
                               'ne, e, se, s, sw, w, nw, or center. For example, nw means display '
                               'the information such that its top-left corner is at the top-left '
                               'corner of the widget.',
                'valid_for': [
                    'grid_anchor', 'Pack', 'Place', 'Button', 'Checkbutton', 'Label', 'Radiobutton',
                    'OptionMenu', 'LabelFrame'
                ],
                'value_type': 'compass_point'
            },
            'background': {
                'command_line': '-background or -bg',
                'database_class': 'Background',
                'description': 'Specifies the normal background color to use when displaying the '
                               'widget.',
                'valid_for': [
                    'Misc.tk_setPalette', 'Toplevel', 'Button', 'Canvas', 'Checkbutton', 'Entry',
                    'Frame', 'Label', 'Listbox', 'Menu', 'Radiobutton', 'Scale', 'Scrollbar',
                    'Text', 'BitmapImage', 'Spinbox', 'LabelFrame', 'PanedWindow'
                ],
                'value_type': 'color'
            },
            'bitmap': {
                # Fixed: leading dash was missing, unlike every other entry.
                'command_line': '-bitmap',
                'database_class': 'Bitmap',
                'description': 'Specifies a bitmap to display in the widget, in any of the forms '
                               'acceptable to Tk_GetBitmap. The exact way in which the bitmap is '
                               'displayed may be affected by other options such as anchor or '
                               'justify. Typically, if this option is specified then it overrides '
                               'other options that specify a textual value to display in the '
                               'widget but this is controlled by the compound option; the bitmap '
                               'option may be reset to an empty string to re-enable a text '
                               'display. In widgets that support both bitmap and image options, '
                               'image will usually override bitmap.',
                'valid_for': [
                    'Wm.wm_iconbitmap', 'Wm.wm_iconmask', 'Button', 'create_bitmap', 'Checkbutton',
                    'Label', 'Radiobutton', 'Image.type', 'BitmapImage'
                ],
                'value_type': 'X11_bitmaps'
            },
            'borderWidth': {
                'command_line': '-borderwidth or -bd',
                'database_class': 'BorderWidth',
                'description': 'Specifies a non-negative value indicating the width of the 3-D '
                               'border to draw around the outside of the widget (if such a '
                               'border is being drawn; the relief option typically determines '
                               'this). The value may also be used when drawing 3-D effects in '
                               'the interior of the widget. The value may have any of the forms '
                               'acceptable to Tk_GetPixels.',
                'valid_for': [
                    'Toplevel', 'Button', 'Canvas', 'Checkbutton', 'Entry', 'Frame', 'Label',
                    'Listbox', 'Menu', 'Radiobutton', 'Scale', 'Scrollbar', 'Text', 'OptionMenu',
                    'Spinbox', 'LabelFrame', 'PanedWindow'
                ],
                'value_type': 'int'
            },
            'cursor': {
                'command_line': '-cursor',
                'database_class': 'Cursor',
                'description': 'Specifies the mouse cursor to be used for the widget. The value '
                               'may have any of the forms acceptable to Tk_GetCursor. examples: '
                               'arrow, boat, pencil, man, ... see Grayson page 13',
                'valid_for': [
                    'Toplevel', 'Button', 'Canvas', 'Canvas.icursor', 'Canvas.index',
                    'Canvas.select_adjust', 'Checkbutton', 'Entry', 'Entry.icursor', 'Entry.index',
                    'Entry.selection_adjust', 'Frame', 'Label', 'Listbox', 'Menu', 'Radiobutton',
                    'Scale', 'Scrollbar', 'Text', 'Spinbox', 'Spinbox.icursor', 'LabelFrame',
                    'PanedWindow'
                ],
                'value_type': 'str'
            },
            'compound': {
                'command_line': '-compound',
                'database_class': 'Compound',
                'description': 'Specifies if the widget should display text and bitmaps/images '
                               'at the same time, and if so, where the bitmap/image should be '
                               'placed relative to the text. Must be one of the values none, '
                               'bottom, top, left, right, or center. For example, the (default) '
                               'value none specifies that the bitmap or image should (if defined) '
                               'be displayed instead of the text, the value left specifies that '
                               'the bitmap or image should be displayed to the left of the text, '
                               'and the value center specifies that the bitmap or image should '
                               'be displayed on top of the text.',
                'valid_for': [
                    'Button'
                ],
                'value_type': 'LEFT, RIGHT, CENTER, TOP, BOTTOM'
            },
            'disabledForeground': {
                'command_line': '-disabledforeground',
                'database_class': 'DisabledForeground',
                'description': 'Specifies foreground color to use when drawing a disabled '
                               'element. If the option is specified as an empty string (which '
                               'is typically the case on monochrome displays), disabled '
                               'elements are drawn with the normal foreground color but they '
                               'are dimmed by drawing them with a stippled fill pattern.',
                'valid_for': [
                    'Misc.tk_setPalette', 'Button', 'Checkbutton', 'Label', 'Menu',
                    'Radiobutton', 'Spinbox'
                ],
                'value_type': 'color'
            },
            'exportselection': {
                'command_line': '-exportselection',
                'database_class': 'ExportSelection',
                'description': 'Specifies whether or not a selection in the widget should also '
                               'be the X selection. The value may have any of the forms '
                               'accepted by Tcl_GetBoolean, such as true, false, 0, 1, yes, or '
                               'no. If the selection is exported, then selecting in the widget '
                               'deselects the current X selection, selecting outside the widget '
                               'deselects any widget selection, and the widget will respond to '
                               'selection retrieval requests when it has a selection. The '
                               'default is usually for widgets to export selections.',
                'valid_for': [
                    'Entry', 'Listbox', 'Text', 'Spinbox'
                ],
                'value_type': 'boolean'
            },
            'font': {
                'command_line': '-font',
                'database_class': 'Font',
                'description': 'Specifies the font to use when drawing text inside the widget. '
                               'The value may have any of the forms accepted by Tk_GetFont.',
                'valid_for': [
                    'Button', 'Checkbutton', 'Entry', 'Label', 'Listbox', 'Menu', 'Radiobutton',
                    'Scale', 'Text', 'Spinbox', 'LabelFrame'
                ],
                'value_type': 'font - see https://www.tcl.tk/man/tcl8.4/TkCmd/font.htm'
            },
            'foreground': {
                'command_line': '-foreground or -fg',
                'database_class': 'Foreground',
                'description': 'Specifies the normal foreground color to use when displaying '
                               'the widget.',
                'valid_for': [
                    'Misc.tk_setPalette', 'Button', 'Canvas', 'Checkbutton', 'Entry', 'Label',
                    'Listbox', 'Listbox.itemconfigure', 'Menu', 'Radiobutton', 'Scale',
                    'Text', 'BitmapImage', 'Spinbox', 'LabelFrame'
                ],
                'value_type': 'color'
            },
            'highlightbackground': {
                'command_line': '-highlightbackground',
                'database_class': 'HighlightBackground',
                'description': 'Specifies the color to display in the traversal highlight '
                               'region when the widget does not have the input focus.',
                'valid_for': [
                    'Misc.tk_setPalette', 'Toplevel', 'Button', 'Canvas', 'Checkbutton',
                    'Entry', 'Frame', 'Label', 'Listbox', 'Radiobutton', 'Scale',
                    'Scrollbar', 'Text', 'Spinbox', 'LabelFrame'
                ],
                'value_type': 'color'
            },
            'highlightcolor': {
                'command_line': '-highlightcolor',
                'database_class': 'HighlightColor',
                'description': 'Specifies the color to use for the traversal highlight '
                               'rectangle that is drawn around the widget when it has the '
                               'input focus.',
                'valid_for': [
                    'Misc.tk_setPalette', 'Toplevel', 'Button', 'Canvas', 'Checkbutton',
                    'Entry', 'Frame', 'Label', 'Listbox', 'Radiobutton', 'Scale',
                    'Scrollbar', 'Text', 'Spinbox', 'LabelFrame'
                ],
                'value_type': 'color'
            },
            'highlightthickness': {
                'command_line': '-highlightthickness',
                'database_class': 'HighlightThickness',
                'description': 'Specifies a non-negative value indicating the width of the '
                               'highlight rectangle to draw around the outside of the widget '
                               'when it has the input focus. The value may have any of the '
                               'forms acceptable to Tk_GetPixels. If the value is zero, no '
                               'focus highlight is drawn around the widget.',
                'valid_for': [
                    # Fixed typo: was 'OptionMeu'.
                    'Toplevel', 'Button', 'Canvas', 'Checkbutton', 'Entry', 'Frame', 'Label',
                    'Listbox', 'Radiobutton', 'Scale', 'Scrollbar', 'Text', 'OptionMenu',
                    'Spinbox', 'LabelFrame'
                ],
                'value_type': 'int'
            },
            'image': {
                'command_line': '-image',
                'database_class': 'Image',
                'description': 'Specifies an image to display in the widget, which must have '
                               'been created with the image create command. Typically, if the '
                               'image option is specified then it overrides other options '
                               'that specify a bitmap or textual value to display in the '
                               'widget, though this is controlled by the compound option; '
                               'the image option may be reset to an empty string to re-enable '
                               'a bitmap or text display.',
                'valid_for': [
                    'Misc.image_names', 'Misc.image_types', 'Misc.image_name', 'Button',
                    'Button.create_image', 'Checkbutton', 'Label', 'Radiobutton',
                    'Text.image_cget', 'Text.image_configure', 'Text.image_create',
                    'Text.image_names', 'Image', 'Image.__del__', 'Image.configure',
                    'Image.height', 'Image.type', 'Image.width', 'PhotoImage',
                    'PhotoImage.blank', 'PhotoImage.copy', 'PhotoImage.zoom',
                    'PhotoImage.subsample', 'PhotoImage.put', 'PhotoImage.write', 'image_names',
                    'image_types'
                ],
                'value_type': 'image_name'
            },
            'insertbackground': {
                'command_line': '-insertbackground',
                'database_class': 'Foreground',
                'description': 'Specifies the color to use as background in the area covered '
                               'by the insertion cursor. This color will normally override '
                               'either the normal background for the widget (or the selection '
                               'background if the insertion cursor happens to fall in the '
                               'selection).',
                'valid_for': [
                    'Misc.tk_setPalette', 'Canvas', 'Entry', 'Text', 'Spinbox'
                ],
                'value_type': 'color'
            },
            'insertborderWidth': {
                'command_line': '-insertborderwidth',
                'database_class': 'BorderWidth',
                'description': 'Specifies a non-negative value indicating the width of the '
                               '3-D border to draw around the insertion cursor. The value '
                               'may have any of the forms acceptable to Tk_GetPixels.',
                'valid_for': [
                    'Canvas', 'Entry', 'Text', 'Spinbox'
                ],
                'value_type': 'int'
            },
            'insertofftime': {
                'command_line': '-insertofftime',
                'database_class': 'OffTime',
                # Fixed: the original doubled single quotes ('' ) silently
                # terminated/restarted the string literal, dropping the
                # apostrophe ("doesnt") and the closing quotes around "off".
                'description': 'Specifies a non-negative integer value indicating the number '
                               'of milliseconds the insertion cursor should remain "off" '
                               'in each blink cycle. If this option is zero then the cursor '
                               "doesn't blink: it is on all the time.",
                'valid_for': [
                    'Canvas', 'Entry', 'Text', 'Spinbox'
                ],
                'value_type': 'int'
            },
            'insertontime': {
                'command_line': '-insertontime',
                'database_class': 'OnTime',
                'description': 'Specifies a non-negative integer value indicating the number '
                               'of milliseconds the insertion cursor should remain "on" in '
                               'each blink cycle.',
                'valid_for': [
                    'Canvas', 'Entry', 'Text', 'Spinbox'
                ],
                'value_type': 'int'
            },
            'insertwidth': {
                'command_line': '-insertwidth',
                'database_class': 'InsertWidth',
                'description': 'Specifies a value indicating the total width of the '
                               'insertion cursor. The value may have any of the forms '
                               'acceptable to Tk_GetPixels. If a border has been specified '
                               'for the insertion cursor (using the insertBorderWidth '
                               'option), the border will be drawn inside the width '
                               'specified by the insertWidth option.',
                'valid_for': [
                    'Canvas', 'Entry', 'Text', 'Spinbox'
                ],
                'value_type': 'int'
            },
            'jump': {
                'command_line': '-jump',
                'database_class': 'Jump',
                'description': 'For widgets with a slider that can be dragged to adjust a '
                               'value, such as scrollbars, this option determines when '
                               'notifications are made about changes in the value. The '
                               "option's value must be a boolean of the form accepted by "
                               'Tcl_GetBoolean. If the value is false, updates are made '
                               'continuously as the slider is dragged. If the value is '
                               'true, updates are delayed until the mouse button is released '
                               'to end the drag; at that point a single notification is '
                               'made (the value "jumps" rather than changing smoothly).',
                'valid_for': [
                    'Scrollbar'
                ],
                'value_type': 'boolean'
            },
            'justify': {
                'command_line': '-justify',
                'database_class': 'Justify',
                'description': 'When there are multiple lines of text displayed in a widget, '
                               'this option determines how the lines line up with each other. '
                               'Must be one of left, center, or right. Left means that the '
                               "lines' left edges all line up, center means that the lines' "
                               "centers are aligned, and right means that the lines' right "
                               'edges line up.',
                'valid_for': [
                    'Button', 'Checkbutton', 'Entry', 'Label', 'Radiobutton', 'Spinbox'
                ],
                # Left blank in the source data; kept as-is.
                'value_type': ''
            },
            'orient': {
                'command_line': '-orient',
                'database_class': 'Orient',
                'description': 'For widgets that can lay themselves out with either a '
                               'horizontal or vertical orientation, such as scrollbars, '
                               'this option specifies which orientation should be used. '
                               'Must be either horizontal or vertical or an abbreviation '
                               'of one of these.',
                'valid_for': [
                    'Scale', 'Scrollbar', 'PanedWindow'
                ],
                'value_type': 'HORIZONTAL or VERTICAL'
            },
            'padx': {
                'command_line': '-padx',
                'database_class': 'Pad',
                'description': 'Specifies a non-negative value indicating how much extra '
                               'space to request for the widget in the X-direction. The '
                               'value may have any of the forms acceptable to Tk_'
                               'GetPixels. When computing how large a window it needs, '
                               'the widget will add this amount to the width it would '
                               'normally need (as determined by the width of the things '
                               'displayed in the widget); if the geometry manager can '
                               'satisfy this request, the widget will end up with extra '
                               'internal space to the left and/or right of what it displays '
                               'inside. Most widgets only use this option for padding text: '
                               'if they are displaying a bitmap or image, then they usually '
                               'ignore padding options.',
                'valid_for': [
                    'Pack', 'Grid', 'Button', 'Checkbutton', 'Label', 'Radiobutton',
                    'Text', 'LabelFrame', 'PanedWindow.paneconfigure'
                ],
                'value_type': 'int'
            },
            'pady': {
                'command_line': '-pady',
                'database_class': 'Pad',
                'description': 'Specifies a non-negative value indicating how much extra space '
                               'to request for the widget in the Y-direction. The value may '
                               'have any of the forms acceptable to Tk_GetPixels. When '
                               'computing how large a window it needs, the widget will add '
                               'this amount to the height it would normally need (as determined '
                               'by the height of the things displayed in the widget); if the '
                               'geometry manager can satisfy this request, the widget will end '
                               'up with extra internal space above and/or below what it '
                               'displays inside. Most widgets only use this option for padding '
                               'text: if they are displaying a bitmap or image, then they '
                               'usually ignore padding options.',
                'valid_for': [
                    'Pack', 'Grid', 'Button', 'Checkbutton', 'Label', 'Radiobutton',
                    'Text', 'LabelFrame', 'PanedWindow.paneconfigure'
                ],
                'value_type': 'int'
            },
            'relief': {
                'command_line': '-relief',
                'database_class': 'Relief',
                'description': 'Specifies the 3-D effect desired for the widget. Acceptable '
                               'values are raised, sunken, flat, ridge, solid, and groove. '
                               'The value indicates how the interior of the widget should '
                               'appear relative to its exterior; for example, raised means '
                               'the interior of the widget should appear to protrude from '
                               'the screen, relative to the exterior of the widget.',
                'valid_for': [
                    'Toplevel', 'Button', 'Canvas', 'Checkbutton', 'Entry', 'Frame', 'Label',
                    'Listbox', 'Menu', 'Radiobutton', 'Scale', 'Scrollbar', 'Text',
                    'OptionMenu', 'Spinbox', 'LabelFrame', 'PanedWindow'
                ],
                'value_type': 'SUNKEN, FLAT, RAISED, GROOVE, or RIDGE'
            },
            'repeatdelay': {
                'command_line': '-repeatdelay',
                'database_class': 'RepeatDelay',
                'description': 'Specifies the number of milliseconds a button or key must '
                               'be held down before it begins to auto-repeat. Used, for '
                               'example, on the up- and down-arrows in scrollbars.',
                'valid_for': [
                    'Button', 'Scale', 'Scrollbar', 'Spinbox'
                ],
                'value_type': 'int'
            },
            'repeatinterval': {
                'command_line': '-repeatinterval',
                'database_class': 'RepeatInterval',
                'description': 'Used in conjunction with repeatDelay: once auto-repeat begins, '
                               'this option determines the number of milliseconds between '
                               'auto-repeats.',
                'valid_for': [
                    'Button', 'Scale', 'Scrollbar', 'Spinbox'
                ],
                'value_type': 'int'
            },
            'selectbackground': {
                'command_line': '-selectbackground',
                'database_class': 'Foreground',
                'description': 'Specifies the background color to use when displaying '
                               'selected items.',
                'valid_for': [
                    'Misc.tk_setPalette', 'Canvas', 'Entry', 'Listbox', 'Listbox.itemconfigure',
                    'Text', 'Spinbox'
                ],
                'value_type': 'color'
            },
            'selectborderWidth': {
                'command_line': '-selectborderwidth',
                'database_class': 'BorderWidth',
                'description': 'Specifies a non-negative value indicating the width of the '
                               '3-D border to draw around selected items. The value may '
                               'have any of the forms acceptable to Tk_GetPixels.',
                'valid_for': [
                    'Canvas', 'Entry', 'Listbox', 'Text', 'Spinbox'
                ],
                'value_type': 'int'
            },
            'selectforeground': {
                'command_line': '-selectforeground',
                'database_class': 'Background',
                'description': 'Specifies the foreground color to use when displaying selected '
                               'items.',
                'valid_for': [
                    'Misc.tk_setPalette', 'Canvas', 'Entry', 'Listbox', 'Listbox.itemconfigure',
                    'Text', 'Spinbox'
                ],
                'value_type': 'color'
            },
            'setgrid': {
                'command_line': '-setgrid',
                'database_class': 'SetGrid',
                'description': 'Specifies a boolean value that determines whether this '
                               'widget controls the resizing grid for its top-level window. '
                               'This option is typically used in text widgets, where the '
                               'information in the widget has a natural size (the size of '
                               "a character) and it makes sense for the window's "
                               'dimensions to be integral numbers of these units. These '
                               'natural window sizes form a grid. If the setGrid option is '
                               'set to true then the widget will communicate with the '
                               'window manager so that when the user interactively resizes '
                               'the top-level window that contains the widget, the '
                               'dimensions of the window will be displayed to the user in '
                               'grid units and the window size will be constrained to '
                               'integral numbers of grid units. See the section GRIDDED '
                               'GEOMETRY MANAGEMENT in the wm manual entry for more details.',
                'valid_for': [
                    'Listbox', 'Text'
                ],
                'value_type': 'boolean'
            },
            'takefocus': {
                'command_line': '-takefocus',
                'database_class': 'TakeFocus',
                'description': 'Determines whether the window accepts the focus during '
                               'keyboard traversal (e.g., Tab and Shift-Tab). Before setting '
                               'the focus to a window, the traversal scripts consult the '
                               'value of the takeFocus option. A value of 0 means that the '
                               'window should be skipped entirely during keyboard traversal. '
                               '1 means that the window should receive the input focus as '
                               'long as it is viewable (it and all of its ancestors are '
                               'mapped). An empty value for the option means that the '
                               'traversal scripts make the decision about whether or not '
                               'to focus on the window: the current algorithm is to skip '
                               'the window if it is disabled, if it has no key bindings, '
                               'or if it is not viewable. If the value has any other '
                               'form, then the traversal scripts take the value, append '
                               'the name of the window to it (with a separator space), '
                               'and evaluate the resulting string as a Tcl script. The '
                               'script must return 0, 1, or an empty string: a 0 or 1 value '
                               'specifies whether the window will receive the input focus, '
                               'and an empty string results in the default decision '
                               'described above. Note: this interpretation of the option '
                               'is defined entirely by the Tcl scripts that implement '
                               'traversal: the widget implementations ignore the option '
                               'entirely, so you can change its meaning if you redefine the '
                               'keyboard traversal scripts.',
                'valid_for': [
                    # Fixed: 'Checkbutton' was listed twice.
                    'Misc.tk_focusNext', 'Toplevel', 'Button', 'Checkbutton',
                    'Frame', 'Label', 'Listbox', 'Menu', 'Radiobutton', 'Scale', 'Scrollbar',
                    'Text', 'Spinbox', 'LabelFrame'
                ],
                'value_type': '1 or 0'
            },
            'text': {
                'command_line': '-text',
                'database_class': 'Text',
                'description': 'Specifies a string to be displayed inside the widget. The way '
                               'in which the string is displayed depends on the particular '
                               'widget and may be determined by other options, such as anchor '
                               'or justify.',
                'valid_for': [
                    'Button', 'Canvas.create_text', 'Canvas.dchars', 'Checkbutton', 'Entry',
                    'Entry.delete', 'Entry.get', 'Label', 'Message', 'Radiobutton', 'Text.dump',
                    'Text.get', 'LabelFrame'
                ],
                'value_type': 'str'
            },
            'textvariable': {
                'command_line': '-textvariable',
                'database_class': 'Variable',
                'description': 'Specifies the name of a global variable. The value of the '
                               'variable is a text string to be displayed inside the widget; '
                               'if the variable value changes then the widget will '
                               'automatically update itself to reflect the new value. The '
                               'way in which the string is displayed in the widget depends '
                               'on the particular widget and may be determined by other '
                               'options, such as anchor or justify.',
                'valid_for': [
                    'Button', 'Checkbutton', 'Entry', 'Label', 'Radiobutton', 'OptionMenu',
                    'Spinbox'
                ],
                'value_type': 'tkinter variable, StringVar'
            },
            'troughcolor': {
                'command_line': '-troughcolor',
                'database_class': 'Background',
                'description': 'Specifies the color to use for the rectangular trough areas '
                               'in widgets such as scrollbars and scales. This option is '
                               "ignored for scrollbars on Windows (native widget doesn't "
                               'recognize this option).',
                'valid_for': [
                    'Misc.tk_setPalette', 'Scale', 'Scrollbar'
                ],
                'value_type': 'color'
            },
            'underline': {
                'command_line': '-underline',
                'database_class': 'Underline',
                'description': 'Specifies the integer index of a character to underline in '
                               'the widget. This option is used by the default bindings to '
                               'implement keyboard traversal for menu buttons and menu '
                               'entries. 0 corresponds to the first character of the text '
                               'displayed in the widget, 1 to the next character, and so on.',
                'valid_for': [
                    'Button', 'Checkbutton', 'Label', 'Radiobutton'
                ],
                'value_type': 'int - index of char'
            },
            'wraplength': {
                'command_line': '-wraplength',
                'database_class': 'WrapLength',
                'description': 'For widgets that can perform word-wrapping, this option '
                               'specifies the maximum line length. Lines that would exceed '
                               'this length are wrapped onto the next line, so that no '
                               'line is longer than the specified length. The value may '
                               'be specified in any of the standard forms for screen '
                               'distances. If this value is less than or equal to 0 then '
                               'no wrapping is done: lines will break only at newline '
                               'characters in the text.',
                'valid_for': [
                    'Button', 'Checkbutton', 'Label', 'Radiobutton'
                ],
                'value_type': 'int'
            },
            'xscrollcommand': {
                'command_line': '-xscrollcommand',
                'database_class': 'ScrollCommand',
                'description': 'Specifies the prefix for a command used to communicate with '
                               "horizontal scrollbars. When the view in the widget's window "
                               'changes (or whenever anything else occurs that could change '
                               'the display in a scrollbar, such as a change in the total '
                               "size of the widget's contents), the widget will generate a "
                               'Tcl command by concatenating the scroll command and two '
                               'numbers. Each of the numbers is a fraction between 0 and 1, '
                               'which indicates a position in the document. 0 indicates the '
                               'beginning of the document, 1 indicates the end, .333 '
                               'indicates a position one third the way through the '
                               'document, and so on. The first fraction indicates the '
                               'first information in the document that is visible in the '
                               'window, and the second fraction indicates the information '
                               'just after the last portion that is visible. The command '
                               'is then passed to the Tcl interpreter for execution. '
                               'Typically the xScrollCommand option consists of the path '
                               'name of a scrollbar widget followed by "set", e.g. '
                               '".x.scrollbar set": this will cause the scrollbar to be '
                               'updated whenever the view in the window changes. If this '
                               'option is not specified, then no command will be executed.',
                'valid_for': [
                    'Canvas', 'Entry', 'Listbox', 'Text', 'Spinbox'
                ],
                'value_type': 'fraction between 0 and 1 see desc'
            },
            'yscrollcommand': {
                'command_line': '-yscrollcommand',
                'database_class': 'ScrollCommand',
                'description': 'Specifies the prefix for a command used to communicate with '
                               'vertical scrollbars. This option is treated in the same way '
                               'as the xScrollCommand option, except that it is used for '
                               'vertical scrollbars and is provided by widgets that '
                               'support vertical scrolling. See the description of '
                               'xScrollCommand for details on how this option is used.',
                'valid_for': [
                    'Canvas', 'Listbox', 'Text', 'Spinbox'
                ],
                'value_type': 'fraction between 0 and 1'
            }
        }
        # NOTE(review): the output filename looks misspelled ("Argumnts"),
        # but it is kept as-is because downstream readers may depend on the
        # exact name.  Dump with indentation for human readability.
        with open("tkinterCommandArgumnts.json", "w") as f:
            json.dump(self.commands, f, indent=2)
if __name__ == "__main__":
    # Build/refresh the JSON option database when run as a script.
    MakeTkArgsDb()
| {
"content_hash": "e38b1d8a8ef5d1e0b9b2e171a5cc8713",
"timestamp": "",
"source": "github",
"line_count": 659,
"max_line_length": 100,
"avg_line_length": 59.69044006069803,
"alnum_prop": 0.4790268456375839,
"repo_name": "Larz60p/TkArgs",
"id": "a3ff94a22dcc7f72e8fab5521273ca057f544c6a",
"size": "39336",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MakeTkArgsDb.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "41971"
}
],
"symlink_target": ""
} |
import base64
import cgi
import copy
import csv
import datetime
import decimal
import os
import shutil
import sys
import types
from ._compat import PY2, StringIO, pjoin, exists, hashlib_md5, \
integer_types, basestring, iteritems, xrange, implements_iterator, \
implements_bool, copyreg, reduce
from ._globals import DEFAULT, IDENTITY, AND, OR
from ._gae import Key
from .exceptions import NotFoundException, NotAuthorizedException
from .helpers.regex import REGEX_TABLE_DOT_FIELD, REGEX_ALPHANUMERIC, \
REGEX_PYTHON_KEYWORDS, REGEX_STORE_PATTERN, REGEX_UPLOAD_PATTERN, \
REGEX_CLEANUP_FN, REGEX_VALID_TB_FLD
from .helpers.classes import Reference, MethodAdder, SQLCallableList, SQLALL, \
Serializable, BasicStorage
from .helpers.methods import list_represent, bar_decode_integer, \
bar_decode_string, bar_encode, archive_record, cleanup, \
use_common_filters, pluralize
from .helpers.serializers import serializers
long = integer_types[-1]
# Default declared lengths (in characters/bytes) applied to a Field of the
# given type when the table definition does not specify one explicitly.
DEFAULTLENGTH = {
    'string': 512,
    'password': 512,
    'upload': 512,
    'text': 2 ** 15,
    'blob': 2 ** 31,
}
class Row(BasicStorage):
    """
    A dictionary that lets you do d['a'] as well as d.a
    this is only used to store a `Row`
    """

    def __getitem__(self, k):
        # Resolve `k` through several layers, in priority order:
        #   1. the row's `_extra` mapping (extra SELECT expressions),
        #   2. a real attribute on the underlying storage,
        #   3. a dotted "table.field" key (then retried as bare "field"),
        #   4. a plain stored key,
        #   5. a lazy-reference factory, whose result is cached on the row.
        key = str(k)
        _extra = super(Row, self).get('_extra', None)
        if _extra is not None:
            # DEFAULT is a sentinel: any other value (including None) wins.
            v = _extra.get(key, DEFAULT)
            if v != DEFAULT:
                return v
        try:
            return BasicStorage.__getattribute__(self, key)
        except AttributeError:
            pass
        m = REGEX_TABLE_DOT_FIELD.match(key)
        if m:
            try:
                # "table.field": look up the nested row, then the field.
                e = super(Row, self).__getitem__(m.group(1))
                return e[m.group(2)]
            except (KeyError, TypeError):
                pass
            # Fall through with just the field part of the dotted name.
            key = m.group(2)
        try:
            return super(Row, self).__getitem__(key)
        except KeyError:
            pass
        try:
            # '__get_lazy_reference__' (when set) materializes the value on
            # first access; cache it so later lookups are plain dict hits.
            e = super(Row, self).get('__get_lazy_reference__')
            if e is not None and callable(e):
                self[key] = e(key)
                return self[key]
        except Exception as e:
            raise e
        raise KeyError

    # repr/str render via as_dict (keeping LazySet values); int/long return
    # the record id; calling a row is the same as indexing it.
    __str__ = __repr__ = lambda self: '<Row %s>' % self.as_dict(custom_types=[LazySet])
    __int__ = lambda self: self.get('id')
    __long__ = lambda self: long(self.get('id'))
    __call__ = __getitem__

    def __getattr__(self, k):
        # Attribute access falls back to item access (d.a == d['a']).
        try:
            return self.__getitem__(k)
        except KeyError:
            raise AttributeError

    def __copy__(self):
        # Shallow copy: a new Row sharing the same values.
        return Row(self)

    def __eq__(self, other):
        # Rows compare equal when their dict representations match; any
        # object without as_dict() is considered unequal.
        try:
            return self.as_dict() == other.as_dict()
        except AttributeError:
            return False

    def get(self, key, default=None):
        # dict.get-style lookup tolerating every failure mode of
        # __getitem__ (missing key, bad attribute, unmatchable key type).
        try:
            return self.__getitem__(key)
        except(KeyError, AttributeError, TypeError):
            return default

    def as_dict(self, datetime_to_str=False, custom_types=None):
        # Convert the row to a plain dict of serializable values.
        # `custom_types` extends the set of types kept as-is; values of any
        # other non-serializable type are silently dropped.
        SERIALIZABLE_TYPES = [str, int, float, bool, list, dict]
        if PY2:
            SERIALIZABLE_TYPES += [unicode, long]
        if isinstance(custom_types,(list,tuple,set)):
            SERIALIZABLE_TYPES += custom_types
        elif custom_types:
            SERIALIZABLE_TYPES.append(custom_types)
        d = dict(self)
        for k in list(d.keys()):
            v=d[k]
            if d[k] is None:
                continue
            elif isinstance(v,Row):
                d[k]=v.as_dict()
            elif isinstance(v,Reference):
                # References collapse to their integer id.
                d[k]=long(v)
            elif isinstance(v,decimal.Decimal):
                d[k]=float(v)
            elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
                if datetime_to_str:
                    # ISO format with 'T' replaced by a space, truncated
                    # to whole seconds.
                    d[k] = v.isoformat().replace('T',' ')[:19]
            elif not isinstance(v,tuple(SERIALIZABLE_TYPES)):
                del d[k]
        return d

    def as_xml(self, row_name="row", colnames=None, indent=' '):
        # Serialize the row (recursively) to an XML fragment rooted at
        # <row_name>.  `colnames` is accepted for interface symmetry but
        # not used here.
        def f(row,field,indent=' '):
            if isinstance(row,Row):
                spc = indent+' \n'
                items = [f(row[x],x,indent+' ') for x in row]
                return '%s<%s>\n%s\n%s</%s>' % (
                    indent,
                    field,
                    spc.join(item for item in items if item),
                    indent,
                    field)
            elif not callable(row):
                if REGEX_ALPHANUMERIC.match(field):
                    return '%s<%s>%s</%s>' % (indent,field,row,field)
                else:
                    # Field name is not a valid XML tag: wrap in <extra>.
                    return '%s<extra name="%s">%s</extra>' % \
                        (indent,field,row)
            else:
                # Callables (methods, lazy helpers) are skipped.
                return None
        return f(self, row_name, indent=indent)

    def as_json(self, mode="object", default=None, colnames=None,
                serialize=True, **kwargs):
        """
        serializes the row to a JSON object
        kwargs are passed to .as_dict method
        only "object" mode supported
        `serialize = False` used by Rows.as_json
        TODO: return array mode with query column order
        mode and colnames are not implemented
        """
        item = self.as_dict(**kwargs)
        if serialize:
            return serializers.json(item)
        else:
            return item
def pickle_row(s):
    """Pickle support: reduce a Row to (Row, (plain dict,)) so it round-trips."""
    return (Row, (dict(s),))


# Register the reducer so the pickle machinery uses it for Row instances.
copyreg.pickle(Row, pickle_row)
class Table(Serializable, BasicStorage):
    """
    Represents a database table

    Example::

        You can create a table as::

            db = DAL(...)
            db.define_table('users', Field('name'))

        And then::

            db.users.insert(name='me') # print db.users._insert(...) to see SQL
            db.users.drop()

    """

    def __init__(
        self,
        db,
        tablename,
        *fields,
        **args):
        """
        Initializes the table and performs checking on the provided fields.

        Each table will have automatically an 'id'.

        If a field is of type Table, the fields (excluding 'id') from that table
        will be used instead.

        Raises:
            SyntaxError: when a supplied field is of incorrect type.
        """
        # import DAL here to avoid circular imports
        from .base import DAL
        super(Table, self).__init__()
        self._actual = False  # set to True by define_table()
        self._db = db
        self._tablename = tablename
        # reject names that would shadow DAL attributes, are not valid
        # identifiers, or are Python keywords
        if (not isinstance(tablename, str) or hasattr(DAL, tablename)
            or not REGEX_VALID_TB_FLD.match(tablename)
            or REGEX_PYTHON_KEYWORDS.match(tablename)
            ):
            raise SyntaxError('Field: invalid table name: %s, '
                              'use rname for "funny" names' % tablename)
        self._ot = None                      # original table name when aliased
        self._rname = args.get('rname')      # "real" (quoted) backend name
        self._sequence_name = (args.get('sequence_name') or
                               db and db._adapter.sequence_name(self._rname
                                                                or tablename))
        self._trigger_name = (args.get('trigger_name') or
                              db and db._adapter.trigger_name(tablename))
        self._common_filter = args.get('common_filter')
        self._format = args.get('format')
        self._singular = args.get(
            'singular', tablename.replace('_', ' ').capitalize())
        self._plural = args.get(
            'plural', pluralize(self._singular.lower()).capitalize())
        # horrible, but kept for backward compatibility of appadmin:
        if 'primarykey' in args and args['primarykey'] is not None:
            self._primarykey = args.get('primarykey')
        # insert/update/delete callback chains
        self._before_insert = []
        self._before_update = [Set.delete_uploaded_files]
        self._before_delete = [Set.delete_uploaded_files]
        self._after_insert = []
        self._after_update = []
        self._after_delete = []
        self.add_method = MethodAdder(self)
        fieldnames, newfields=set(), []
        _primarykey = getattr(self, '_primarykey', None)
        if _primarykey is not None:
            # keyed table: its 'id' is the single primary-key field (if one)
            if not isinstance(_primarykey, list):
                raise SyntaxError(
                    "primarykey must be a list of fields from table '%s'"
                    % tablename)
            if len(_primarykey) == 1:
                self._id = [f for f in fields if isinstance(f, Field)
                            and f.name ==_primarykey[0]][0]
        elif not [f for f in fields if (isinstance(f, Field) and
                 f.type == 'id') or (isinstance(f, dict) and
                 f.get("type", None) == "id")]:
            # no explicit id field supplied: add the automatic one
            field = Field('id', 'id')
            newfields.append(field)
            fieldnames.add('id')
            self._id = field
        virtual_fields = []

        def include_new(field):
            # register a concrete field, tracking the id field separately
            newfields.append(field)
            fieldnames.add(field.name)
            if field.type == 'id':
                self._id = field
        for field in fields:
            if isinstance(field, (FieldMethod, FieldVirtual)):
                virtual_fields.append(field)
            elif isinstance(field, Field) and not field.name in fieldnames:
                if field.db is not None:
                    # field already bound to another table: work on a copy
                    field = copy.copy(field)
                include_new(field)
            elif isinstance(field, Table):
                # inherit fields (except 'id') from another table definition
                table = field
                for field in table:
                    if not field.name in fieldnames and not field.type == 'id':
                        t2 = not table._actual and self._tablename
                        include_new(field.clone(point_self_references_to=t2))
            elif isinstance(field, dict) and not field['fieldname'] in fieldnames:
                include_new(Field(**field))
            elif not isinstance(field, (Field, Table)):
                raise SyntaxError(
                    'define_table argument is not a Field or Table: %s' % field)
        fields = newfields
        tablename = tablename
        self._fields = SQLCallableList()
        self.virtualfields = []
        fields = list(fields)
        if db and db._adapter.uploads_in_blob is True:
            # adapters that store uploads in the db get a companion blob
            # field ('<name>_blob') for each plain upload field
            uploadfields = [f.name for f in fields if f.type == 'blob']
            for field in fields:
                fn = field.uploadfield
                if isinstance(field, Field) and field.type == 'upload'\
                        and fn is True and not field.uploadfs:
                    fn = field.uploadfield = '%s_blob' % field.name
                if isinstance(fn, str) and not fn in uploadfields and not field.uploadfs:
                    fields.append(Field(fn, 'blob', default='',
                                        writable=False, readable=False))
        fieldnames_set = set()
        reserved = dir(Table) + ['fields']
        if (db and db.check_reserved):
            check_reserved = db.check_reserved_keyword
        else:
            def check_reserved(field_name):
                if field_name in reserved:
                    raise SyntaxError("field name %s not allowed" % field_name)
        for field in fields:
            field_name = field.name
            check_reserved(field_name)
            if db and db._ignore_field_case:
                fname_item = field_name.lower()
            else:
                fname_item = field_name
            if fname_item in fieldnames_set:
                raise SyntaxError("duplicate field %s in table %s" %
                                  (field_name, tablename))
            else:
                fieldnames_set.add(fname_item)
            self.fields.append(field_name)
            self[field_name] = field
            if field.type == 'id':
                self['id'] = field
            # bind the field back to this table/db
            field.tablename = field._tablename = tablename
            field.table = field._table = self
            field.db = field._db = db
        self.ALL = SQLALL(self)
        if _primarykey is not None:
            for k in _primarykey:
                if k not in self.fields:
                    # NOTE(review): this message is missing a closing quote
                    # after %s — confirm before changing the literal
                    raise SyntaxError(
                        "primarykey must be a list of fields from table '%s " %
                        tablename)
                else:
                    self[k].notnull = True
        for field in virtual_fields:
            self[field.name] = field

    @property
    def fields(self):
        """The ordered list of field names (a SQLCallableList)."""
        return self._fields

    def update(self, *args, **kwargs):
        """Not supported on Table (would conflict with SQL update semantics)."""
        raise RuntimeError("Syntax Not Supported")

    def _enable_record_versioning(self,
                                  archive_db=None,
                                  archive_name='%(tablename)s_archive',
                                  is_active='is_active',
                                  current_record='current_record',
                                  common_filter_note=None,
                                  current_record_label=None) if False else None
class Expression(object):
    """
    A node of a quasi-SQL expression tree: an adapter operation (`op`)
    applied to up to two operands (`first`, `second`). Fields, aggregates
    and arithmetic combinations all build Expressions; the adapter's
    expand() turns the tree into backend SQL. Comparison operators
    return Query objects instead.
    """

    def __init__(
        self,
        db,
        op,
        first=None,
        second=None,
        type=None,
        **optional_args
        ):
        self.db = db
        self.op = op
        self.first = first
        self.second = second
        self._table = getattr(first,'_table',None)
        ### self._tablename = first._tablename ## CHECK
        # result type defaults to the first operand's type when not given
        if not type and first and hasattr(first,'type'):
            self.type = first.type
        else:
            self.type = type
        self.optional_args = optional_args

    # --- aggregate expressions -------------------------------------------
    def sum(self):
        """SQL SUM() over this expression."""
        db = self.db
        return Expression(db, db._adapter.AGGREGATE, self, 'SUM', self.type)

    def max(self):
        """SQL MAX() over this expression."""
        db = self.db
        return Expression(db, db._adapter.AGGREGATE, self, 'MAX', self.type)

    def min(self):
        """SQL MIN() over this expression."""
        db = self.db
        return Expression(db, db._adapter.AGGREGATE, self, 'MIN', self.type)

    def len(self):
        """SQL length of this expression (integer result)."""
        db = self.db
        return Expression(db, db._adapter.LENGTH, self, None, 'integer')

    def avg(self):
        """SQL AVG() over this expression."""
        db = self.db
        return Expression(db, db._adapter.AGGREGATE, self, 'AVG', self.type)

    def abs(self):
        """SQL ABS() of this expression."""
        db = self.db
        return Expression(db, db._adapter.AGGREGATE, self, 'ABS', self.type)

    # --- string expressions ----------------------------------------------
    def lower(self):
        db = self.db
        return Expression(db, db._adapter.LOWER, self, None, self.type)

    def upper(self):
        db = self.db
        return Expression(db, db._adapter.UPPER, self, None, self.type)

    def replace(self, a, b):
        """SQL REPLACE of substring `a` with `b`."""
        db = self.db
        return Expression(db, db._adapter.REPLACE, self, (a, b), self.type)

    # --- date/time component extraction ------------------------------------
    def year(self):
        db = self.db
        return Expression(db, db._adapter.EXTRACT, self, 'year', 'integer')

    def month(self):
        db = self.db
        return Expression(db, db._adapter.EXTRACT, self, 'month', 'integer')

    def day(self):
        db = self.db
        return Expression(db, db._adapter.EXTRACT, self, 'day', 'integer')

    def hour(self):
        db = self.db
        return Expression(db, db._adapter.EXTRACT, self, 'hour', 'integer')

    def minutes(self):
        db = self.db
        return Expression(db, db._adapter.EXTRACT, self, 'minute', 'integer')

    def coalesce(self, *others):
        """SQL COALESCE(self, *others)."""
        db = self.db
        return Expression(db, db._adapter.COALESCE, self, others, self.type)

    def coalesce_zero(self):
        """SQL COALESCE(self, 0)."""
        db = self.db
        return Expression(db, db._adapter.COALESCE_ZERO, self, None, self.type)

    def seconds(self):
        db = self.db
        return Expression(db, db._adapter.EXTRACT, self, 'second', 'integer')

    def epoch(self):
        """Seconds since the epoch (adapter-specific EPOCH extraction)."""
        db = self.db
        return Expression(db, db._adapter.EPOCH, self, None, 'integer')

    def getitem(self, key, *keys):
        """Adapter GETITEM access (e.g. JSON/array element by key path)."""
        db = self.db
        keys=(key,)+keys
        return Expression(db, db._adapter.GETITEM, self, keys, 'string')

    def __getitem__(self, i):
        """
        Slicing builds an SQL SUBSTRING expression; SQL positions are
        1-based, hence the +1 adjustments. Negative bounds are expressed
        relative to len(). An integer index returns a 1-char slice.
        """
        if isinstance(i, slice):
            start = i.start or 0
            stop = i.stop
            db = self.db
            if start < 0:
                pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
            else:
                pos0 = start + 1
            maxint = sys.maxint if PY2 else sys.maxsize
            if stop is None or stop == maxint:
                length = self.len()
            elif stop < 0:
                length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
            else:
                length = '(%s - %s)' % (stop + 1, pos0)
            return Expression(db, db._adapter.SUBSTRING,
                              self, (pos0, length), self.type)
        else:
            return self[i:i + 1]

    def __str__(self):
        # expand to the SQL this expression represents
        return str(self.db._adapter.expand(self, self.type))

    def __or__(self, other):  # for use in sortby
        db = self.db
        return Expression(db, db._adapter.COMMA, self, other, self.type)

    def __invert__(self):
        db = self.db
        # NOTE(review): guard checks '_op' but the class stores 'op' —
        # hasattr(self,'_op') looks always-False here; confirm intent
        if hasattr(self,'_op') and self.op == db._adapter.INVERT:
            return self.first
        return Expression(db, db._adapter.INVERT, self, type=self.type)

    # --- arithmetic --------------------------------------------------------
    def __add__(self, other):
        db = self.db
        return Expression(db, db._adapter.ADD, self, other, self.type)

    def __sub__(self, other):
        db = self.db
        # subtraction result type depends on operand type (dates -> double)
        if self.type in ('integer', 'bigint'):
            result_type = 'integer'
        elif self.type in ['date','time','datetime','double','float']:
            result_type = 'double'
        elif self.type.startswith('decimal('):
            result_type = self.type
        else:
            raise SyntaxError("subtraction operation not supported for type")
        return Expression(db,db._adapter.SUB,self,other,result_type)

    def __mul__(self, other):
        db = self.db
        return Expression(db,db._adapter.MUL,self,other,self.type)

    def __div__(self, other):
        db = self.db
        return Expression(db,db._adapter.DIV,self,other,self.type)

    def __truediv__(self, other):
        # Python 3 division delegates to the Python 2 style __div__
        return self.__div__(other)

    def __mod__(self, other):
        db = self.db
        return Expression(db,db._adapter.MOD,self,other,self.type)

    # --- comparisons (return Query, not Expression) -------------------------
    def __eq__(self, value):
        db = self.db
        return Query(db, db._adapter.EQ, self, value)

    def __ne__(self, value):
        db = self.db
        return Query(db, db._adapter.NE, self, value)

    def __lt__(self, value):
        db = self.db
        return Query(db, db._adapter.LT, self, value)

    def __le__(self, value):
        db = self.db
        return Query(db, db._adapter.LE, self, value)

    def __gt__(self, value):
        db = self.db
        return Query(db, db._adapter.GT, self, value)

    def __ge__(self, value):
        db = self.db
        return Query(db, db._adapter.GE, self, value)

    def like(self, value, case_sensitive=True, escape=None):
        """SQL LIKE (or ILIKE when case_sensitive=False)."""
        db = self.db
        op = case_sensitive and db._adapter.LIKE or db._adapter.ILIKE
        return Query(db, op, self, value, escape=escape)

    def ilike(self, value, escape=None):
        """Case-insensitive LIKE."""
        return self.like(value, case_sensitive=False, escape=escape)

    def regexp(self, value):
        """Adapter-specific regular-expression match."""
        db = self.db
        return Query(db, db._adapter.REGEXP, self, value)

    def belongs(self, *value, **kwattr):
        """
        Accepts the following inputs::

           field.belongs(1, 2)
           field.belongs((1, 2))
           field.belongs(query)

        Does NOT accept:

               field.belongs(1)

        If the set you want back includes `None` values, you can do::

            field.belongs((1, None), null=True)

        """
        db = self.db
        if len(value) == 1:
            value = value[0]
        if isinstance(value,Query):
            # belongs(query) becomes an IN (subselect on the query's id)
            value = db(value)._select(value.first._table._id)
        elif not isinstance(value, basestring):
            value = set(value)
            if kwattr.get('null') and None in value:
                value.remove(None)
                return (self == None) | Query(db, db._adapter.BELONGS, self, value)
        return Query(db, db._adapter.BELONGS, self, value)

    def startswith(self, value):
        db = self.db
        if not self.type in ('string', 'text', 'json', 'upload'):
            raise SyntaxError("startswith used with incompatible field type")
        return Query(db, db._adapter.STARTSWITH, self, value)

    def endswith(self, value):
        db = self.db
        if not self.type in ('string', 'text', 'json', 'upload'):
            raise SyntaxError("endswith used with incompatible field type")
        return Query(db, db._adapter.ENDSWITH, self, value)

    def contains(self, value, all=False, case_sensitive=False):
        """
        For GAE contains() is always case sensitive
        """
        db = self.db
        if isinstance(value,(list, tuple)):
            # a sequence of values: AND/OR the individual contains queries
            subqueries = [self.contains(str(v),case_sensitive=case_sensitive)
                          for v in value if str(v)]
            if not subqueries:
                return self.contains('')
            else:
                return reduce(all and AND or OR,subqueries)
        if not self.type in ('string', 'text', 'json', 'upload') and not self.type.startswith('list:'):
            raise SyntaxError("contains used with incompatible field type")
        return Query(db, db._adapter.CONTAINS, self, value, case_sensitive=case_sensitive)

    def with_alias(self, alias):
        """SQL 'AS alias' for this expression."""
        db = self.db
        return Expression(db, db._adapter.AS, self, alias, self.type)

    # GIS expressions

    def st_asgeojson(self, precision=15, options=0, version=1):
        return Expression(self.db, self.db._adapter.ST_ASGEOJSON, self,
                          dict(precision=precision, options=options,
                               version=version), 'string')

    def st_astext(self):
        db = self.db
        return Expression(db, db._adapter.ST_ASTEXT, self, type='string')

    def st_x(self):
        db = self.db
        return Expression(db, db._adapter.ST_X, self, type='string')

    def st_y(self):
        db = self.db
        return Expression(db, db._adapter.ST_Y, self, type='string')

    def st_distance(self, other):
        db = self.db
        return Expression(db,db._adapter.ST_DISTANCE,self,other, 'double')

    def st_simplify(self, value):
        db = self.db
        return Expression(db, db._adapter.ST_SIMPLIFY, self, value, self.type)

    def st_simplifypreservetopology(self, value):
        db = self.db
        return Expression(db, db._adapter.ST_SIMPLIFYPRESERVETOPOLOGY, self, value, self.type)

    # GIS queries

    def st_contains(self, value):
        db = self.db
        return Query(db, db._adapter.ST_CONTAINS, self, value)

    def st_equals(self, value):
        db = self.db
        return Query(db, db._adapter.ST_EQUALS, self, value)

    def st_intersects(self, value):
        db = self.db
        return Query(db, db._adapter.ST_INTERSECTS, self, value)

    def st_overlaps(self, value):
        db = self.db
        return Query(db, db._adapter.ST_OVERLAPS, self, value)

    def st_touches(self, value):
        db = self.db
        return Query(db, db._adapter.ST_TOUCHES, self, value)

    def st_within(self, value):
        db = self.db
        return Query(db, db._adapter.ST_WITHIN, self, value)

    def st_dwithin(self, value, distance):
        db = self.db
        return Query(db, db._adapter.ST_DWITHIN, self, (value, distance))
class FieldVirtual(object):
    """
    A virtual (computed, non-stored) field definition.

    Two call styles are supported: ``FieldVirtual(name, func)`` and the
    legacy single-argument form ``FieldVirtual(func)``, in which case the
    field is named 'unknown'.
    """
    def __init__(self, name, f=None, ftype='string',label=None,table_name=None):
        # legacy form: only a callable was passed as `name`
        if f:
            self.name = name
            self.f = f
        else:
            self.name = 'unknown'
            self.f = name
        self.type = ftype
        if label:
            self.label = label
        else:
            # derive a human label from the field name
            self.label = self.name.capitalize().replace('_', ' ')
        self.represent = lambda v, r=None: v
        self.formatter = IDENTITY
        self.comment = None
        self.readable = True
        self.writable = False
        self.requires = None
        self.widget = None
        self.tablename = table_name
        self.filter_out = None
    def __str__(self):
        """'tablename.fieldname' representation."""
        return '%s.%s' % (self.tablename, self.name)
class FieldMethod(object):
    """
    A method-style virtual field: `f` is called lazily on a row.

    Two call styles are supported: ``FieldMethod(name, func)`` and the
    legacy single-argument form ``FieldMethod(func)``, in which case the
    field is named 'unknown'.
    """
    def __init__(self, name, f=None, handler=None):
        # legacy form: only a callable was passed as `name`
        if f:
            self.name = name
            self.f = f
        else:
            self.name = 'unknown'
            self.f = name
        self.handler = handler
@implements_bool
class Field(Expression, Serializable):
Virtual = FieldVirtual
Method = FieldMethod
Lazy = FieldMethod # for backward compatibility
"""
Represents a database field
Example:
Usage::
a = Field(name, 'string', length=32, default=None, required=False,
requires=IS_NOT_EMPTY(), ondelete='CASCADE',
notnull=False, unique=False,
uploadfield=True, widget=None, label=None, comment=None,
uploadfield=True, # True means store on disk,
# 'a_field_name' means store in this field in db
# False means file content will be discarded.
writable=True, readable=True, update=None, authorize=None,
autodelete=False, represent=None, uploadfolder=None,
uploadseparate=False # upload to separate directories by uuid_keys
# first 2 character and tablename.fieldname
# False - old behavior
# True - put uploaded file in
# <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]
# directory)
uploadfs=None # a pyfilesystem where to store upload
)
to be used as argument of `DAL.define_table`
"""
    def __init__(
        self,
        fieldname,
        type='string',
        length=None,
        default=DEFAULT,
        required=False,
        requires=DEFAULT,
        ondelete='CASCADE',
        notnull=False,
        unique=False,
        uploadfield=True,
        widget=None,
        label=None,
        comment=None,
        writable=True,
        readable=True,
        update=None,
        authorize=None,
        autodelete=False,
        represent=None,
        uploadfolder=None,
        uploadseparate=False,
        uploadfs=None,
        compute=None,
        custom_store=None,
        custom_retrieve=None,
        custom_retrieve_file_properties=None,
        custom_delete=None,
        filter_in=None,
        filter_out=None,
        custom_qualifier=None,
        map_none=None,
        rname=None
        ):
        # See the class docstring for the meaning of the keyword arguments.
        self._db = self.db = None # both for backward compatibility
        # Expression attributes; a bare Field acts as a leaf expression.
        self.op = None
        self.first = None
        self.second = None
        # Python 2 only: unicode names are accepted when coercible to ASCII.
        if PY2 and isinstance(fieldname, unicode):
            try:
                fieldname = str(fieldname)
            except UnicodeEncodeError:
                raise SyntaxError('Field: invalid unicode field name')
        self.name = fieldname = cleanup(fieldname)
        # Reject names that clash with Table attributes, are not valid
        # identifiers, or are Python keywords; `rname` exists for such names.
        if (not isinstance(fieldname, str) or hasattr(Table, fieldname)
            or not REGEX_VALID_TB_FLD.match(fieldname)
            or REGEX_PYTHON_KEYWORDS.match(fieldname)
            ):
            raise SyntaxError('Field: invalid field name: %s, '
                              'use rname for "funny" names' % fieldname)
        if not isinstance(type, (Table, Field)):
            self.type = type
        else:
            # Passing a Table or Field makes this a reference field.
            self.type = 'reference %s' % type
        self.length = length if not length is None else DEFAULTLENGTH.get(self.type, 512)
        # `update` doubles as the default when no explicit default is given.
        self.default = default if default != DEFAULT else (update or None)
        self.required = required # is this field required
        self.ondelete = ondelete.upper() # this is for reference fields only
        self.notnull = notnull
        self.unique = unique
        self.uploadfield = uploadfield
        self.uploadfolder = uploadfolder
        self.uploadseparate = uploadseparate
        self.uploadfs = uploadfs
        self.widget = widget
        self.comment = comment
        self.writable = writable
        self.readable = readable
        self.update = update
        self.authorize = authorize
        self.autodelete = autodelete
        # list: types get a default representation when none is supplied.
        self.represent = list_represent if represent is None and \
            type in ('list:integer', 'list:string') else represent
        self.compute = compute
        self.isattachment = True
        self.custom_store = custom_store
        self.custom_retrieve = custom_retrieve
        self.custom_retrieve_file_properties = custom_retrieve_file_properties
        self.custom_delete = custom_delete
        self.filter_in = filter_in
        self.filter_out = filter_out
        self.custom_qualifier = custom_qualifier
        self.label = (label if label is not None else
                      fieldname.replace('_', ' ').title())
        self.requires = requires if requires is not None else []
        self.map_none = map_none
        self._rname = rname
def set_attributes(self, *args, **attributes):
self.__dict__.update(*args, **attributes)
def clone(self, point_self_references_to=False, **args):
field = copy.copy(self)
if point_self_references_to and \
field.type == 'reference %s'+field._tablename:
field.type = 'reference %s' % point_self_references_to
field.__dict__.update(args)
return field
    def store(self, file, filename=None, path=None):
        """
        Store the uploaded `file` and return the new encoded file name.

        The generated name embeds table, field, a uuid key and the
        base16-encoded original name, truncated to fit self.length.
        Depending on self.uploadfield the payload goes to another table's
        blob field, to a pyfilesystem (self.uploadfs) or to disk.
        """
        if self.custom_store:
            return self.custom_store(file, filename, path)
        if isinstance(file, cgi.FieldStorage):
            filename = filename or file.filename
            file = file.file
        elif not filename:
            filename = file.name
        # Strip any client-supplied directory components.
        filename = os.path.basename(filename.replace('/', os.sep).replace('\\', os.sep))
        m = REGEX_STORE_PATTERN.search(filename)
        extension = m and m.group('e') or 'txt'
        uuid_key = self._db.uuid().replace('-', '')[-16:]
        # The original name is recoverable: see retrieve_file_properties().
        encoded_filename = base64.b16encode(filename).lower()
        newfilename = '%s.%s.%s.%s' % \
            (self._tablename, self.name, uuid_key, encoded_filename)
        newfilename = newfilename[:(self.length - 1 - len(extension))] + '.' + extension
        self_uploadfield = self.uploadfield
        if isinstance(self_uploadfield, Field):
            # Store the content in another table's blob field.
            blob_uploadfield_name = self_uploadfield.uploadfield
            keys = {self_uploadfield.name: newfilename,
                    blob_uploadfield_name: file.read()}
            self_uploadfield.table.insert(**keys)
        elif self_uploadfield is True:
            # Store on disk (or on self.uploadfs when configured).
            if path:
                pass
            elif self.uploadfolder:
                path = self.uploadfolder
            elif self.db._adapter.folder:
                path = pjoin(self.db._adapter.folder, '..', 'uploads')
            else:
                raise RuntimeError(
                    "you must specify a Field(...,uploadfolder=...)")
            if self.uploadseparate:
                if self.uploadfs:
                    raise RuntimeError("not supported")
                # Shard uploads into per-field directories keyed by the
                # first two characters of the uuid key.
                path = pjoin(path, "%s.%s" % (
                    self._tablename, self.name), uuid_key[:2]
                )
            if not exists(path):
                os.makedirs(path)
            pathfilename = pjoin(path, newfilename)
            if self.uploadfs:
                dest_file = self.uploadfs.open(newfilename, 'wb')
            else:
                dest_file = open(pathfilename, 'wb')
            try:
                shutil.copyfileobj(file, dest_file)
            except IOError:
                raise IOError(
                    'Unable to store file "%s" because invalid permissions, '
                    'readonly file system, or filename too long' % pathfilename)
            dest_file.close()
        return newfilename
    def retrieve(self, name, path=None, nameonly=False):
        """
        Retrieve a previously stored upload by its encoded `name`.

        Returns (filename, stream); if `nameonly==True` return
        (filename, fullfilename) instead of (filename, stream).
        Raises NotFoundException / NotAuthorizedException when the row is
        missing or self.authorize denies access.
        """
        self_uploadfield = self.uploadfield
        if self.custom_retrieve:
            return self.custom_retrieve(name, path)
        if self.authorize or isinstance(self_uploadfield, str):
            # Need the row either to authorize or to read the blob column.
            row = self.db(self == name).select().first()
            if not row:
                raise NotFoundException
        if self.authorize and not self.authorize(row):
            raise NotAuthorizedException
        file_properties = self.retrieve_file_properties(name, path)
        filename = file_properties['filename']
        if isinstance(self_uploadfield, str):  # ## if file is in DB
            stream = StringIO(row[self_uploadfield] or '')
        elif isinstance(self_uploadfield, Field):
            # Content lives in another table's blob field.
            blob_uploadfield_name = self_uploadfield.uploadfield
            query = self_uploadfield == name
            data = self_uploadfield.table(query)[blob_uploadfield_name]
            stream = StringIO(data)
        elif self.uploadfs:
            # ## if file is on pyfilesystem
            stream = self.uploadfs.open(name, 'rb')
        else:
            # ## if file is on regular filesystem
            # this is intentially a sting with filename and not a stream
            # this propagates and allows stream_file_or_304_or_206 to be called
            fullname = pjoin(file_properties['path'], name)
            if nameonly:
                return (filename, fullname)
            stream = open(fullname, 'rb')
        return (filename, stream)
    def retrieve_file_properties(self, name, path=None):
        """
        Decode an encoded upload `name` (see store()) into
        dict(path=..., filename=...); path is None when the content
        is stored in the database.
        """
        m = REGEX_UPLOAD_PATTERN.match(name)
        if not m or not self.isattachment:
            raise TypeError('Can\'t retrieve %s file properties' % name)
        self_uploadfield = self.uploadfield
        if self.custom_retrieve_file_properties:
            return self.custom_retrieve_file_properties(name, path)
        if m.group('name'):
            try:
                # The original filename was base16-encoded by store().
                filename = base64.b16decode(m.group('name'), True)
                filename = REGEX_CLEANUP_FN.sub('_', filename)
            except (TypeError, AttributeError):
                filename = name
        else:
            filename = name
        # ## if file is in DB
        if isinstance(self_uploadfield, (str, Field)):
            return dict(path=None, filename=filename)
        # ## if file is on filesystem
        if not path:
            if self.uploadfolder:
                path = self.uploadfolder
            else:
                path = pjoin(self.db._adapter.folder, '..', 'uploads')
        if self.uploadseparate:
            # Mirror the sharded directory layout used by store().
            t = m.group('table')
            f = m.group('field')
            u = m.group('uuidkey')
            path = pjoin(path, "%s.%s" % (t, f), u[:2])
        return dict(path=path, filename=filename)
def formatter(self, value):
requires = self.requires
if value is None:
return self.map_none
if not requires:
return value
if not isinstance(requires, (list, tuple)):
requires = [requires]
elif isinstance(requires, tuple):
requires = list(requires)
else:
requires = copy.copy(requires)
requires.reverse()
for item in requires:
if hasattr(item, 'formatter'):
value = item.formatter(value)
return value
def validate(self, value):
if not self.requires or self.requires == DEFAULT:
return ((value if value != self.map_none else None), None)
requires = self.requires
if not isinstance(requires, (list, tuple)):
requires = [requires]
for validator in requires:
(value, error) = validator(value)
if error:
return (value, error)
return ((value if value != self.map_none else None), None)
def count(self, distinct=None):
return Expression(self.db, self.db._adapter.COUNT, self, distinct, 'integer')
    def as_dict(self, flat=False, sanitize=True):
        """
        Serialize this field's attributes to a plain dict.

        Args:
            flat: when True, nested values are recursively reduced to
                JSON-safe primitives (dates become strings, unknown
                objects become None).
            sanitize: when True, an unreadable and unwritable field
                serializes to an empty dict.
        """
        attrs = (
            'name', 'authorize', 'represent', 'ondelete',
            'custom_store', 'autodelete', 'custom_retrieve',
            'filter_out', 'uploadseparate', 'widget', 'uploadfs',
            'update', 'custom_delete', 'uploadfield', 'uploadfolder',
            'custom_qualifier', 'unique', 'writable', 'compute',
            'map_none', 'default', 'type', 'required', 'readable',
            'requires', 'comment', 'label', 'length', 'notnull',
            'custom_retrieve_file_properties', 'filter_in')
        serializable = (int, long, basestring, float, tuple,
                        bool, type(None))
        def flatten(obj):
            # Recursively reduce obj to JSON-safe primitives.
            if isinstance(obj, dict):
                return dict((flatten(k), flatten(v)) for k, v in obj.items())
            elif isinstance(obj, (tuple, list, set)):
                return [flatten(v) for v in obj]
            elif isinstance(obj, serializable):
                return obj
            elif isinstance(obj, (datetime.datetime,
                                  datetime.date, datetime.time)):
                return str(obj)
            else:
                # Non-serializable objects (callables, validators) drop out.
                return None
        d = dict()
        if not (sanitize and not (self.readable or self.writable)):
            for attr in attrs:
                if flat:
                    d.update({attr: flatten(getattr(self, attr))})
                else:
                    d.update({attr: getattr(self, attr)})
            d["fieldname"] = d.pop("name")
        return d
    def __bool__(self):
        # A Field instance is always truthy (even when empty or unbound).
        return True
def __str__(self):
try:
return '%s.%s' % (self.tablename, self.name)
except:
return '<no table>.%s' % self.name
@property
def sqlsafe(self):
if self._table:
return self._table.sqlsafe + '.' + \
(self._rname or self._db._adapter.sqlsafe_field(self.name))
return '<no table>.%s' % self.name
@property
def sqlsafe_name(self):
return self._rname or self._db._adapter.sqlsafe_field(self.name)
class Query(Serializable):
"""
Necessary to define a set.
It can be stored or can be passed to `DAL.__call__()` to obtain a `Set`
Example:
Use as::
query = db.users.name=='Max'
set = db(query)
records = set.select()
"""
def __init__(
self,
db,
op,
first=None,
second=None,
ignore_common_filters=False,
**optional_args
):
self.db = self._db = db
self.op = op
self.first = first
self.second = second
self.ignore_common_filters = ignore_common_filters
self.optional_args = optional_args
def __repr__(self):
from .adapters.base import BaseAdapter
return '<Query %s>' % BaseAdapter.expand(self.db._adapter,self)
def __str__(self):
return str(self.db._adapter.expand(self))
def __and__(self, other):
return Query(self.db,self.db._adapter.AND,self,other)
__rand__ = __and__
def __or__(self, other):
return Query(self.db,self.db._adapter.OR,self,other)
__ror__ = __or__
def __invert__(self):
if self.op==self.db._adapter.NOT:
return self.first
return Query(self.db,self.db._adapter.NOT,self)
def __eq__(self, other):
return repr(self) == repr(other)
def __ne__(self, other):
return not (self == other)
def case(self,t=1,f=0):
return self.db._adapter.CASE(self,t,f)
    def as_dict(self, flat=False, sanitize=True):
        """Experimental stuff
        This allows to return a plain dictionary with the basic
        query representation. Can be used with json/xml services
        for client-side db I/O
        Example:
            Usage::
                q = db.auth_user.id != 0
                q.as_dict(flat=True)
                {
                "op": "NE",
                "first":{
                    "tablename": "auth_user",
                    "fieldname": "id"
                    },
                "second":0
                }
        """
        SERIALIZABLE_TYPES = (tuple, dict, set, list, int, long, float,
                              basestring, type(None), bool)
        def loop(d):
            # Recursively serialize a Query/Expression __dict__.
            newd = dict()
            for k, v in d.items():
                if k in ("first", "second"):
                    # Operands: nested queries/expressions recurse, fields
                    # reduce to their table/field names, dates stringify.
                    if isinstance(v, self.__class__):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, Field):
                        newd[k] = {"tablename": v._tablename,
                                   "fieldname": v.name}
                    elif isinstance(v, Expression):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, SERIALIZABLE_TYPES):
                        newd[k] = v
                    elif isinstance(v, (datetime.date,
                                        datetime.time,
                                        datetime.datetime)):
                        newd[k] = unicode(v) if PY2 else str(v)
                elif k == "op":
                    # Adapter operators are bound methods; keep their name.
                    if callable(v):
                        newd[k] = v.__name__
                    elif isinstance(v, basestring):
                        newd[k] = v
                    else:
                        pass  # not callable or string
                elif isinstance(v, SERIALIZABLE_TYPES):
                    if isinstance(v, dict):
                        newd[k] = loop(v)
                    else:
                        newd[k] = v
            return newd
        if flat:
            return loop(self.__dict__)
        else:
            return self.__dict__
class Set(Serializable):
"""
Represents a set of records in the database.
Records are identified by the `query=Query(...)` object.
Normally the Set is generated by `DAL.__call__(Query(...))`
Given a set, for example::
myset = db(db.users.name=='Max')
you can::
myset.update(db.users.name='Massimo')
myset.delete() # all elements in the set
myset.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))
and take subsets:
subset = myset(db.users.id<5)
"""
def __init__(self, db, query, ignore_common_filters = None):
self.db = db
self._db = db # for backward compatibility
self.dquery = None
# if query is a dict, parse it
if isinstance(query, dict):
query = self.parse(query)
if not ignore_common_filters is None and \
use_common_filters(query) == ignore_common_filters:
query = copy.copy(query)
query.ignore_common_filters = ignore_common_filters
self.query = query
def __repr__(self):
from .adapters.base import BaseAdapter
return '<Set %s>' % BaseAdapter.expand(self.db._adapter,self.query)
def __call__(self, query, ignore_common_filters=False):
if query is None:
return self
elif isinstance(query,Table):
query = self.db._adapter.id_query(query)
elif isinstance(query,str):
query = Expression(self.db,query)
elif isinstance(query,Field):
query = query!=None
if self.query:
return Set(self.db, self.query & query,
ignore_common_filters=ignore_common_filters)
else:
return Set(self.db, query,
ignore_common_filters=ignore_common_filters)
def _count(self,distinct=None):
return self.db._adapter._count(self.query,distinct)
def _select(self, *fields, **attributes):
adapter = self.db._adapter
tablenames = adapter.tables(self.query,
attributes.get('join',None),
attributes.get('left',None),
attributes.get('orderby',None),
attributes.get('groupby',None))
fields = adapter.expand_all(fields, tablenames)
return adapter._select(self.query,fields,attributes)
def _delete(self):
db = self.db
tablename = db._adapter.get_table(self.query)
return db._adapter._delete(tablename,self.query)
def _update(self, **update_fields):
db = self.db
tablename = db._adapter.get_table(self.query)
fields = db[tablename]._listify(update_fields,update=True)
return db._adapter._update(tablename,self.query,fields)
def as_dict(self, flat=False, sanitize=True):
if flat:
uid = dbname = uri = None
codec = self.db._db_codec
if not sanitize:
uri, dbname, uid = (self.db._dbname, str(self.db),
self.db._db_uid)
d = {"query": self.query.as_dict(flat=flat)}
d["db"] = {"uid": uid, "codec": codec,
"name": dbname, "uri": uri}
return d
else:
return self.__dict__
def parse(self, dquery):
"Experimental: Turn a dictionary into a Query object"
self.dquery = dquery
return self.build(self.dquery)
    def build(self, d):
        "Experimental: see .parse()"
        # d is {"op": <name>, "first": <operand>, "second": <operand>};
        # operands may themselves be nested op-dicts or field references.
        op, first, second = (d["op"], d["first"],
                             d.get("second", None))
        left = right = built = None
        if op in ("AND", "OR"):
            # Logical connectives require two sub-query dicts.
            if not (type(first), type(second)) == (dict, dict):
                raise SyntaxError("Invalid AND/OR query")
            if op == "AND":
                built = self.build(first) & self.build(second)
            else: built = self.build(first) | self.build(second)
        elif op == "NOT":
            if first is None:
                raise SyntaxError("Invalid NOT query")
            built = ~self.build(first)
        else:
            # normal operation (GT, EQ, LT, ...)
            for k, v in {"left": first, "right": second}.items():
                # Nested op-dicts recurse; field refs resolve via the db.
                if isinstance(v, dict) and v.get("op"):
                    v = self.build(v)
                if isinstance(v, dict) and ("tablename" in v):
                    v = self.db[v["tablename"]][v["fieldname"]]
                if k == "left": left = v
                else: right = v
            if hasattr(self.db._adapter, op):
                opm = getattr(self.db._adapter, op)
            # Comparison ops build real Query objects via operator overloads;
            # the remaining ops build Expression objects with 0/1/2 operands.
            if op == "EQ": built = left == right
            elif op == "NE": built = left != right
            elif op == "GT": built = left > right
            elif op == "GE": built = left >= right
            elif op == "LT": built = left < right
            elif op == "LE": built = left <= right
            elif op in ("JOIN", "LEFT_JOIN", "RANDOM", "ALLOW_NULL"):
                built = Expression(self.db, opm)
            elif op in ("LOWER", "UPPER", "EPOCH", "PRIMARY_KEY",
                        "COALESCE_ZERO", "RAW", "INVERT"):
                built = Expression(self.db, opm, left)
            elif op in ("COUNT", "EXTRACT", "AGGREGATE", "SUBSTRING",
                        "REGEXP", "LIKE", "ILIKE", "STARTSWITH",
                        "ENDSWITH", "ADD", "SUB", "MUL", "DIV",
                        "MOD", "AS", "ON", "COMMA", "NOT_NULL",
                        "COALESCE", "CONTAINS", "BELONGS"):
                built = Expression(self.db, opm, left, right)
            # expression as string
            elif not (left or right): built = Expression(self.db, op)
            else:
                raise SyntaxError("Operator not supported: %s" % op)
        return built
def isempty(self):
return not self.select(limitby=(0,1), orderby_on_limitby=False)
    def count(self, distinct=None, cache=None):
        """
        Count the records in the set.

        Args:
            distinct: optional field/expression for COUNT(DISTINCT ...).
            cache: either a (cache_model, time_expire) tuple or a dict
                with 'model', 'expiration' and optional 'key'; when given,
                the count is memoized under a key derived from the SQL.
        """
        db = self.db
        if cache:
            sql = self._count(distinct=distinct)
            if isinstance(cache, dict):
                cache_model = cache['model']
                time_expire = cache['expiration']
                key = cache.get('key')
                if not key:
                    # Default cache key: md5 of uri + generated SQL.
                    key = db._uri + '/' + sql
                    key = hashlib_md5(key).hexdigest()
            else:
                cache_model, time_expire = cache
                key = db._uri + '/' + sql
                key = hashlib_md5(key).hexdigest()
            return cache_model(
                key,
                (lambda self=self, distinct=distinct: \
                  db._adapter.count(self.query, distinct)),
                time_expire)
        return db._adapter.count(self.query, distinct)
def select(self, *fields, **attributes):
adapter = self.db._adapter
tablenames = adapter.tables(self.query,
attributes.get('join',None),
attributes.get('left',None),
attributes.get('orderby',None),
attributes.get('groupby',None))
fields = adapter.expand_all(fields, tablenames)
return adapter.select(self.query,fields,attributes)
def iterselect(self, *fields, **attributes):
adapter = self.db._adapter
tablenames = adapter.tables(self.query,
attributes.get('join',None),
attributes.get('left',None),
attributes.get('orderby',None),
attributes.get('groupby',None))
fields = adapter.expand_all(fields, tablenames)
return adapter.iterselect(self.query,fields,attributes)
def nested_select(self,*fields,**attributes):
return Expression(self.db,self._select(*fields,**attributes))
def delete(self):
db = self.db
tablename = db._adapter.get_table(self.query)
table = db[tablename]
if any(f(self) for f in table._before_delete): return 0
ret = db._adapter.delete(tablename,self.query)
ret and [f(self) for f in table._after_delete]
return ret
def update(self, **update_fields):
db = self.db
tablename = db._adapter.get_table(self.query)
table = db[tablename]
table._attempt_upload(update_fields)
if any(f(self,update_fields) for f in table._before_update):
return 0
fields = table._listify(update_fields,update=True)
if not fields:
raise SyntaxError("No fields to update")
ret = db._adapter.update("%s" % table._tablename,self.query,fields)
ret and [f(self,update_fields) for f in table._after_update]
return ret
def update_naive(self, **update_fields):
"""
Same as update but does not call table._before_update and _after_update
"""
tablename = self.db._adapter.get_table(self.query)
table = self.db[tablename]
fields = table._listify(update_fields,update=True)
if not fields: raise SyntaxError("No fields to update")
ret = self.db._adapter.update("%s" % table,self.query,fields)
return ret
    def validate_and_update(self, **update_fields):
        """
        Validate update_fields with each field's validators, then update.

        Returns a Row with `errors` (field -> message) and `updated`
        (affected row count, or None when validation failed).
        """
        tablename = self.db._adapter.get_table(self.query)
        response = Row()
        response.errors = Row()
        new_fields = copy.copy(update_fields)
        # Validate each incoming value; keep the validator-transformed value.
        for key, value in iteritems(update_fields):
            value, error = self.db[tablename][key].validate(value)
            if error:
                response.errors[key] = '%s' % error
            else:
                new_fields[key] = value
        table = self.db[tablename]
        if response.errors:
            response.updated = None
        else:
            # _before_update callbacks can veto the update by returning True.
            if not any(f(self, new_fields) for f in table._before_update):
                table._attempt_upload(new_fields)
                fields = table._listify(new_fields, update=True)
                if not fields: raise SyntaxError("No fields to update")
                ret = self.db._adapter.update(tablename, self.query, fields)
                ret and [f(self, new_fields) for f in table._after_update]
            else:
                ret = 0
            response.updated = ret
        return response
    def delete_uploaded_files(self, upload_fields=None):
        """
        Remove from disk the files referenced by the set's upload fields
        (those with uploadfield=True and autodelete=True). When
        `upload_fields` is given, files whose stored name equals the new
        value in `upload_fields` are kept (they are being re-assigned).
        """
        table = self.db[self.db._adapter.tables(self.query)[0]]
        # ## mind uploadfield==True means file is not in DB
        if upload_fields:
            fields = list(upload_fields)
            # Explicity add compute upload fields (ex: thumbnail)
            fields += [f for f in table.fields if table[f].compute is not None]
        else:
            fields = table.fields
        fields = [f for f in fields if table[f].type == 'upload'
                  and table[f].uploadfield == True
                  and table[f].autodelete]
        if not fields:
            return False
        for record in self.select(*[table[f] for f in fields]):
            for fieldname in fields:
                field = table[fieldname]
                oldname = record.get(fieldname, None)
                if not oldname:
                    continue
                # Skip files that are being replaced by the same name.
                if (upload_fields and fieldname in upload_fields and
                        oldname == upload_fields[fieldname]):
                    continue
                if field.custom_delete:
                    field.custom_delete(oldname)
                else:
                    uploadfolder = field.uploadfolder
                    if not uploadfolder:
                        uploadfolder = pjoin(
                            self.db._adapter.folder, '..', 'uploads')
                    if field.uploadseparate:
                        # Mirror the sharded layout used by Field.store().
                        items = oldname.split('.')
                        uploadfolder = pjoin(
                            uploadfolder,
                            "%s.%s" % (items[0], items[1]),
                            items[2][:2])
                    oldpath = pjoin(uploadfolder, oldname)
                    if exists(oldpath):
                        os.unlink(oldpath)
        return False
class LazyReferenceGetter(object):
    """
    Callable that resolves row.<other_table> into a LazySet of the records
    in `other_table` that reference the given record id.
    """
    def __init__(self, table, id):
        self.db = table._db
        self.tablename = table._tablename
        self.id = id

    def __call__(self, other_tablename):
        if self.db._lazy_tables is False:
            raise AttributeError()
        table = self.db[self.tablename]
        other_table = self.db[other_tablename]
        # Find the foreign key in other_table that points back at us.
        for rfield in table._referenced_by:
            if rfield.table == other_table:
                return LazySet(rfield, self.id)
        raise AttributeError()
class LazySet(object):
    """
    A Set built on demand from (field == id); every operation delegates
    to a freshly constructed Set so the query is never stale.
    """
    def __init__(self, field, id):
        self.db = field.db
        self.tablename = field._tablename
        self.fieldname = field.name
        self.id = id

    def _getset(self):
        # Rebuild the underlying Set each time it is needed.
        field = self.db[self.tablename][self.fieldname]
        return Set(self.db, field == self.id)

    def __repr__(self):
        return repr(self._getset())

    def __call__(self, query, ignore_common_filters=False):
        return self._getset()(query, ignore_common_filters)

    def _count(self, distinct=None):
        return self._getset()._count(distinct)

    def _select(self, *fields, **attributes):
        return self._getset()._select(*fields, **attributes)

    def _delete(self):
        return self._getset()._delete()

    def _update(self, **update_fields):
        return self._getset()._update(**update_fields)

    def isempty(self):
        return self._getset().isempty()

    def count(self, distinct=None, cache=None):
        return self._getset().count(distinct, cache)

    def select(self, *fields, **attributes):
        return self._getset().select(*fields, **attributes)

    def nested_select(self, *fields, **attributes):
        return self._getset().nested_select(*fields, **attributes)

    def delete(self):
        return self._getset().delete()

    def update(self, **update_fields):
        return self._getset().update(**update_fields)

    def update_naive(self, **update_fields):
        return self._getset().update_naive(**update_fields)

    def validate_and_update(self, **update_fields):
        return self._getset().validate_and_update(**update_fields)

    def delete_uploaded_files(self, upload_fields=None):
        return self._getset().delete_uploaded_files(upload_fields)
class VirtualCommand(object):
    """Bind a virtual-field method to a specific row for lazy evaluation."""

    def __init__(self, method, row):
        self.method = method
        self.row = row

    def __call__(self, *args, **kwargs):
        # Invoke the bound method with the row as its first argument.
        return self.method(self.row, *args, **kwargs)
@implements_bool
class BasicRows(object):
"""
Abstract class for Rows and IterRows
"""
def __bool__(self):
return True if self.first() is not None else False
def __str__(self):
"""
Serializes the table into a csv file
"""
s = StringIO()
self.export_to_csv_file(s)
return s.getvalue()
    def as_trees(self, parent_name='parent_id', children_name='children',
                 render=False):
        """
        returns the data as list of trees.
        :param parent_name: the name of the field holding the reference to the
                            parent (default parent_id).
        :param children_name: the name where the children of each row will be
                              stored as a list (default children).
        :param render: whether we will render the fields using their represent
                       (default False) can be a list of fields to render or
                       True to render all.
        """
        roots = []
        drows = {}
        rows = list(self.render(fields=None if render is True else render)) \
            if render else self
        # First pass: index every row by id and give it an empty child list.
        for row in rows:
            drows[row.id] = row
            row[children_name] = []
        # Second pass: attach each row to its parent; rows without a parent
        # become roots. Assumes parents appear in the same result set.
        for row in rows:
            parent = row[parent_name]
            if parent is None:
                roots.append(row)
            else:
                drows[parent][children_name].append(row)
        return roots
def as_list(self,
compact=True,
storage_to_dict=True,
datetime_to_str=False,
custom_types=None):
"""
Returns the data as a list or dictionary.
Args:
storage_to_dict: when True returns a dict, otherwise a list
datetime_to_str: convert datetime fields as strings
"""
(oc, self.compact) = (self.compact, compact)
if storage_to_dict:
items = [item.as_dict(datetime_to_str, custom_types)
for item in self]
else:
items = [item for item in self]
self.compact = oc
return items
def as_dict(self,
key='id',
compact=True,
storage_to_dict=True,
datetime_to_str=False,
custom_types=None):
"""
Returns the data as a dictionary of dictionaries (storage_to_dict=True)
or records (False)
Args:
key: the name of the field to be used as dict key, normally the id
compact: ? (default True)
storage_to_dict: when True returns a dict, otherwise a list(default True)
datetime_to_str: convert datetime fields as strings (default False)
"""
# test for multiple rows
multi = False
f = self.first()
if f and isinstance(key, basestring):
multi = any([isinstance(v, f.__class__) for v in f.values()])
if ("." not in key) and multi:
# No key provided, default to int indices
def new_key():
i = 0
while True:
yield i
i += 1
key_generator = new_key()
key = lambda r: key_generator.next()
rows = self.as_list(compact, storage_to_dict, datetime_to_str,
custom_types)
if isinstance(key, str) and key.count('.') == 1:
(table, field) = key.split('.')
return dict([(r[table][field], r) for r in rows])
elif isinstance(key, str):
return dict([(r[key], r) for r in rows])
else:
return dict([(key(r), r) for r in rows])
    def xml(self, strict=False, row_name='row', rows_name='rows'):
        """
        Serializes the table using sqlhtml.SQLTABLE (if present)
        """
        # Without a registered 'rows_xml' representer, fall back to the
        # strict (plain XML) serialization.
        if not strict and not self.db.has_representer('rows_xml'):
            strict = True
        if strict:
            return '<%s>\n%s\n</%s>' % (
                rows_name,
                '\n'.join(
                    row.as_xml(
                        row_name=row_name,
                        colnames=self.colnames
                    ) for row in self),
                rows_name
            )
        rv = self.db.represent('rows_xml', self)
        # Some representers return an object exposing .xml() instead of text.
        if hasattr(rv, 'xml') and callable(getattr(rv, 'xml')):
            return rv.xml()
        return rv
def as_xml(self, row_name='row', rows_name='rows'):
return self.xml(strict=True, row_name=row_name, rows_name=rows_name)
def as_json(self, mode='object', default=None):
"""
Serializes the rows to a JSON list or object with objects
mode='object' is not implemented (should return a nested
object structure)
"""
items = [record.as_json(
mode=mode, default=default, serialize=False,
colnames=self.colnames
) for record in self]
return serializers.json(items)
    def export_to_csv_file(self, ofile, null='<NULL>', *args, **kwargs):
        """
        Exports data to csv, the first line contains the column names

        Args:
            ofile: where the csv must be exported to
            null: how null values must be represented (default '<NULL>')
            delimiter: delimiter to separate values (default ',')
            quotechar: character to use to quote string values (default '"')
            quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
            represent: use the fields .represent value (default False)
            colnames: list of column names to use (default self.colnames)
                      This will only work when exporting rows objects!!!!
                      DO NOT use this with db.export_to_csv()
        """
        delimiter = kwargs.get('delimiter', ',')
        quotechar = kwargs.get('quotechar', '"')
        quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
        represent = kwargs.get('represent', False)
        writer = csv.writer(ofile, delimiter=delimiter,
                            quotechar=quotechar, quoting=quoting)
        def unquote_colnames(colnames):
            # Turn quoted 'table'.'field' column names into table.field.
            unq_colnames = []
            for col in colnames:
                m = self.db._adapter.REGEX_TABLE_DOT_FIELD.match(col)
                if not m:
                    unq_colnames.append(col)
                else:
                    unq_colnames.append('.'.join(m.groups()))
            return unq_colnames
        colnames = kwargs.get('colnames', self.colnames)
        write_colnames = kwargs.get('write_colnames', True)
        # a proper csv starting with the column names
        if write_colnames:
            writer.writerow(unquote_colnames(colnames))
        def none_exception(value):
            """
            Returns a cleaned up value that can be used for csv export:
            - unicode text is encoded as such
            - None values are replaced with the given representation (default <NULL>)
            """
            if value is None:
                return null
            elif PY2 and isinstance(value, unicode):
                return value.encode('utf8')
            elif isinstance(value, Reference):
                return long(value)
            elif hasattr(value, 'isoformat'):
                # datetime/date/time: ISO format without microseconds.
                return value.isoformat()[:19].replace('T', ' ')
            elif isinstance(value, (list, tuple)):  # for type='list:..'
                return bar_encode(value)
            return value
        # Memoize represent() results per field for reference columns.
        repr_cache = {}
        for record in self:
            row = []
            for col in colnames:
                m = self.db._adapter.REGEX_TABLE_DOT_FIELD.match(col)
                if not m:
                    # Computed/extra column: value lives in record._extra.
                    row.append(record._extra[col])
                else:
                    (t, f) = m.groups()
                    field = self.db[t][f]
                    # Joined selects nest values per-table; plain selects
                    # expose the field directly on the record.
                    if isinstance(record.get(t, None), (Row, dict)):
                        value = record[t][f]
                    else:
                        value = record[f]
                    if field.type == 'blob' and value is not None:
                        value = base64.b64encode(value)
                    elif represent and field.represent:
                        if field.type.startswith('reference'):
                            if field not in repr_cache:
                                repr_cache[field] = {}
                            if value not in repr_cache[field]:
                                repr_cache[field][value] = field.represent(
                                    value, record
                                )
                            value = repr_cache[field][value]
                        else:
                            value = field.represent(value, record)
                    row.append(none_exception(value))
            writer.writerow(row)
# for consistent naming yet backwards compatible
as_csv = __str__
json = as_json
class Rows(BasicRows):
"""
A wrapper for the return value of a select. It basically represents a table.
It has an iterator and each row is represented as a `Row` dictionary.
"""
# ## TODO: this class still needs some work to care for ID/OID
def __init__(self, db=None, records=[], colnames=[], compact=True,
rawrows=None):
self.db = db
self.records = records
self.colnames = colnames
self.compact = compact
self.response = rawrows
def __repr__(self):
return '<Rows (%s)>' % len(self.records)
    def setvirtualfields(self, **keyed_virtualfields):
        """
        Attach virtual-field objects to every record, keyed by table name.

        For reference::
            db.define_table('x',Field('number','integer'))
            if db(db.x).isempty(): [db.x.insert(number=i) for i in range(10)]
            from gluon.dal import lazy_virtualfield
            class MyVirtualFields(object):
                # normal virtual field (backward compatible, discouraged)
                def normal_shift(self): return self.x.number+1
                # lazy virtual field (because of @staticmethod)
                @lazy_virtualfield
                def lazy_shift(instance,row,delta=4): return row.x.number+delta
            db.x.virtualfields.append(MyVirtualFields())
            for row in db(db.x).select():
                print row.number, row.normal_shift, row.lazy_shift(delta=7)
        """
        if not keyed_virtualfields:
            return self
        for row in self.records:
            for (tablename, virtualfields) in iteritems(keyed_virtualfields):
                attributes = dir(virtualfields)
                # Virtual values are stored in the per-table sub-row (box).
                if tablename not in row:
                    box = row[tablename] = Row()
                else:
                    box = row[tablename]
                updated = False
                for attribute in attributes:
                    if attribute[0] != '_':
                        method = getattr(virtualfields, attribute)
                        if hasattr(method, '__lazy__'):
                            # Lazy fields become callables bound to the row.
                            box[attribute] = VirtualCommand(method, row)
                        elif type(method) == types.MethodType:
                            # Legacy style: copy row data onto the virtual-
                            # fields object (once per row) and call eagerly.
                            if not updated:
                                virtualfields.__dict__.update(row)
                                updated = True
                            box[attribute] = method()
        return self
def __and__(self, other):
if self.colnames != other.colnames:
raise Exception('Cannot & incompatible Rows objects')
records = self.records+other.records
return Rows(self.db, records, self.colnames,
compact=self.compact or other.compact)
def __or__(self, other):
if self.colnames != other.colnames:
raise Exception('Cannot | incompatible Rows objects')
records = [record for record in other.records
if record not in self.records]
records = self.records + records
return Rows(self.db, records, self.colnames,
compact=self.compact or other.compact)
def __len__(self):
return len(self.records)
def __getslice__(self, a, b):
return Rows(self.db, self.records[a:b], self.colnames,
compact=self.compact)
def __getitem__(self, i):
row = self.records[i]
keys = list(row.keys())
if self.compact and len(keys) == 1 and keys[0] != '_extra':
return row[keys[0]]
return row
def __iter__(self):
"""
Iterator over records
"""
for i in xrange(len(self)):
yield self[i]
def __eq__(self, other):
if isinstance(other, Rows):
return (self.records == other.records)
else:
return False
def column(self, column=None):
return [r[str(column) if column else self.colnames[0]] for r in self]
def first(self):
if not self.records:
return None
return self[0]
def last(self):
if not self.records:
return None
return self[-1]
def find(self, f, limitby=None):
"""
Returns a new Rows object, a subset of the original object,
filtered by the function `f`
"""
if not self:
return Rows(self.db, [], self.colnames, compact=self.compact)
records = []
if limitby:
a, b = limitby
else:
a, b = 0, len(self)
k = 0
for i, row in enumerate(self):
if f(row):
if a <= k:
records.append(self.records[i])
k += 1
if k == b:
break
return Rows(self.db, records, self.colnames, compact=self.compact)
def exclude(self, f):
"""
Removes elements from the calling Rows object, filtered by the function
`f`, and returns a new Rows object containing the removed elements
"""
if not self.records:
return Rows(self.db, [], self.colnames, compact=self.compact)
removed = []
i = 0
while i < len(self):
row = self[i]
if f(row):
removed.append(self.records[i])
del self.records[i]
else:
i += 1
return Rows(self.db, removed, self.colnames, compact=self.compact)
def sort(self, f, reverse=False):
"""
Returns a list of sorted elements (not sorted in place)
"""
rows = Rows(self.db, [], self.colnames, compact=self.compact)
# When compact=True, iterating over self modifies each record,
# so when sorting self, it is necessary to return a sorted
# version of self.records rather than the sorted self directly.
rows.records = [r for (r, s) in sorted(zip(self.records, self),
key=lambda r: f(r[1]),
reverse=reverse)]
return rows
    def group_by_value(self, *fields, **args):
        """
        Regroups the rows by the given field(s), returning a nested dict
        keyed by each field's value in turn. With ``one_result=True`` the
        deepest level holds a single row instead of a list of rows.
        """
        one_result = False
        if 'one_result' in args:
            one_result = args['one_result']
        def build_fields_struct(row, fields, num, groups):
            '''Recursively insert `row` into the nested `groups` dict,
            keyed by row[fields[num]] at each level.

            Returns the row (or [row]) at the deepest level, otherwise the
            `groups` dict, which is mutated in place.
            '''
            if num > len(fields)-1:
                if one_result:
                    return row
                else:
                    return [row]
            key = fields[num]
            value = row[key]
            if value not in groups:
                groups[value] = build_fields_struct(row, fields, num+1, {})
            else:
                struct = build_fields_struct(row, fields, num+1, groups[value])
                # still have more grouping to do
                if type(struct) == type(dict()):
                    # no-op by design: the recursive call above received
                    # groups[value] itself and mutated it in place
                    groups[value].update()
                # no more grouping, first only is off
                elif type(struct) == type(list()):
                    groups[value] += struct
                # no more grouping, first only on
                else:
                    groups[value] = struct
            return groups
        if len(fields) == 0:
            return self
        # if select returned no results
        if not self.records:
            return {}
        grouped_row_group = dict()
        # build the struct
        for row in self:
            build_fields_struct(row, fields, 0, grouped_row_group)
        return grouped_row_group
def render(self, i=None, fields=None):
"""
Takes an index and returns a copy of the indexed row with values
transformed via the "represent" attributes of the associated fields.
Args:
i: index. If not specified, a generator is returned for iteration
over all the rows.
fields: a list of fields to transform (if None, all fields with
"represent" attributes will be transformed)
"""
if i is None:
return (self.render(i, fields=fields) for i in range(len(self)))
if not self.db.has_representer('rows_render'):
raise RuntimeError("Rows.render() needs a `rows_render` \
representer in DAL instance")
row = copy.deepcopy(self.records[i])
keys = list(row.keys())
tables = [f.tablename for f in fields] if fields \
else [k for k in keys if k != '_extra']
for table in tables:
repr_fields = [f.name for f in fields if f.tablename == table] \
if fields else [k for k in row[table].keys()
if (hasattr(self.db[table], k) and
isinstance(self.db[table][k], Field)
and self.db[table][k].represent)]
for field in repr_fields:
row[table][field] = self.db.represent(
'rows_render', self.db[table][field], row[table][field],
row[table])
if self.compact and len(keys) == 1 and keys[0] != '_extra':
return row[keys[0]]
return row
@implements_iterator
class IterRows(BasicRows):
    """Lazily iterates over a query result, fetching and parsing one
    database row at a time instead of materializing the whole set.
    """

    def __init__(self, db, sql, fields, colnames, blob_decode, cacheable):
        self.db = db
        self.fields = fields
        self.colnames = colnames
        self.blob_decode = blob_decode
        self.cacheable = cacheable
        (self.fields_virtual, self.fields_lazy, self.tmps) = \
            self.db._adapter._parse_expand_colnames(colnames)
        self.db._adapter.execute(sql)
        # _head caches the first row (see first()); last_item/last_item_id
        # memoize the most recent __getitem__ result.
        self._head = None
        self.last_item = None
        self.last_item_id = None
        self.compact = True

    def __next__(self):
        """Fetch, parse and return the next row; raises StopIteration at
        the end of the result set."""
        db_row = self.db._adapter._fetchone()
        if db_row is None:
            raise StopIteration
        row = self.db._adapter._parse(db_row, self.tmps, self.fields,
                                      self.colnames, self.blob_decode,
                                      self.cacheable, self.fields_virtual,
                                      self.fields_lazy)
        if self.compact:
            # The following is to translate
            # <Row {'t0': {'id': 1L, 'name': 'web2py'}}>
            # in
            # <Row {'id': 1L, 'name': 'web2py'}>
            # normally accomplished by Rows.__get_item__
            keys = list(row.keys())
            if len(keys) == 1 and keys[0] != '_extra':
                row = row[keys[0]]
        return row

    def __iter__(self):
        if self._head:
            yield self._head
        # BUGFIX: under PEP 479 (Python 3.7+) a StopIteration raised by
        # next(self) inside this generator would become a RuntimeError, so
        # it must be caught explicitly to terminate the iteration cleanly.
        try:
            row = next(self)
            while row is not None:
                yield row
                row = next(self)
        except StopIteration:
            return

    def first(self):
        """Return (and cache) the first row, or None for an empty result."""
        if self._head is None:
            try:
                self._head = next(self)
            except StopIteration:
                return None
        return self._head

    def __getitem__(self, key):
        """Forward-only indexed access: key must be an int not smaller than
        the last index requested (the cursor cannot rewind)."""
        if not isinstance(key, (int, long)):
            raise TypeError
        if key == self.last_item_id:
            return self.last_item
        n_to_drop = key
        if self.last_item_id is not None:
            if self.last_item_id < key:
                n_to_drop -= (self.last_item_id + 1)
            else:
                # going backwards is impossible on a streaming cursor
                raise IndexError
        # fetch and drop the rows before the requested one
        for i in xrange(n_to_drop):
            self.db._adapter._fetchone()
        # BUGFIX: __next__ signals exhaustion with StopIteration rather than
        # returning None, so the original `if row is None` check was
        # unreachable; translate exhaustion into IndexError here.
        try:
            row = next(self)
        except StopIteration:
            raise IndexError
        self.last_item_id = key
        self.last_item = row
        return row

    # # rowcount it doesn't seem to be reliable on all drivers
    # def __len__(self):
    #     return self.db._adapter.cursor.rowcount
| {
"content_hash": "5f1ff07440d8beca8eb7dbd405ee83a4",
"timestamp": "",
"source": "github",
"line_count": 2767,
"max_line_length": 129,
"avg_line_length": 36.991326346223346,
"alnum_prop": 0.5205607933173758,
"repo_name": "manuelep/pydal",
"id": "eedeac1a4b4e184629598da08ca445ccd685ba8a",
"size": "102380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pydal/objects.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "765456"
}
],
"symlink_target": ""
} |
import proto # type: ignore
from google.ads.googleads.v10.enums.types import advertising_channel_type
from google.ads.googleads.v10.enums.types import device
from google.ads.googleads.v10.enums.types import seasonality_event_scope
from google.ads.googleads.v10.enums.types import seasonality_event_status
# proto-plus module descriptor: registers the message types listed in
# `manifest` under the given package for (de)serialization.
__protobuf__ = proto.module(
    package="google.ads.googleads.v10.resources",
    marshal="google.ads.googleads.v10",
    manifest={
        "BiddingSeasonalityAdjustment",
    },
)
class BiddingSeasonalityAdjustment(proto.Message):
    r"""Represents a bidding seasonality adjustment.
    See "About seasonality adjustments" at
    https://support.google.com/google-ads/answer/10369906.

    Attributes:
        resource_name (str):
            Immutable. The resource name of the seasonality adjustment.
            Seasonality adjustment resource names have the form:
            ``customers/{customer_id}/biddingSeasonalityAdjustments/{seasonality_adjustment_id}``
        seasonality_adjustment_id (int):
            Output only. The ID of the seasonality
            adjustment.
        scope (google.ads.googleads.v10.enums.types.SeasonalityEventScopeEnum.SeasonalityEventScope):
            The scope of the seasonality adjustment.
        status (google.ads.googleads.v10.enums.types.SeasonalityEventStatusEnum.SeasonalityEventStatus):
            Output only. The status of the seasonality
            adjustment.
        start_date_time (str):
            Required. The inclusive start time of the
            seasonality adjustment in yyyy-MM-dd HH:mm:ss
            format.
            A seasonality adjustment is forward looking and
            should be used for events that start and end in
            the future.
        end_date_time (str):
            Required. The exclusive end time of the seasonality
            adjustment in yyyy-MM-dd HH:mm:ss format.
            The length of [start_date_time, end_date_time) interval must
            be within (0, 14 days].
        name (str):
            The name of the seasonality adjustment. The
            name can be at most 255 characters.
        description (str):
            The description of the seasonality
            adjustment. The description can be at most 2048
            characters.
        devices (Sequence[google.ads.googleads.v10.enums.types.DeviceEnum.Device]):
            If not specified, all devices will be
            included in this adjustment. Otherwise, only the
            specified targeted devices will be included in
            this adjustment.
        conversion_rate_modifier (float):
            Conversion rate modifier estimated based on
            expected conversion rate changes. When this
            field is unset or set to 1.0 no adjustment will
            be applied to traffic. The allowed range is 0.1
            to 10.0.
        campaigns (Sequence[str]):
            The seasonality adjustment will apply to the campaigns
            listed when the scope of this adjustment is CAMPAIGN. The
            maximum number of campaigns per event is 2000. Note: a
            seasonality adjustment with both advertising_channel_types
            and campaign_ids is not supported.
        advertising_channel_types (Sequence[google.ads.googleads.v10.enums.types.AdvertisingChannelTypeEnum.AdvertisingChannelType]):
            The seasonality adjustment will apply to all the campaigns
            under the listed channels retroactively as well as going
            forward when the scope of this adjustment is CHANNEL. The
            supported advertising channel types are DISPLAY, SEARCH and
            SHOPPING. Note: a seasonality adjustment with both
            advertising_channel_types and campaign_ids is not supported.
    """

    # Field numbers below mirror the BiddingSeasonalityAdjustment proto
    # definition; the docstring above documents each field's semantics.
    resource_name = proto.Field(
        proto.STRING,
        number=1,
    )
    seasonality_adjustment_id = proto.Field(
        proto.INT64,
        number=2,
    )
    scope = proto.Field(
        proto.ENUM,
        number=3,
        enum=seasonality_event_scope.SeasonalityEventScopeEnum.SeasonalityEventScope,
    )
    status = proto.Field(
        proto.ENUM,
        number=4,
        enum=seasonality_event_status.SeasonalityEventStatusEnum.SeasonalityEventStatus,
    )
    start_date_time = proto.Field(
        proto.STRING,
        number=5,
    )
    end_date_time = proto.Field(
        proto.STRING,
        number=6,
    )
    name = proto.Field(
        proto.STRING,
        number=7,
    )
    description = proto.Field(
        proto.STRING,
        number=8,
    )
    devices = proto.RepeatedField(
        proto.ENUM,
        number=9,
        enum=device.DeviceEnum.Device,
    )
    conversion_rate_modifier = proto.Field(
        proto.DOUBLE,
        number=10,
    )
    campaigns = proto.RepeatedField(
        proto.STRING,
        number=11,
    )
    advertising_channel_types = proto.RepeatedField(
        proto.ENUM,
        number=12,
        enum=advertising_channel_type.AdvertisingChannelTypeEnum.AdvertisingChannelType,
    )
__all__ = tuple(sorted(__protobuf__.manifest))
| {
"content_hash": "0b2dd32896f84edc49ae124edc4e218a",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 133,
"avg_line_length": 37.21167883211679,
"alnum_prop": 0.6502550019615535,
"repo_name": "googleads/google-ads-python",
"id": "ed450b0d75be35186d4b73823dbf4769134c7b04",
"size": "5698",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "google/ads/googleads/v10/resources/types/bidding_seasonality_adjustment.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "23399881"
}
],
"symlink_target": ""
} |
import logging
import sys
import click
import yaml
LOG_MAP = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
def get_config(config_path):
    """Load and validate the YAML config at *config_path* (a pathlib.Path).

    Returns:
        The parsed configuration dict.

    Raises:
        click.Abort: if any required setting is missing (after printing
            an explanatory message).
    """
    config = yaml.safe_load(config_path.read_bytes())
    required_settings = [
        "twitch_username",
        "twitch_oauth",
        "twitch_channels",
        "client_id",
    ]
    for setting in required_settings:
        if setting not in config:
            # BUGFIX: click.Abort ignores any message passed to its
            # constructor, so the explanation was never shown to the user.
            # Print it explicitly before aborting.
            click.echo(
                f"{setting} not present in config.txt, put it there! "
                "check config_example.txt!",
                err=True,
            )
            raise click.Abort()
    return config
def logging_config(verbosity):
    """Build a dictConfig-compatible logging configuration.

    Args:
        verbosity: number of -v flags; 0 = WARNING, 1 = INFO, 2+ = DEBUG.

    Returns:
        A dict suitable for logging.config.dictConfig().
    """
    # Clamp so that verbosity beyond the map's maximum behaves like the
    # maximum instead of raising KeyError (e.g. -vvv == -vv).
    log_level = LOG_MAP[min(verbosity, max(LOG_MAP))]
    return {
        "disable_existing_loggers": False,
        "raiseExceptions": True,
        "version": 1,
        "handlers": {
            "console": {
                "formatter": "colorlog",
                "stream": sys.stdout,
                "class": "logging.StreamHandler",
            }
        },
        "loggers": {
            "": {"level": log_level, "handlers": ["console"]},
            "twitchchat_display": {"propagate": True, "level": log_level},
        },
        "formatters": {
            "colorlog": {
                "()": "colorlog.ColoredFormatter",
                "datefmt": "%Y-%m-%d %H:%M:%S",
                "format": "[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s] - %(message)s",
                "log_colors": {
                    "DEBUG": "purple",
                    "INFO": "green",
                    "WARNING": "yellow",
                    "ERROR": "red",
                    "CRITICAL": "red",
                },
            },
        },
    }
| {
"content_hash": "a74d174c0094d81e082e71011262a93a",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 137,
"avg_line_length": 29.43859649122807,
"alnum_prop": 0.4731823599523242,
"repo_name": "shughes-uk/twitchchat_display",
"id": "e453e79862b37faa5338317fa8389db844a46a0b",
"size": "1678",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "twitchchat_display/config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "21995"
}
],
"symlink_target": ""
} |
"""Fixer for 'raise E, V'
From Armin Ronacher's ``python-modernize``.
raise -> raise
raise E -> raise E
raise E, 5 -> raise E(5)
raise E, 5, T -> raise E(5).with_traceback(T)
raise E, None, T -> raise E.with_traceback(T)
raise (((E, E'), E''), E'''), 5 -> raise E(5)
raise "foo", V, T -> warns about string exceptions
raise E, (V1, V2) -> raise E(V1, V2)
raise E, (V1, V2), T -> raise E(V1, V2).with_traceback(T)
CAVEATS:
1) "raise E, V, T" cannot be translated safely in general. If V
is not a tuple or a (number, string, None) literal, then:
raise E, V, T -> from future.utils import raise_
raise_(E, V, T)
"""
# Author: Collin Winter, Armin Ronacher, Mark Huang
# Local imports
from lib2to3 import pytree, fixer_base
from lib2to3.pgen2 import token
from lib2to3.fixer_util import Name, Call, is_tuple, Comma, Attr, ArgList
from libfuturize.fixer_util import touch_import_top
class FixRaise(fixer_base.BaseFix):
    """lib2to3 fixer that rewrites Python 2 ``raise E, V, T`` statements
    into their Python 3 equivalents (see the module docstring for the
    full mapping, including the ``future.utils.raise_`` fallback).
    """

    BM_compatible = True
    PATTERN = """
    raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] >
    """

    def transform(self, node, results):
        """Rewrite the matched raise statement.

        `results` carries the matched sub-nodes: `exc` (always present),
        and optionally `val` and `tb`. Returns the replacement node, or
        None when the statement cannot be converted (string exceptions).
        """
        syms = self.syms
        exc = results["exc"].clone()
        if exc.type == token.STRING:
            msg = "Python 3 does not support string exceptions"
            self.cannot_convert(node, msg)
            return
        # Python 2 supports
        #  raise ((((E1, E2), E3), E4), E5), V
        # as a synonym for
        #  raise E1, V
        # Since Python 3 will not support this, we recurse down any tuple
        # literals, always taking the first element.
        if is_tuple(exc):
            while is_tuple(exc):
                # exc.children[1:-1] is the unparenthesized tuple
                # exc.children[1].children[0] is the first element of the tuple
                exc = exc.children[1].children[0].clone()
            exc.prefix = u" "
        if "tb" in results:
            tb = results["tb"].clone()
        else:
            tb = None
        if "val" in results:
            val = results["val"].clone()
            if is_tuple(val):
                # Assume that exc is a subclass of Exception and call exc(*val).
                args = [c.clone() for c in val.children[1:-1]]
                exc = Call(exc, args)
            elif val.type in (token.NUMBER, token.STRING):
                # Handle numeric and string literals specially, e.g.
                # "raise Exception, 5" -> "raise Exception(5)".
                val.prefix = u""
                exc = Call(exc, [val])
            elif val.type == token.NAME and val.value == u"None":
                # Handle None specially, e.g.
                # "raise Exception, None" -> "raise Exception".
                pass
            else:
                # val is some other expression. If val evaluates to an instance
                # of exc, it should just be raised. If val evaluates to None,
                # a default instance of exc should be raised (as above). If val
                # evaluates to a tuple, exc(*val) should be called (as
                # above). Otherwise, exc(val) should be called. We can only
                # tell what to do at runtime, so defer to future.utils.raise_(),
                # which handles all of these cases.
                touch_import_top(u"future.utils", u"raise_", node)
                exc.prefix = u""
                args = [exc, Comma(), val]
                if tb is not None:
                    args += [Comma(), tb]
                return Call(Name(u"raise_"), args, prefix=node.prefix)
        if tb is not None:
            tb.prefix = ""
            exc_list = Attr(exc, Name('with_traceback')) + [ArgList([tb])]
        else:
            exc_list = [exc]
        return pytree.Node(syms.raise_stmt,
                           [Name(u"raise")] + exc_list,
                           prefix=node.prefix)
| {
"content_hash": "987f39761156c07e828ac18daa36422d",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 80,
"avg_line_length": 36.48598130841121,
"alnum_prop": 0.5322745901639344,
"repo_name": "PythonCharmers/python-future",
"id": "d113401c7df885320d326a8733172d3cc07bd407",
"size": "3904",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/libfuturize/fixes/fix_raise.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2465543"
},
{
"name": "Shell",
"bytes": "68"
}
],
"symlink_target": ""
} |
from unittest import mock
from trove.common import cfg
from trove.common.rpc import conductor_guest_serializer as gsz
from trove.common.rpc import conductor_host_serializer as hsz
from trove.tests.unittests import trove_testtools
CONF = cfg.CONF
class FakeInstance(object):
    """Minimal instance stand-in exposing a fixed uuid and encryption key."""

    def __init__(self):
        self.uuid = 'a3af1652-686a-4574-a916-2ef7e85136e5'

    @property
    def key(self):
        """The instance encryption key used by the serializer tests."""
        return 'mo79Y86Bp3bzQDWR31ihhVGfLBmeac'
class FakeContext(object):
    """Minimal RPC-context stand-in carrying instance_id and fields."""

    def __init__(self, instance_id=None, fields=None):
        self.instance_id = instance_id
        self.fields = fields
class TestConductorSerializer(trove_testtools.TestCase):
    """Round-trip tests for the conductor guest/host RPC serializers."""

    def setUp(self):
        """Pin a known uuid/key/data set and point CONF.guest_id at it."""
        self.uuid = 'a3af1652-686a-4574-a916-2ef7e85136e5'
        self.key = 'mo79Y86Bp3bzQDWR31ihhVGfLBmeac'
        self.data = 'ELzWd81qtgcj2Gxc1ipbh0HgbvHGrgptDj3n4GNMBN0F2WtNdr'
        self.context = {'a': 'ij2J8AJLyz0rDqbjxy4jPVINhnK2jsBGpWRKIe3tUnUD',
                        'b': 32,
                        'c': {'a': 21, 'b': 22}}
        # Remember the real guest_id so tearDown can restore it.
        self.old_guest_id = gsz.CONF.guest_id
        gsz.CONF.guest_id = self.uuid
        super(TestConductorSerializer, self).setUp()

    def tearDown(self):
        """Restore the original CONF.guest_id."""
        gsz.CONF.guest_id = self.old_guest_id
        super(TestConductorSerializer, self).tearDown()

    def test_gsz_serialize_entity_nokey(self):
        """Guest serializer without a key passes entities through unchanged."""
        sz = gsz.ConductorGuestSerializer(None, None)
        self.assertEqual(sz.serialize_entity(self.context, self.data),
                         self.data)

    def test_gsz_serialize_context_nokey(self):
        """Guest serializer without a key passes contexts through unchanged."""
        sz = gsz.ConductorGuestSerializer(None, None)
        self.assertEqual(sz.serialize_context(self.context),
                         self.context)

    @mock.patch('trove.common.rpc.conductor_host_serializer.'
                'get_instance_encryption_key',
                return_value='mo79Y86Bp3bzQDWR31ihhVGfLBmeac')
    def test_hsz_serialize_entity_nokey_noinstance(self, _):
        """Host serializer with no instance_id passes entities through."""
        sz = hsz.ConductorHostSerializer(None, None)
        ctxt = FakeContext(instance_id=None)
        self.assertEqual(sz.serialize_entity(ctxt, self.data),
                         self.data)

    @mock.patch('trove.common.rpc.conductor_host_serializer.'
                'get_instance_encryption_key',
                return_value='mo79Y86Bp3bzQDWR31ihhVGfLBmeac')
    def test_hsz_serialize_context_nokey_noinstance(self, _):
        """Host serializer with no instance_id passes contexts through."""
        sz = hsz.ConductorHostSerializer(None, None)
        ctxt = FakeContext(instance_id=None)
        self.assertEqual(sz.serialize_context(ctxt), ctxt)

    @mock.patch('trove.common.rpc.conductor_host_serializer.'
                'get_instance_encryption_key',
                return_value='mo79Y86Bp3bzQDWR31ihhVGfLBmeac')
    def test_conductor_entity(self, _):
        """Guest-encrypted entities decrypt back to the original on the host."""
        guestsz = gsz.ConductorGuestSerializer(None, self.key)
        hostsz = hsz.ConductorHostSerializer(None, None)
        encrypted_entity = guestsz.serialize_entity(self.context, self.data)
        self.assertNotEqual(encrypted_entity, self.data)
        entity = hostsz.deserialize_entity(self.context, encrypted_entity)
        self.assertEqual(entity, self.data)

    @mock.patch('trove.common.rpc.conductor_host_serializer.'
                'get_instance_encryption_key',
                return_value='mo79Y86Bp3bzQDWR31ihhVGfLBmeac')
    def test_conductor_context(self, _):
        """Guest-encrypted contexts decrypt on the host and gain instance_id."""
        guestsz = gsz.ConductorGuestSerializer(None, self.key)
        hostsz = hsz.ConductorHostSerializer(None, None)
        encrypted_context = guestsz.serialize_context(self.context)
        self.assertNotEqual(encrypted_context, self.context)
        context = hostsz.deserialize_context(encrypted_context)
        self.assertEqual(context.get('instance_id'), self.uuid)
        context.pop('instance_id')
        self.assertDictEqual(context, self.context)
| {
"content_hash": "e25c94358b7da037fa0620cb38e354d2",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 76,
"avg_line_length": 40.255319148936174,
"alnum_prop": 0.6664904862579282,
"repo_name": "openstack/trove",
"id": "cc4faea15fa1c84fbe6dfa8a21198c56df084ccb",
"size": "4414",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "trove/tests/unittests/common/test_conductor_serializer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1166"
},
{
"name": "Python",
"bytes": "3667406"
},
{
"name": "Shell",
"bytes": "136049"
}
],
"symlink_target": ""
} |
"""
The cutoffs module gives a few different options for smoothly sending the GP
kernel to zero near the boundary of the cutoff sphere.
"""
from math import cos, sin, pi
from numba import njit
@njit
def hard_cutoff(r_cut: float, ri: float, ci: float):
    """A hard cutoff that assigns a value of 1 to all interatomic distances.

    Args:
        r_cut (float): Cutoff value (in angstrom).
        ri (float): Interatomic distance.
        ci (float): Cartesian coordinate divided by the distance.

    Returns:
        (float, float): Cutoff value (always 1) and its derivative (always 0).
    """
    return 1, 0
@njit
def quadratic_cutoff_bound(r_cut: float, ri: float, ci: float):
    """A quadratic cutoff that goes to zero smoothly at the cutoff boundary.

    Args:
        r_cut (float): Cutoff value (in angstrom).
        ri (float): Interatomic distance.
        ci (float): Cartesian coordinate divided by the distance.

    Returns:
        (float, float): Cutoff value and its derivative; both zero at or
        beyond the cutoff radius.
    """
    if ri >= r_cut:
        return 0, 0
    dr = r_cut - ri
    return dr * dr, 2 * dr * ci
@njit
def quadratic_cutoff(r_cut: float, ri: float, ci: float):
    """A quadratic cutoff that goes to zero smoothly at the cutoff boundary.

    Args:
        r_cut (float): Cutoff value (in angstrom).
        ri (float): Interatomic distance.
        ci (float): Cartesian coordinate divided by the distance.

    Returns:
        (float, float): Cutoff value and its derivative.
    """
    dr = r_cut - ri
    return dr * dr, 2 * dr * ci
@njit
def cubic_cutoff(r_cut: float, ri: float, ci: float):
    """A cubic cutoff that goes to zero smoothly at the cutoff boundary.

    Args:
        r_cut (float): Cutoff value (in angstrom).
        ri (float): Interatomic distance.
        ci (float): Cartesian coordinate divided by the distance.

    Returns:
        (float, float): Cutoff value and its derivative.
    """
    dr = r_cut - ri
    value = dr * dr * dr
    derivative = 3 * dr * dr * ci
    return value, derivative
@njit
def cosine_cutoff(r_cut: float, ri: float, ci: float, d: float = 1):
    """A cosine cutoff that returns 1 up to r_cut - d, and assigns a cosine
    envelope to values of r between r_cut - d and r_cut. Based on Eq. 24 of
    Albert P. Bartók and Gábor Csányi. "Gaussian approximation potentials: A
    brief tutorial introduction." International Journal of Quantum Chemistry
    115.16 (2015): 1051-1057.

    Args:
        r_cut (float): Cutoff value (in angstrom).
        ri (float): Interatomic distance.
        ci (float): Cartesian coordinate divided by the distance.

    Returns:
        (float, float): Cutoff value and its derivative.
    """
    # Inside the plateau region the envelope is flat.
    if ri <= r_cut - d:
        return 1, 0
    fi = (1 / 2) * (cos(pi * (ri - r_cut + d) / d) + 1)
    fdi = (pi / (2 * d)) * sin(pi * (r_cut - ri) / d) * ci
    return fi, fdi
| {
"content_hash": "bf00337920e97051fc7655e3da55c055",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 76,
"avg_line_length": 26.821428571428573,
"alnum_prop": 0.6091877496671105,
"repo_name": "mir-group/flare",
"id": "c64cf01e845d3ffa28ae9e030ee2e463c56ae649",
"size": "3007",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flare/kernels/cutoffs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "89901"
},
{
"name": "Python",
"bytes": "1372138"
}
],
"symlink_target": ""
} |
"""
pygments.formatters.terminal256
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Formatter for 256-color terminal output with ANSI sequences.
RGB-to-XTERM color conversion routines adapted from xterm256-conv
tool (http://frexx.de/xterm-256-notes/data/xterm256-conv2.tar.bz2)
by Wolfgang Frisch.
Formatter version 1.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# TODO:
# - Options to map style's bold/underline/italic/border attributes
# to some ANSI attrbutes (something like 'italic=underline')
# - An option to output "style RGB to xterm RGB/index" conversion table
# - An option to indicate that we are running in "reverse background"
# xterm. This means that default colors are white-on-black, not
# black-on-while, so colors like "white background" need to be converted
# to "white background, black foreground", etc...
import sys
from pygments.formatter import Formatter
from pygments.console import codes
from pygments.style import ansicolors
__all__ = ['Terminal256Formatter', 'TerminalTrueColorFormatter']
class EscapeSequence:
    """Bag of terminal text attributes (fg/bg color, bold, underline) that
    renders itself as ANSI SGR escape strings.
    """

    def __init__(self, fg=None, bg=None, bold=False, underline=False):
        self.fg = fg
        self.bg = bg
        self.bold = bold
        self.underline = underline

    def escape(self, attrs):
        """Join SGR attribute codes into one escape string ('' if none)."""
        if not attrs:
            return ""
        return "\x1b[" + ";".join(attrs) + "m"

    def color_string(self):
        """Render the 256-color SGR sequence for these attributes."""
        attrs = []
        if self.fg is not None:
            if self.fg in ansicolors:
                esc = codes[self.fg[5:]]
                if ';01m' in esc:
                    self.bold = True
                # keep only the two-digit fg color code
                attrs.append(esc[2:4])
            else:
                attrs.extend(("38", "5", "%i" % self.fg))
        if self.bg is not None:
            if self.bg in ansicolors:
                esc = codes[self.bg[5:]]
                # fg code + 10 gives the matching bg code
                attrs.append(str(int(esc[2:4]) + 10))
            else:
                attrs.extend(("48", "5", "%i" % self.bg))
        if self.bold:
            attrs.append("01")
        if self.underline:
            attrs.append("04")
        return self.escape(attrs)

    def true_color_string(self):
        """Render the 24-bit (true color) SGR sequence."""
        attrs = []
        for color, base in ((self.fg, "38"), (self.bg, "48")):
            if color:
                attrs.extend((base, "2", str(color[0]), str(color[1]),
                              str(color[2])))
        if self.bold:
            attrs.append("01")
        if self.underline:
            attrs.append("04")
        return self.escape(attrs)

    def reset_string(self):
        """Render the sequence that undoes every attribute set here."""
        attrs = [code
                 for flag, code in ((self.fg is not None, "39"),
                                    (self.bg is not None, "49"),
                                    (self.bold or self.underline, "00"))
                 if flag]
        return self.escape(attrs)
class Terminal256Formatter(Formatter):
    """
    Format tokens with ANSI color sequences, for output in a 256-color
    terminal or console. Like in `TerminalFormatter` color sequences
    are terminated at newlines, so that paging the output works correctly.

    The formatter takes colors from a style defined by the `style` option
    and converts them to nearest ANSI 256-color escape sequences. Bold and
    underline attributes from the style are preserved (and displayed).

    .. versionadded:: 0.9

    .. versionchanged:: 2.2
       If the used style defines foreground colors in the form ``#ansi*``, then
       `Terminal256Formatter` will map these to non extended foreground color.
       See :ref:`AnsiTerminalStyle` for more information.

    Options accepted:

    `style`
        The style to use, can be a string or a Style subclass (default:
        ``'default'``).
    """
    name = 'Terminal256'
    aliases = ['terminal256', 'console256', '256']
    filenames = []

    def __init__(self, **options):
        Formatter.__init__(self, **options)
        self.xterm_colors = []
        # maps style color string -> palette index (or ansicolor name)
        self.best_match = {}
        # maps token type string -> (start-escape, reset-escape)
        self.style_string = {}

        self.usebold = 'nobold' not in options
        self.useunderline = 'nounderline' not in options

        self._build_color_table()  # build an RGB-to-256 color conversion table
        self._setup_styles()  # convert selected style's colors to term. colors

    def _build_color_table(self):
        """Populate self.xterm_colors with the xterm palette's RGB values.

        NOTE(review): ``range(217)`` yields 217 cube entries although the
        6x6x6 cube has 216, and the grayscale ramp stops at index 253, so
        the table differs slightly from the real xterm palette and indices
        254/255 are never produced. This matches historical upstream
        behavior; changing it would alter nearest-color matches — confirm
        before "fixing".
        """
        # colors 0..15: 16 basic colors

        self.xterm_colors.append((0x00, 0x00, 0x00))  # 0
        self.xterm_colors.append((0xcd, 0x00, 0x00))  # 1
        self.xterm_colors.append((0x00, 0xcd, 0x00))  # 2
        self.xterm_colors.append((0xcd, 0xcd, 0x00))  # 3
        self.xterm_colors.append((0x00, 0x00, 0xee))  # 4
        self.xterm_colors.append((0xcd, 0x00, 0xcd))  # 5
        self.xterm_colors.append((0x00, 0xcd, 0xcd))  # 6
        self.xterm_colors.append((0xe5, 0xe5, 0xe5))  # 7
        self.xterm_colors.append((0x7f, 0x7f, 0x7f))  # 8
        self.xterm_colors.append((0xff, 0x00, 0x00))  # 9
        self.xterm_colors.append((0x00, 0xff, 0x00))  # 10
        self.xterm_colors.append((0xff, 0xff, 0x00))  # 11
        self.xterm_colors.append((0x5c, 0x5c, 0xff))  # 12
        self.xterm_colors.append((0xff, 0x00, 0xff))  # 13
        self.xterm_colors.append((0x00, 0xff, 0xff))  # 14
        self.xterm_colors.append((0xff, 0xff, 0xff))  # 15

        # colors 16..232: the 6x6x6 color cube

        valuerange = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)

        for i in range(217):
            r = valuerange[(i // 36) % 6]
            g = valuerange[(i // 6) % 6]
            b = valuerange[i % 6]
            self.xterm_colors.append((r, g, b))

        # colors 233..253: grayscale

        for i in range(1, 22):
            v = 8 + i * 10
            self.xterm_colors.append((v, v, v))

    def _closest_color(self, r, g, b):
        """Return the palette index with the smallest squared RGB distance."""
        distance = 257*257*3  # "infinity" (>distance from #000000 to #ffffff)
        match = 0

        for i in range(0, 254):
            values = self.xterm_colors[i]

            rd = r - values[0]
            gd = g - values[1]
            bd = b - values[2]
            d = rd*rd + gd*gd + bd*bd

            if d < distance:
                match = i
                distance = d

        return match

    def _color_index(self, color):
        """Map a style color (hex string or '#ansi*' name) to a palette
        index (memoized in self.best_match)."""
        index = self.best_match.get(color, None)
        if color in ansicolors:
            # strip the `#ansi` part and look up code
            index = color
            self.best_match[color] = index
        if index is None:
            try:
                rgb = int(str(color), 16)
            except ValueError:
                rgb = 0

            r = (rgb >> 16) & 0xff
            g = (rgb >> 8) & 0xff
            b = rgb & 0xff
            index = self._closest_color(r, g, b)
            self.best_match[color] = index
        return index

    def _setup_styles(self):
        """Precompute (start, reset) escape strings for every token type."""
        for ttype, ndef in self.style:
            escape = EscapeSequence()
            # get foreground from ansicolor if set
            if ndef['ansicolor']:
                escape.fg = self._color_index(ndef['ansicolor'])
            elif ndef['color']:
                escape.fg = self._color_index(ndef['color'])
            if ndef['bgansicolor']:
                escape.bg = self._color_index(ndef['bgansicolor'])
            elif ndef['bgcolor']:
                escape.bg = self._color_index(ndef['bgcolor'])
            if self.usebold and ndef['bold']:
                escape.bold = True
            if self.useunderline and ndef['underline']:
                escape.underline = True
            self.style_string[str(ttype)] = (escape.color_string(),
                                             escape.reset_string())

    def format(self, tokensource, outfile):
        # hack: if the output is a terminal and has an encoding set,
        # use that to avoid unicode encode problems
        if not self.encoding and hasattr(outfile, "encoding") and \
           hasattr(outfile, "isatty") and outfile.isatty() and \
           sys.version_info < (3,):
            self.encoding = outfile.encoding
        return Formatter.format(self, tokensource, outfile)

    def format_unencoded(self, tokensource, outfile):
        """Write colorized tokens, falling back to parent token types when
        a type has no precomputed style."""
        for ttype, value in tokensource:
            not_found = True
            while ttype and not_found:
                try:
                    # outfile.write( "<" + str(ttype) + ">" )
                    on, off = self.style_string[str(ttype)]

                    # Like TerminalFormatter, add "reset colors" escape sequence
                    # on newline.
                    spl = value.split('\n')
                    for line in spl[:-1]:
                        if line:
                            outfile.write(on + line + off)
                        outfile.write('\n')
                    if spl[-1]:
                        outfile.write(on + spl[-1] + off)

                    not_found = False
                    # outfile.write( '#' + str(ttype) + '#' )

                except KeyError:
                    # ottype = ttype
                    ttype = ttype[:-1]
                    # outfile.write( '!' + str(ottype) + '->' + str(ttype) + '!' )

            if not_found:
                outfile.write(value)
class TerminalTrueColorFormatter(Terminal256Formatter):
    r"""
    Format tokens with ANSI color sequences, for output in a true-color
    terminal or console. Like in `TerminalFormatter` color sequences
    are terminated at newlines, so that paging the output works correctly.

    .. versionadded:: 2.1

    Options accepted:

    `style`
        The style to use, can be a string or a Style subclass (default:
        ``'default'``).
    """
    name = 'TerminalTrueColor'
    aliases = ['terminal16m', 'console16m', '16m']
    filenames = []

    def _build_color_table(self):
        # True-color output needs no xterm palette approximation.
        pass

    def _color_tuple(self, color):
        """Parse an ``RRGGBB`` hex string into an (r, g, b) tuple, or
        return None when the string is not valid hex."""
        try:
            value = int(str(color), 16)
        except ValueError:
            return None
        return ((value >> 16) & 0xff, (value >> 8) & 0xff, value & 0xff)

    def _setup_styles(self):
        """Precompute (start, reset) 24-bit escape strings per token type."""
        for ttype, ndef in self.style:
            escape = EscapeSequence(
                fg=self._color_tuple(ndef['color']) if ndef['color'] else None,
                bg=self._color_tuple(ndef['bgcolor']) if ndef['bgcolor'] else None,
                bold=bool(self.usebold and ndef['bold']),
                underline=bool(self.useunderline and ndef['underline']),
            )
            self.style_string[str(ttype)] = (escape.true_color_string(),
                                             escape.reset_string())
| {
"content_hash": "3cd435ef664d9b5df7205321ce7bf2d2",
"timestamp": "",
"source": "github",
"line_count": 308,
"max_line_length": 88,
"avg_line_length": 34.90909090909091,
"alnum_prop": 0.5424107142857143,
"repo_name": "uncled1023/pygments",
"id": "5110bc9ee8466e1829e9dfe5d71fcca2ecee4288",
"size": "10776",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Pygments/pygments-lib/pygments/formatters/terminal256.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "2099"
},
{
"name": "C#",
"bytes": "18871"
},
{
"name": "Python",
"bytes": "3406159"
}
],
"symlink_target": ""
} |
import itertools
from metrika.contender import Contender
from metrika.variable import *
__author__ = 'Javier Pimás'
class Suite:
    """A named collection of experiment variables whose cross product
    yields the concrete contenders to benchmark."""

    def __init__(self, name=""):
        self.name = name
        self.variables = []

    def add_variable_from_dict(self, name, values):
        """Add a variable whose values are named: `values` maps each
        value's display name to the value itself."""
        named = [NamedValue(key, value) for key, value in values.items()]
        self.variables.append(Variable(name, named))

    def add_variable_from_list(self, name, values):
        """Add a variable whose values are anonymous (a plain list)."""
        anon = [AnonValue(value) for value in values]
        self.variables.append(Variable(name, anon))

    def restrict(self, arguments):
        """Apply ``arguments.restrict``, a comma-separated list of
        ``var=value`` pairs, restricting each named variable to one value.

        Raises:
            ValueError: if a restriction names a variable not in the suite
                (previously this surfaced as an opaque StopIteration).
        """
        if arguments.restrict is None:
            return
        for restriction in arguments.restrict.split(','):
            # partition tolerates '=' inside the value, unlike split('=')
            var, _, value = restriction.partition('=')
            variable = next((x for x in self.variables if x.name == var), None)
            if variable is None:
                known = ', '.join(v.name for v in self.variables)
                raise ValueError(
                    "unknown variable %r in restriction; known variables: %s"
                    % (var, known))
            variable.restrict_to(value)

    def instances(self):
        """Return one Contender per combination of variable values."""
        names = [var.name for var in self.variables]
        values = [var.values for var in self.variables]
        return [Contender(names, combo)
                for combo in itertools.product(*values)]
| {
"content_hash": "80e65e072b2e60875bf6bee7e62fbe7c",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 76,
"avg_line_length": 33.375,
"alnum_prop": 0.6134831460674157,
"repo_name": "dc-uba/metrika",
"id": "6e119e6795a7e4f6ac78b9a7e1190210d6853844",
"size": "1361",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metrika/suite.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "41446"
}
],
"symlink_target": ""
} |
"""
Scraper for requesting, caching and parsing house.mo.gov lists.
"""
from scraper.interpreters import BaseInterpreter, HouseBillPageContentInterpreter
from scraper.url_helpers import house_url
class HouseBillPageInterpreter(BaseInterpreter):
    """Interpreter for a single house.mo.gov bill page.

    Locates the linked "BillContent" page and lazily delegates attribute
    and action extraction to a :class:`HouseBillPageContentInterpreter`.
    """

    def __init__(self, url):
        # Lazy caches: built on first access, reused afterwards.
        self._actions = None
        self._bill_content_interpreter = None
        super(HouseBillPageInterpreter, self).__init__(url=url)

    @property
    def bill_content_page_url(self):
        """URL fragment of the linked bill-content page."""
        return self.get_first_url_containing("BillContent")

    @property
    def bill_content_interpreter(self):
        """Delegate interpreter for the bill-content page (created once)."""
        if not self._bill_content_interpreter:
            full_url = house_url(self.bill_content_page_url)
            self._bill_content_interpreter = HouseBillPageContentInterpreter(full_url)
        return self._bill_content_interpreter

    @property
    def attributes(self):
        """Bill attributes parsed from the content page."""
        return self.bill_content_interpreter.attributes

    @property
    def actions(self):
        """Bill actions parsed from the content page (cached)."""
        if not self._actions:
            self._actions = self.bill_content_interpreter.actions
        return self._actions
| {
"content_hash": "bb5e3ee7c7745e80ee4c61a722bf260e",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 81,
"avg_line_length": 30.083333333333332,
"alnum_prop": 0.6795937211449676,
"repo_name": "access-missouri/am-django-project",
"id": "8e5cb2d6ada6f650af17bfb535db64e6f58cb692",
"size": "1129",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "am/scraper/interpreters/HouseBillPageInterpreter.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "208381"
},
{
"name": "HTML",
"bytes": "75543"
},
{
"name": "JavaScript",
"bytes": "68836"
},
{
"name": "Makefile",
"bytes": "803"
},
{
"name": "Python",
"bytes": "241729"
},
{
"name": "Ruby",
"bytes": "105"
}
],
"symlink_target": ""
} |
from muckrake.services.background_thread import BackgroundThreadService
import json
class VerifiableProducer(BackgroundThreadService):
    """Drives ``kafka-verifiable-producer.sh`` on remote nodes over ssh and
    parses its JSON-per-line output, recording which message values were
    acknowledged by the broker and which hit a send error."""
    # Log files the test framework collects from each node.
    logs = {
        "producer_log": {
            "path": "/mnt/producer.log",
            "collect_default": True}
    }
    def __init__(self, context, num_nodes, kafka, topic, max_messages=-1, throughput=100000):
        """
        :param kafka: Kafka service; supplies the bootstrap server list.
        :param topic: topic to produce to.
        :param max_messages: stop after this many messages (<= 0 means unlimited).
        :param throughput: target messages/sec (<= 0 disables throttling).
        """
        super(VerifiableProducer, self).__init__(context, num_nodes)
        self.kafka = kafka
        self.topic = topic
        self.max_messages = max_messages
        self.throughput = throughput
        # Message values acked / not acked so far, in arrival order.
        # NOTE(review): these are guarded by self.lock, which is presumably
        # created by BackgroundThreadService -- confirm.
        self.acked_values = []
        self.not_acked_values = []
    def _worker(self, idx, node):
        # Background thread body: launch the producer on `node` and parse
        # each JSON event line it prints.
        cmd = self.start_cmd
        self.logger.debug("Verbose producer %d command: %s" % (idx, cmd))
        for line in node.account.ssh_capture(cmd):
            line = line.strip()
            data = self.try_parse_json(line)
            if data is not None:
                self.logger.debug("VerifiableProducer: " + str(data))
                with self.lock:
                    # One event per message outcome; errors also record the
                    # node index that produced them.
                    if data["name"] == "producer_send_error":
                        data["node"] = idx
                        self.not_acked_values.append(int(data["value"]))
                    elif data["name"] == "producer_send_success":
                        self.acked_values.append(int(data["value"]))
    @property
    def start_cmd(self):
        # Shell command that launches the verifiable producer; `tee` keeps a
        # copy of stdout in the collected log file.
        cmd = "/opt/kafka/bin/kafka-verifiable-producer.sh" \
              " --topic %s --broker-list %s" % (self.topic, self.kafka.bootstrap_servers())
        if self.max_messages > 0:
            cmd += " --max-messages %s" % str(self.max_messages)
        if self.throughput > 0:
            cmd += " --throughput %s" % str(self.throughput)
        cmd += " 2>> /mnt/producer.log | tee -a /mnt/producer.log &"
        return cmd
    @property
    def acked(self):
        # Snapshot of successfully acknowledged values (taken under lock).
        with self.lock:
            return self.acked_values
    @property
    def not_acked(self):
        # Snapshot of values that hit a producer send error.
        with self.lock:
            return self.not_acked_values
    @property
    def num_acked(self):
        with self.lock:
            return len(self.acked_values)
    @property
    def num_not_acked(self):
        with self.lock:
            return len(self.not_acked_values)
    def stop_node(self, node):
        # Kill the remote producer process, then wait for the parser thread.
        node.account.kill_process("VerifiableProducer", allow_fail=False)
        # block until the corresponding thread exits
        if len(self.worker_threads) >= self.idx(node):
            # Need to guard this because stop is preemptively called before the worker threads are added and started
            self.worker_threads[self.idx(node) - 1].join()
    def clean_node(self, node):
        # Remove the producer log from the remote node.
        node.account.ssh("rm -rf /mnt/producer.log")
    def try_parse_json(self, string):
        """Try to parse a string as json. Return None if not parseable."""
        try:
            record = json.loads(string)
            return record
        except ValueError:
            self.logger.debug("Could not parse as json: %s" % str(string))
            return None
| {
"content_hash": "59d5a751cda7e64b5dde94b50cb779d2",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 116,
"avg_line_length": 32.308510638297875,
"alnum_prop": 0.5706289101086599,
"repo_name": "edenhill/muckrake",
"id": "cedf3ece93953ae268fc90fd6f44d42865d389b2",
"size": "3612",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "muckrake/services/verifiable_producer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "142006"
},
{
"name": "Shell",
"bytes": "28531"
}
],
"symlink_target": ""
} |
"""
Removes entity mentions of zero length from mentions
that survived all pre-processing stages of AFET.
Divide sanitized mentions into train/dev/test split.
"""
import sys
import json
import random
from datetime import datetime
# Test start number according to mention_type_test.txt: the first mention
# index that belongs to the test split, per dataset.
TSN = {
    'BBN': 86078,
    'OntoNotes': 219634,
    'Wiki': 2677780,
}
def generate_sanitized_mentions(dataset, data_directory, dev_percentage, sanitized_directory):
    """
    Generate and save sanitized mention set.

    Reads `<data_directory><dataset>/mention.txt` (tab-separated: uid, index,
    ...), splits the surviving mention uids into train/test by TSN[dataset],
    then carves a seeded random dev subset out of test. Writes the three uid
    lists under `<sanitized_directory><dataset>/` and returns them as a dict
    of sets keyed 'train'/'test'/'dev'.

    NOTE(review): despite its name, `dev_percentage` is used as a divisor
    (dev size = len(test) // dev_percentage), not a percentage -- confirm.
    """
    sanitized_mentions = {}
    sanitized_mentions['train'] = []
    sanitized_mentions['test'] = []
    sanitized_mentions['dev'] = []
    with open(data_directory + dataset + '/mention.txt', 'r') as file_p:
        for row in file_p:
            # NOTE(review): substring test -- any row containing '-1'
            # anywhere (not only a -1 field) is dropped; confirm intended.
            if '-1' not in row:
                parts = row.split('\t')
                # Mentions at or beyond the dataset's test start number
                # belong to the test split.
                if int(parts[1]) >= TSN[dataset]:
                    sanitized_mentions['test'].append(parts[0])
                else:
                    sanitized_mentions['train'].append(parts[0])
    dev_choices = len(sanitized_mentions['test']) // dev_percentage
    # time = datetime.now().microsecond
    # hard coded seed values to replicate the same dev/test split.
    if dataset == 'BBN':
        time = 833365
    else:
        time = 536254
    print("Using seed", time)
    random.seed(time)
    # Sample dev from the (still ordered) test list, then deduplicate and
    # remove the dev uids from test.
    sanitized_mentions['dev'] = set(random.sample(sanitized_mentions['test'], dev_choices))
    sanitized_mentions['test'] = set(sanitized_mentions['test'])
    sanitized_mentions['train'] = set(sanitized_mentions['train'])
    sanitized_mentions['test'] = sanitized_mentions['test'].difference(sanitized_mentions['dev'])
    with open(sanitized_directory + dataset + '/sanitized_mention_dev.txt', 'w') as file_p:
        file_p.write('\n'.join(sorted(list(sanitized_mentions['dev']))))
    with open(sanitized_directory + dataset + '/sanitized_mention_test.txt', 'w') as file_p:
        file_p.write('\n'.join(sorted(list(sanitized_mentions['test']))))
    with open(sanitized_directory + dataset + '/sanitized_mention_train.txt', 'w') as file_p:
        file_p.write('\n'.join(sorted(list(sanitized_mentions['train']))))
    return sanitized_mentions
def generate_label_set(dataset, data_directory, sanitized_directory):
    """
    Generate and save list of unique labels used in a file.

    Collects every label appearing in any mention of
    `<data_directory><dataset>/train_new.json` (one json object per line),
    writes them sorted to `<sanitized_directory><dataset>/sanitized_labels.txt`
    and returns the set of labels.
    """
    source_path = data_directory + dataset + '/train_new.json'
    unique_labels = set()
    with open(source_path, 'r') as source:
        for line in source:
            sentence = json.loads(line)
            for mention in sentence['mentions']:
                unique_labels.update(mention['labels'])
    target_path = sanitized_directory + dataset + '/sanitized_labels.txt'
    with open(target_path, 'w') as target:
        target.write('\n'.join(sorted(unique_labels)))
    return unique_labels
def generate_pos_and_dep_set(dataset, data_directory, sanitized_directory):
    """
    Generate and save list of unique pos tag and dep type used in a file.

    Scans `<data_directory><dataset>/train_new.json` (one json object per
    line) and writes the sorted unique POS tags and dependency types to
    `sanitized_pos.txt` and `sanitized_dep_type.txt` respectively under
    `<sanitized_directory><dataset>/`.
    """
    source_path = data_directory + dataset + '/train_new.json'
    unique_pos = set()
    unique_dep_type = set()
    with open(source_path, 'r') as source:
        for line in source:
            sentence = json.loads(line)
            unique_pos.update(sentence['pos'])
            unique_dep_type.update(dep['type'] for dep in sentence['dep'])
    with open(sanitized_directory + dataset + '/sanitized_pos.txt', 'w') as target:
        target.write('\n'.join(sorted(unique_pos)))
    with open(sanitized_directory + dataset + '/sanitized_dep_type.txt', 'w') as target:
        target.write('\n'.join(sorted(unique_dep_type)))
def sanitize(file_path, mention_set, output_file_path, label_set):
    """
    Sanitize one json-lines data file.

    Reads `file_path` (one sentence json object per line), keeps only the
    mentions whose uid (``fileid_senid_start_end``) appears in `mention_set`
    and has not been seen before, restricts their labels to `label_set`,
    and writes the rewritten sentences to `output_file_path`.

    :param file_path: input json-lines file.
    :param mention_set: set of mention uids to keep.
    :param output_file_path: output json-lines file.
    :param label_set: set of labels allowed to survive.
    """
    used_mentions = set()
    # `with` guarantees both handles are closed even if a row fails to
    # parse; the original left the output handle open on any exception.
    with open(file_path, 'r') as file_p, open(output_file_path, 'w') as file_p_new_json:
        for row in file_p:
            data = json.loads(row)
            new_mentions = []
            for mention in data['mentions']:
                uid = '_'.join([data['fileid'],
                                str(data['senid']),
                                str(mention['start']),
                                str(mention['end'])
                                ])
                # Keep each uid at most once, even if it recurs in the file.
                if uid in mention_set and uid not in used_mentions:
                    # sorted() makes the output deterministic; the original
                    # serialized the label set in arbitrary hash order.
                    mention['labels'] = sorted(
                        label for label in set(mention['labels'])
                        if label in label_set)
                    new_mentions.append(dict(mention))
                    used_mentions.add(uid)
            new_row = {}
            new_row['tokens'] = data['tokens']
            new_row['pos'] = data['pos']
            new_row['dep'] = data['dep']
            new_row['mentions'] = new_mentions
            new_row['senid'] = data['senid']
            new_row['fileid'] = data['fileid']
            json.dump(new_row, file_p_new_json)
            file_p_new_json.write('\n')
if __name__ == '__main__':
    if len(sys.argv) != 5:
        print('Usage: dataset data_directory dev_percentage sanitize_directory')
        sys.exit(0)
    else:
        print('Generating entity mentions.')
        SM = generate_sanitized_mentions(sys.argv[1], sys.argv[2], int(sys.argv[3]), sys.argv[4])
        print('Generating label set.')
        UL = generate_label_set(sys.argv[1], sys.argv[2], sys.argv[4])
        print('Generating pos and dep types.')
        generate_pos_and_dep_set(sys.argv[1], sys.argv[2], sys.argv[4])
        print('Sanitizing training data.')
        sanitize(sys.argv[2] + sys.argv[1] + '/train_new.json',
                 SM['train'],
                 sys.argv[4] + sys.argv[1] + '/sanitized_train.json',
                 UL
                 )
        # BUG FIX: the next two progress messages were swapped relative to
        # the split actually processed (SM['dev'] -> sanitized_dev.json was
        # announced as "testing data" and vice versa).
        print('Sanitizing development data.')
        sanitize(sys.argv[2] + sys.argv[1] + '/test_new.json',
                 SM['dev'],
                 sys.argv[4] + sys.argv[1] + '/sanitized_dev.json',
                 UL
                 )
        print('Sanitizing testing data.')
        sanitize(sys.argv[2] + sys.argv[1] + '/test_new.json',
                 SM['test'],
                 sys.argv[4] + sys.argv[1] + '/sanitized_test.json',
                 UL
                 )
| {
"content_hash": "42d7fa6bc273acbccd3ade9c83763ff8",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 97,
"avg_line_length": 40.76129032258064,
"alnum_prop": 0.5620449509338398,
"repo_name": "abhipec/fnet",
"id": "1efe363ab08a971450159c42f6f11b82a434ab8c",
"size": "6318",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/data_processing/sanitizer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "18547"
},
{
"name": "HTML",
"bytes": "5068669"
},
{
"name": "Python",
"bytes": "138203"
},
{
"name": "Shell",
"bytes": "6827"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class GaugeValidator(_plotly_utils.basevalidators.CompoundValidator):
    # Compound validator for the `gauge` property of `indicator` traces.
    # NOTE(review): appears to be auto-generated plotly boilerplate; the
    # `data_docs` string below is shown verbatim to users at runtime and
    # must not be edited by hand.
    def __init__(self, plotly_name="gauge", parent_name="indicator", **kwargs):
        """Validate and coerce values assigned to ``indicator.gauge``.

        ``data_class_str`` names the compound data class ("Gauge") that
        validated dicts are converted to; callers may override it (and
        ``data_docs``) via ``kwargs``.
        """
        super(GaugeValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str=kwargs.pop("data_class_str", "Gauge"),
            data_docs=kwargs.pop(
                "data_docs",
                """
            axis
                :class:`plotly.graph_objects.indicator.gauge.Ax
                is` instance or dict with compatible properties
            bar
                Set the appearance of the gauge's value
            bgcolor
                Sets the gauge background color.
            bordercolor
                Sets the color of the border enclosing the
                gauge.
            borderwidth
                Sets the width (in px) of the border enclosing
                the gauge.
            shape
                Set the shape of the gauge
            steps
                A tuple of :class:`plotly.graph_objects.indicat
                or.gauge.Step` instances or dicts with
                compatible properties
            stepdefaults
                When used in a template (as layout.template.dat
                a.indicator.gauge.stepdefaults), sets the
                default property values to use for elements of
                indicator.gauge.steps
            threshold
                :class:`plotly.graph_objects.indicator.gauge.Th
                reshold` instance or dict with compatible
                properties
""",
            ),
            **kwargs
        )
| {
"content_hash": "389ee752878293a4150fd6b9d46dedc4",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 79,
"avg_line_length": 37.93181818181818,
"alnum_prop": 0.5344517675254643,
"repo_name": "plotly/python-api",
"id": "ea8857002d14898cbf7d10cf8e49d0808c01c8e1",
"size": "1669",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/indicator/_gauge.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
} |
import os
import sys
from astroid import MANAGER, builder
from astroid.bases import BUILTINS
# Location of the Python-3 test fixtures, relative to this module.
DATA_DIR = os.path.join("testdata", "python3")
RESOURCE_PATH = os.path.join(os.path.dirname(__file__), DATA_DIR, "data")


def find(name):
    """Return the normalized path of fixture *name* inside DATA_DIR."""
    here = os.path.dirname(__file__)
    return os.path.normpath(os.path.join(here, DATA_DIR, name))


def build_file(path, modname=None):
    """Parse the fixture at *path* into an astroid module node."""
    return builder.AstroidBuilder().file_build(find(path), modname)
class SysPathSetup:
    """Test mixin that puts the fixture directory on ``sys.path`` for the
    duration of a test and restores the import state afterwards."""

    def setUp(self):
        sys.path.insert(0, find(""))

    def tearDown(self):
        del sys.path[0]
        # Drop importer-cache entries pointing into the fixture tree so
        # later tests start from a clean import state.
        datadir = find("")
        stale = [key for key in sys.path_importer_cache
                 if key.startswith(datadir)]
        for key in stale:
            del sys.path_importer_cache[key]
class AstroidCacheSetupMixin:
    """Mixin for handling the astroid cache problems.

    When clearing the astroid cache, some tests fail due to cache
    inconsistencies, where some objects had a different builtins object
    referenced. This saves the builtins module and makes sure to add it
    back to the astroid_cache after the tests finish.

    The builtins module is special, since some of the transforms for a
    couple of its objects (str, bytes etc) are executed only once, so
    astroid_bootstrapping will be useless for retrieving the original
    builtins module.
    """

    @classmethod
    def setup_class(cls):
        # Remember the builtins module currently held by the cache.
        cls._builtins = MANAGER.astroid_cache.get(BUILTINS)

    @classmethod
    def teardown_class(cls):
        # Restore it so later tests see a consistent builtins object.
        if cls._builtins:
            MANAGER.astroid_cache[BUILTINS] = cls._builtins
| {
"content_hash": "c9097ac3adcf04cb0b13107fa5729f94",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 84,
"avg_line_length": 29.903846153846153,
"alnum_prop": 0.6861736334405145,
"repo_name": "ruchee/vimrc",
"id": "20adc2f804aa5ca53d42d9a6a7488526e06e3601",
"size": "2180",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vimfiles/bundle/vim-python/submodules/astroid/tests/resources.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "22028"
},
{
"name": "Blade",
"bytes": "3314"
},
{
"name": "C#",
"bytes": "1734"
},
{
"name": "CSS",
"bytes": "31547"
},
{
"name": "Clojure",
"bytes": "47036"
},
{
"name": "CoffeeScript",
"bytes": "9274"
},
{
"name": "Common Lisp",
"bytes": "54314"
},
{
"name": "D",
"bytes": "11562"
},
{
"name": "Dockerfile",
"bytes": "7620"
},
{
"name": "Elixir",
"bytes": "41696"
},
{
"name": "Emacs Lisp",
"bytes": "10489"
},
{
"name": "Erlang",
"bytes": "137788"
},
{
"name": "F#",
"bytes": "2230"
},
{
"name": "Go",
"bytes": "54655"
},
{
"name": "HTML",
"bytes": "178954"
},
{
"name": "Haml",
"bytes": "39"
},
{
"name": "Haskell",
"bytes": "2031"
},
{
"name": "JavaScript",
"bytes": "9086"
},
{
"name": "Julia",
"bytes": "9540"
},
{
"name": "Kotlin",
"bytes": "8669"
},
{
"name": "Less",
"bytes": "327"
},
{
"name": "Makefile",
"bytes": "87500"
},
{
"name": "Mustache",
"bytes": "3375"
},
{
"name": "Nix",
"bytes": "1860"
},
{
"name": "PHP",
"bytes": "9238"
},
{
"name": "PLpgSQL",
"bytes": "33747"
},
{
"name": "Perl",
"bytes": "84200"
},
{
"name": "PostScript",
"bytes": "3891"
},
{
"name": "Python",
"bytes": "7366233"
},
{
"name": "Racket",
"bytes": "1150"
},
{
"name": "Raku",
"bytes": "21146"
},
{
"name": "Ruby",
"bytes": "133344"
},
{
"name": "SCSS",
"bytes": "327"
},
{
"name": "Sass",
"bytes": "308"
},
{
"name": "Scala",
"bytes": "13125"
},
{
"name": "Shell",
"bytes": "52916"
},
{
"name": "Smarty",
"bytes": "300"
},
{
"name": "Swift",
"bytes": "11436"
},
{
"name": "TypeScript",
"bytes": "4663"
},
{
"name": "Vim Script",
"bytes": "10545492"
},
{
"name": "Vim Snippet",
"bytes": "559139"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import unittest
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.dygraph.nn import Embedding
import paddle.fluid.framework as framework
from paddle.fluid.optimizer import Adam
from paddle.fluid.dygraph.base import to_variable
from test_imperative_base import new_program_scope
from paddle.fluid.executor import global_scope
import numpy as np
import six
import pickle
class SimpleLSTMRNN(fluid.Layer):
    """Multi-layer LSTM unrolled over a fixed number of time steps,
    hand-built from fluid ops (no cuDNN LSTM)."""
    def __init__(self,
                 name_scope,
                 hidden_size,
                 num_steps,
                 num_layers=2,
                 init_scale=0.1,
                 dropout=None):
        """
        :param hidden_size: size of each LSTM layer's hidden state.
        :param num_steps: fixed unroll length (time steps per forward pass).
        :param num_layers: number of stacked LSTM layers.
        :param init_scale: bound of the uniform parameter initializer.
        :param dropout: dropout probability between steps, or None to disable.
        """
        super(SimpleLSTMRNN, self).__init__(name_scope)
        self._hidden_size = hidden_size
        self._num_layers = num_layers
        self._init_scale = init_scale
        self._dropout = dropout
        self._input = None
        self._num_steps = num_steps
        self.cell_array = []
        self.hidden_array = []
    def _build_once(self, input_embedding, init_hidden=None, init_cell=None):
        # Create one fused gate weight [2H, 4H] and bias [4H] per layer;
        # the 4H axis packs the i, j, f, o gates that forward() splits.
        self.weight_1_arr = []
        # NOTE(review): weight_2_arr and mask_array are initialized but never
        # used in this class -- presumably leftovers; confirm.
        self.weight_2_arr = []
        self.bias_arr = []
        self.mask_array = []
        for i in range(self._num_layers):
            weight_1 = self.create_parameter(
                attr=fluid.ParamAttr(
                    initializer=fluid.initializer.UniformInitializer(
                        low=-self._init_scale, high=self._init_scale)),
                shape=[self._hidden_size * 2, self._hidden_size * 4],
                dtype="float32",
                default_initializer=fluid.initializer.UniformInitializer(
                    low=-self._init_scale, high=self._init_scale))
            self.weight_1_arr.append(self.add_parameter('w_%d' % i, weight_1))
            bias_1 = self.create_parameter(
                attr=fluid.ParamAttr(
                    initializer=fluid.initializer.UniformInitializer(
                        low=-self._init_scale, high=self._init_scale)),
                shape=[self._hidden_size * 4],
                dtype="float32",
                default_initializer=fluid.initializer.Constant(0.0))
            self.bias_arr.append(self.add_parameter('b_%d' % i, bias_1))
    def forward(self, input_embedding, init_hidden=None, init_cell=None):
        """Unroll the stacked LSTM over the embedded input.

        Returns (step outputs, last hidden states, last cell states);
        the state tensors are reshaped/transposed back to a
        [num_layers, batch, hidden] layout.
        """
        # Slice the per-layer initial states out of init_hidden/init_cell.
        self.cell_array = []
        self.hidden_array = []
        for i in range(self._num_layers):
            pre_hidden = fluid.layers.slice(
                init_hidden, axes=[0], starts=[i], ends=[i + 1])
            pre_cell = fluid.layers.slice(
                init_cell, axes=[0], starts=[i], ends=[i + 1])
            pre_hidden = fluid.layers.reshape(
                pre_hidden, shape=[-1, self._hidden_size])
            pre_cell = fluid.layers.reshape(
                pre_cell, shape=[-1, self._hidden_size])
            self.hidden_array.append(pre_hidden)
            self.cell_array.append(pre_cell)
        res = []
        for index in range(self._num_steps):
            # Take this time step's embedding slice as the bottom input.
            self._input = fluid.layers.slice(
                input_embedding, axes=[1], starts=[index], ends=[index + 1])
            self._input = fluid.layers.reshape(
                self._input, shape=[-1, self._hidden_size])
            for k in range(self._num_layers):
                pre_hidden = self.hidden_array[k]
                pre_cell = self.cell_array[k]
                weight_1 = self.weight_1_arr[k]
                bias = self.bias_arr[k]
                # Standard LSTM cell: one matmul computes all four gates.
                nn = fluid.layers.concat([self._input, pre_hidden], 1)
                gate_input = fluid.layers.matmul(x=nn, y=weight_1)
                gate_input = fluid.layers.elementwise_add(gate_input, bias)
                i, j, f, o = fluid.layers.split(
                    gate_input, num_or_sections=4, dim=-1)
                c = pre_cell * fluid.layers.sigmoid(f) + fluid.layers.sigmoid(
                    i) * fluid.layers.tanh(j)
                m = fluid.layers.tanh(c) * fluid.layers.sigmoid(o)
                self.hidden_array[k] = m
                self.cell_array[k] = c
                # Output of this layer feeds the next layer up.
                self._input = m
                if self._dropout is not None and self._dropout > 0.0:
                    self._input = fluid.layers.dropout(
                        self._input,
                        dropout_prob=self._dropout,
                        dropout_implementation='upscale_in_train')
            res.append(
                fluid.layers.reshape(
                    self._input, shape=[1, -1, self._hidden_size]))
        # Stack step outputs to [batch, num_steps, hidden].
        real_res = fluid.layers.concat(res, 0)
        real_res = fluid.layers.transpose(x=real_res, perm=[1, 0, 2])
        last_hidden = fluid.layers.concat(self.hidden_array, 1)
        last_hidden = fluid.layers.reshape(
            last_hidden, shape=[-1, self._num_layers, self._hidden_size])
        last_hidden = fluid.layers.transpose(x=last_hidden, perm=[1, 0, 2])
        last_cell = fluid.layers.concat(self.cell_array, 1)
        last_cell = fluid.layers.reshape(
            last_cell, shape=[-1, self._num_layers, self._hidden_size])
        last_cell = fluid.layers.transpose(x=last_cell, perm=[1, 0, 2])
        return real_res, last_hidden, last_cell
class PtbModel(fluid.Layer):
    """PTB language model: embedding -> stacked LSTM -> softmax projection,
    returning the summed per-step cross-entropy loss and the final states."""
    def __init__(self,
                 name_scope,
                 hidden_size,
                 vocab_size,
                 num_layers=2,
                 num_steps=20,
                 init_scale=0.1,
                 dropout=None):
        """
        :param hidden_size: embedding and LSTM hidden size.
        :param vocab_size: vocabulary size for embedding and output softmax.
        :param num_layers: number of stacked LSTM layers.
        :param num_steps: unroll length.
        :param init_scale: bound of the uniform parameter initializer.
        :param dropout: dropout probability on embeddings, or None to disable.
        """
        super(PtbModel, self).__init__(name_scope)
        self.hidden_size = hidden_size
        self.vocab_size = vocab_size
        self.init_scale = init_scale
        self.num_layers = num_layers
        self.num_steps = num_steps
        self.dropout = dropout
        self.simple_lstm_rnn = SimpleLSTMRNN(
            self.full_name(),
            hidden_size,
            num_steps,
            num_layers=num_layers,
            init_scale=init_scale,
            dropout=dropout)
        self.embedding = Embedding(
            self.full_name(),
            size=[vocab_size, hidden_size],
            dtype='float32',
            is_sparse=False,
            param_attr=fluid.ParamAttr(
                name='embedding_para',
                initializer=fluid.initializer.UniformInitializer(
                    low=-init_scale, high=init_scale)))
        self.softmax_weight = self.create_parameter(
            attr=fluid.ParamAttr(),
            shape=[self.hidden_size, self.vocab_size],
            dtype="float32",
            default_initializer=fluid.initializer.UniformInitializer(
                low=-self.init_scale, high=self.init_scale))
        self.softmax_bias = self.create_parameter(
            attr=fluid.ParamAttr(),
            shape=[self.vocab_size],
            dtype="float32",
            default_initializer=fluid.initializer.UniformInitializer(
                low=-self.init_scale, high=self.init_scale))
    def forward(self, input, label, init_hidden, init_cell):
        """Return (loss, last_hidden, last_cell) for one unrolled batch."""
        init_h = fluid.layers.reshape(
            init_hidden, shape=[self.num_layers, -1, self.hidden_size])
        init_c = fluid.layers.reshape(
            init_cell, shape=[self.num_layers, -1, self.hidden_size])
        x_emb = self.embedding(input)
        x_emb = fluid.layers.reshape(
            x_emb, shape=[-1, self.num_steps, self.hidden_size])
        if self.dropout is not None and self.dropout > 0.0:
            # BUG FIX: was `self.drop_out`, an attribute that is never set
            # (__init__ stores `self.dropout`), so any model constructed
            # with dropout > 0 raised AttributeError here.
            x_emb = fluid.layers.dropout(
                x_emb,
                dropout_prob=self.dropout,
                dropout_implementation='upscale_in_train')
        rnn_out, last_hidden, last_cell = self.simple_lstm_rnn(x_emb, init_h,
                                                               init_c)
        rnn_out = fluid.layers.reshape(
            rnn_out, shape=[-1, self.num_steps, self.hidden_size])
        # Project LSTM outputs to vocabulary logits.
        projection = fluid.layers.matmul(rnn_out, self.softmax_weight)
        projection = fluid.layers.elementwise_add(projection, self.softmax_bias)
        projection = fluid.layers.reshape(
            projection, shape=[-1, self.vocab_size])
        loss = fluid.layers.softmax_with_cross_entropy(
            logits=projection, label=label, soft_label=False)
        # Mean over the batch, then sum over the unrolled time steps.
        loss = fluid.layers.reshape(loss, shape=[-1, self.num_steps])
        loss = fluid.layers.reduce_mean(loss, dim=[0])
        loss = fluid.layers.reduce_sum(loss)
        # NOTE(review): `permissions` is not a fluid Variable attribute used
        # anywhere visible here -- presumably leftover; confirm before removing.
        loss.permissions = True
        return loss, last_hidden, last_cell
class TestSaveLoadBase(unittest.TestCase):
    """fluid.save / fluid.load round-trip: train a small PTB model, save all
    persistable variables, zero them, load, and check exact restoration."""
    def test_ptb_rnn_cpu_float32(self):
        # Model / training hyper-parameters for the toy run.
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        with new_program_scope():
            # Fix seeds so parameter values are reproducible.
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale)
            place = fluid.CPUPlace() if not core.is_compiled_with_cuda(
            ) else fluid.CUDAPlace(0)
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps, 1], dtype='int64')
            # NOTE(review): y is declared float32 here but fed int64 data
            # below and used as a hard label -- confirm intended.
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32')
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32')
            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell)
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()
            out = exe.run(framework.default_startup_program())
            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            # Train a few batches of fixed synthetic data so every
            # parameter and optimizer variable receives updates.
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(fluid.default_main_program(),
                              feed={
                                  "x": x_data,
                                  "y": y_data,
                                  "init_hidden": init_hidden_data,
                                  "init_cell": init_cell_data
                              },
                              fetch_list=fetch_list)
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]
            # Snapshot every persistable variable's value before saving.
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(var.name)
                                 .get_tensor())
                    # make sure all the parameter or optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
            fluid.save(main_program, "./test_1")
            # Zero every persistable variable so a successful load is
            # distinguishable from stale state.
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)
                    new_t = np.array(fluid.global_scope().find_var(var.name)
                                     .get_tensor())
                    # make sure all the parameter or optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)
            fluid.load(main_program, "./test_1", exe)
            # Every variable must match its pre-save snapshot exactly.
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(fluid.global_scope().find_var(var.name)
                                     .get_tensor())
                    base_t = base_map[var.name]
                    self.assertTrue(np.array_equal(new_t, base_t))
class TestSaveLoadPartial(unittest.TestCase):
    """Partial load: save from the full training program, then load into a
    test-mode clone taken before the optimizer (and an extra fc layer) were
    added, verifying the clone's subset of variables is restored."""
    def test_ptb_rnn_cpu_float32(self):
        # Model / training hyper-parameters for the toy run.
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        with new_program_scope():
            # Fix seeds so parameter values are reproducible.
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale)
            place = fluid.CPUPlace() if not core.is_compiled_with_cuda(
            ) else fluid.CUDAPlace(0)
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps, 1], dtype='int64')
            # NOTE(review): y is declared float32 here but fed int64 data
            # below and used as a hard label -- confirm intended.
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32')
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32')
            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell)
            # Clone BEFORE adding the fc layer and optimizer ops, so the
            # test program holds only a subset of the saved variables.
            test_program = fluid.default_main_program().clone(for_test=True)
            add_1 = fluid.layers.fc(static_last_hidden,
                                    size=hidden_size,
                                    num_flatten_dims=2,
                                    bias_attr=False)
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()
            out = exe.run(framework.default_startup_program())
            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            # Train a few batches of fixed synthetic data so every
            # parameter and optimizer variable receives updates.
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(fluid.default_main_program(),
                              feed={
                                  "x": x_data,
                                  "y": y_data,
                                  "init_hidden": init_hidden_data,
                                  "init_cell": init_cell_data
                              },
                              fetch_list=fetch_list)
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]
            # Snapshot every persistable variable's value before saving.
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(var.name)
                                 .get_tensor())
                    # make sure all the parameter or optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
            fluid.save(main_program, "./test_1")
            # Zero every persistable variable so a successful load is
            # distinguishable from stale state.
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)
                    new_t = np.array(fluid.global_scope().find_var(var.name)
                                     .get_tensor())
                    # make sure all the parameter or optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)
            # Load into the pre-optimizer clone; executor may be None here.
            fluid.load(test_program, "./test_1", None)
            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    # NOTE(review): leftover debug print -- consider removing.
                    print(var.name)
                    new_t = np.array(fluid.global_scope().find_var(var.name)
                                     .get_tensor())
                    base_t = base_map[var.name]
                    self.assertTrue(np.array_equal(new_t, base_t))
class TestSaveLoadSetStateDict(unittest.TestCase):
    """Save/load round-trip of the PTB model.

    NOTE(review): despite the class name, this body is identical to
    TestSaveLoadBase (plain fluid.save / fluid.load); nothing here calls a
    set_state_dict API -- confirm whether that coverage was intended.
    """
    def test_ptb_rnn_cpu_float32(self):
        # Model / training hyper-parameters for the toy run.
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        with new_program_scope():
            # Fix seeds so parameter values are reproducible.
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale)
            place = fluid.CPUPlace() if not core.is_compiled_with_cuda(
            ) else fluid.CUDAPlace(0)
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps, 1], dtype='int64')
            # NOTE(review): y is declared float32 here but fed int64 data
            # below and used as a hard label -- confirm intended.
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32')
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32')
            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell)
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()
            out = exe.run(framework.default_startup_program())
            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            # Train a few batches of fixed synthetic data so every
            # parameter and optimizer variable receives updates.
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(fluid.default_main_program(),
                              feed={
                                  "x": x_data,
                                  "y": y_data,
                                  "init_hidden": init_hidden_data,
                                  "init_cell": init_cell_data
                              },
                              fetch_list=fetch_list)
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]
            # Snapshot every persistable variable's value before saving.
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(var.name)
                                 .get_tensor())
                    # make sure all the parameter or optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
            fluid.save(main_program, "./test_1")
            # Zero every persistable variable so a successful load is
            # distinguishable from stale state.
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)
                    new_t = np.array(fluid.global_scope().find_var(var.name)
                                     .get_tensor())
                    # make sure all the parameter or optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)
            fluid.load(main_program, "./test_1", exe)
            # Every variable must match its pre-save snapshot exactly.
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(fluid.global_scope().find_var(var.name)
                                     .get_tensor())
                    base_t = base_map[var.name]
                    self.assertTrue(np.array_equal(new_t, base_t))
class TestProgramStatePartial(unittest.TestCase):
    """Save/restore round-trip using the *program-state* API on a cloned
    test program.

    Trains a static-graph PTB RNN briefly, snapshots every parameter and
    persistable variable, saves with ``fluid.save``, zeroes the live
    tensors, then restores via ``fluid.load_program_state`` +
    ``fluid.set_program_state`` into ``test_program`` (a ``for_test``
    clone taken *before* the optimizer ops were added) and checks the
    restored values match the snapshot exactly.
    """
    def test_ptb_rnn_cpu_float32(self):
        # Model/training hyper-parameters for the tiny smoke run.
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        with new_program_scope():
            # Fix seeds so the trained weights are deterministic.
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale)
            place = fluid.CPUPlace() if not core.is_compiled_with_cuda(
            ) else fluid.CUDAPlace(0)
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps, 1], dtype='int64')
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32')
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32')
            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell)
            # Clone BEFORE adding the extra fc / optimizer ops, so the
            # test program holds only a subset of the saved variables
            # (hence "Partial" in the class name).
            test_program = fluid.default_main_program().clone(for_test=True)
            add_1 = fluid.layers.fc(static_last_hidden,
                                    size=hidden_size,
                                    num_flatten_dims=2,
                                    bias_attr=False)
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()
            out = exe.run(framework.default_startup_program())
            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                # Deterministic synthetic batch (same data every step).
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(fluid.default_main_program(),
                              feed={
                                  "x": x_data,
                                  "y": y_data,
                                  "init_hidden": init_hidden_data,
                                  "init_cell": init_cell_data
                              },
                              fetch_list=fetch_list)
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]
            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(var.name)
                                 .get_tensor())
                    # make sure all the paramerter or optimzier var have been update
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
            fluid.save(main_program, "./test_1")
            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)
                    new_t = np.array(fluid.global_scope().find_var(var.name)
                                     .get_tensor())
                    # make sure all the paramerter or optimzier var have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)
            #fluid.load(test_program, "./test_1", None )
            # Restore through the two-step program-state API instead of
            # fluid.load, exercising partial restoration into the clone.
            program_state = fluid.load_program_state("./test_1")
            fluid.set_program_state(test_program, program_state)
            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    print(var.name)
                    new_t = np.array(fluid.global_scope().find_var(var.name)
                                     .get_tensor())
                    base_t = base_map[var.name]
                    self.assertTrue(np.array_equal(new_t, base_t))
class TestVariableInit(unittest.TestCase):
    """Manually reload a saved program's parameters into a fresh Scope.

    Saves a small two-fc network with ``fluid.save``, then — bypassing
    ``fluid.load`` — unpickles the ``.pdparams``/``.pdopt`` files,
    creates empty loaded-parameter slots via
    ``fluid.core._create_loaded_parameter`` and sets each tensor by
    hand, finally asserting the new scope's values equal the originals.
    """
    def test_variable_init(self):
        x = fluid.data(name="x", shape=[10, 10], dtype='float32')
        y = fluid.layers.fc(x, 10)
        z = fluid.layers.fc(y, 10)
        place = fluid.CPUPlace() if not core.is_compiled_with_cuda(
        ) else fluid.CUDAPlace(0)
        exe = fluid.Executor(place)
        exe.run(fluid.default_startup_program())
        fluid.save(fluid.default_main_program(), "./test_path")
        def set_var(var, ndarray):
            # Copy `ndarray` into `var`'s tensor on the tensor's own
            # device (CPU / CUDA-pinned / CUDA).
            t = var.get_tensor()
            p = t._place()
            if p.is_cpu_place():
                place = paddle.fluid.CPUPlace()
            elif p.is_cuda_pinned_place():
                place = paddle.fluid.CUDAPinnedPlace()
            else:
                p = paddle.fluid.core.Place()
                p.set_place(t._place())
                place = paddle.fluid.CUDAPlace(p.gpu_device_id())
            t.set(ndarray, place)
        program = fluid.default_main_program()
        new_scope = fluid.core.Scope()
        place = fluid.CPUPlace() if not core.is_compiled_with_cuda(
        ) else fluid.CUDAPlace(0)
        exe = fluid.Executor(place)
        parameter_list = list(
            filter(fluid.io.is_parameter, program.list_vars()))
        fluid.core._create_loaded_parameter(parameter_list, new_scope,
                                            exe._default_executor)
        parameter_file_name = "./test_path.pdparams"
        with open(parameter_file_name, 'rb') as f:
            load_dict = pickle.load(f)
        for v in parameter_list:
            assert v.name in load_dict, \
                "Can not find [{}] in model file [{}]".format(
                    v.name, parameter_file_name)
            new_v = new_scope.find_var(v.name)
            set_var(new_v, load_dict[v.name])
        # Repeat the same manual-restore dance for optimizer variables.
        opt_list = list(
            filter(fluid.io.is_belong_to_optimizer, program.list_vars()))
        fluid.core._create_loaded_parameter(opt_list, new_scope,
                                            exe._default_executor)
        opt_file_name = "./test_path.pdopt"
        with open(opt_file_name, 'rb') as f:
            load_dict = pickle.load(f)
        for v in opt_list:
            assert v.name in load_dict, \
                "Can not find [{}] in model file [{}]".format(
                    v.name, opt_file_name)
            new_v = new_scope.find_var(v.name)
            set_var(new_v, load_dict[v.name])
        base_map = {}
        for var in program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                t = np.array(fluid.global_scope().find_var(var.name)
                             .get_tensor())
                # make sure all the paramerter or optimzier var have been update
                base_map[var.name] = t
        for var in program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                new_t = np.array(new_scope.find_var(var.name).get_tensor())
                base_t = base_map[var.name]
                self.assertTrue(np.array_equal(new_t, base_t))
# Script entry point: run the unittest discovery/runner for this module.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "4b456ba7e72f2e29c3d560ddc997d823",
"timestamp": "",
"source": "github",
"line_count": 710,
"max_line_length": 89,
"avg_line_length": 42.7112676056338,
"alnum_prop": 0.5100412201154163,
"repo_name": "chengduoZH/Paddle",
"id": "7cfd9bd5a2777d1b7810be3bd0a2f60572342df4",
"size": "30938",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/test_static_save_load.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "32490"
},
{
"name": "C++",
"bytes": "10146609"
},
{
"name": "CMake",
"bytes": "291349"
},
{
"name": "Cuda",
"bytes": "1192566"
},
{
"name": "Dockerfile",
"bytes": "10002"
},
{
"name": "Python",
"bytes": "7124331"
},
{
"name": "Ruby",
"bytes": "353"
},
{
"name": "Shell",
"bytes": "200906"
}
],
"symlink_target": ""
} |
from webob.request import Request
from webob.dec import wsgify
from webob.exc import no_escape
from webob.exc import strip_tags
from webob.exc import HTTPException
from webob.exc import WSGIHTTPException
from webob.exc import _HTTPMove
from webob.exc import HTTPMethodNotAllowed
from webob.exc import HTTPExceptionMiddleware
from webob.exc import status_map
from nose.tools import eq_, ok_, assert_equal, assert_raises
@wsgify
def method_not_allowed_app(req):
    """Tiny WSGI app: answers GET with 'hello!', raises 405 otherwise."""
    if req.method != 'GET':
        raise HTTPMethodNotAllowed()
    return 'hello!'
def test_noescape_null():
    """no_escape(None) yields the empty string."""
    assert_equal(no_escape(None), '')
def test_noescape_not_basestring():
    """Non-string input is stringified by no_escape()."""
    assert_equal(no_escape(42), '42')
def test_noescape_unicode():
    """An object exposing __unicode__ is converted via its text form."""
    class DummyUnicodeObject(object):
        def __unicode__(self):
            return '42'
    duo = DummyUnicodeObject()
    assert_equal(no_escape(duo), '42')
def test_strip_tags_empty():
    """strip_tags('') is a no-op."""
    assert_equal(strip_tags(''), '')
def test_strip_tags_newline_to_space():
    """Newlines are folded into single spaces."""
    assert_equal(strip_tags('a\nb'), 'a b')
def test_strip_tags_zaps_carriage_return():
    """Carriage returns are removed outright (not converted to spaces)."""
    assert_equal(strip_tags('a\rb'), 'ab')
def test_strip_tags_br_to_newline():
    """<br/> tags become newlines."""
    assert_equal(strip_tags('a<br/>b'), 'a\nb')
def test_strip_tags_zaps_comments():
    """HTML comment markers are dropped; enclosed text is kept."""
    assert_equal(strip_tags('a<!--b-->'), 'ab')
def test_strip_tags_zaps_tags():
    """Tags are removed but their inner text survives."""
    assert_equal(strip_tags('foo<bar>baz</bar>'), 'foobaz')
def test_HTTPException():
    """HTTPException keeps the wrapped WSGI callable in ``wsgi_response``,
    emits a warning when the deprecated ``exception`` attribute is read,
    and delegates ``__call__`` straight to the wrapped callable.
    """
    import warnings
    _called = []
    _result = object()
    def _response(environ, start_response):
        # Record the call so we can assert delegation happened exactly once.
        _called.append((environ, start_response))
        return _result
    environ = {}
    start_response = object()
    exc = HTTPException('testing', _response)
    ok_(exc.wsgi_response is _response)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        # Accessing .exception returns self and raises one warning.
        assert(exc.exception is exc)
        assert(len(w) == 1)
    result = exc(environ, start_response)
    # BUG FIX: was ``ok_(result is result)`` — a tautology that always
    # passes.  The call must return the wrapped callable's return value.
    ok_(result is _result)
    assert_equal(_called, [(environ, start_response)])
def test_exception_with_unicode_data():
    """A POST to the GET-only app yields a 405 response."""
    req = Request.blank('/', method='POST')
    res = req.get_response(method_not_allowed_app)
    assert res.status_code == 405
def test_WSGIHTTPException_headers():
    """Duplicate Set-Cookie headers are preserved, not collapsed."""
    exc = WSGIHTTPException(headers=[('Set-Cookie', 'a=1'),
                                     ('Set-Cookie', 'a=2')])
    mixed = exc.headers.mixed()
    assert mixed['set-cookie'] == ['a=1', 'a=2']
def test_WSGIHTTPException_w_body_template():
    """A custom body_template is compiled to string.Template and substituted."""
    from string import Template
    TEMPLATE = '$foo: $bar'
    exc = WSGIHTTPException(body_template = TEMPLATE)
    assert_equal(exc.body_template, TEMPLATE)
    ok_(isinstance(exc.body_template_obj, Template))
    eq_(exc.body_template_obj.substitute({'foo': 'FOO', 'bar': 'BAR'}),
        'FOO: BAR')
def test_WSGIHTTPException_w_empty_body():
    """With empty_body=True, content-type/length kwargs are discarded."""
    class EmptyOnly(WSGIHTTPException):
        empty_body = True
    exc = EmptyOnly(content_type='text/plain', content_length=234)
    ok_('content_type' not in exc.__dict__)
    ok_('content_length' not in exc.__dict__)
def test_WSGIHTTPException___str__():
    """str() prefers the instance detail, falling back to the class explanation."""
    exc1 = WSGIHTTPException(detail='Detail')
    eq_(str(exc1), 'Detail')
    class Explain(WSGIHTTPException):
        explanation = 'Explanation'
    eq_(str(Explain()), 'Explanation')
def test_WSGIHTTPException_plain_body_no_comment():
    """plain_body() renders code/title/explanation/detail, no comment line."""
    class Explain(WSGIHTTPException):
        code = '999'
        title = 'Testing'
        explanation = 'Explanation'
    exc = Explain(detail='Detail')
    eq_(exc.plain_body({}),
        '999 Testing\n\nExplanation\n\n Detail ')
def test_WSGIHTTPException_html_body_w_comment():
    """html_body() embeds the comment as an HTML comment in the page."""
    class Explain(WSGIHTTPException):
        code = '999'
        title = 'Testing'
        explanation = 'Explanation'
    exc = Explain(detail='Detail', comment='Comment')
    eq_(exc.html_body({}),
        '<html>\n'
        ' <head>\n'
        '  <title>999 Testing</title>\n'
        ' </head>\n'
        ' <body>\n'
        '  <h1>999 Testing</h1>\n'
        '  Explanation<br /><br />\n'
        'Detail\n'
        '<!-- Comment -->\n\n'
        ' </body>\n'
        '</html>'
    )
def test_WSGIHTTPException_generate_response():
    """A bare WSGIHTTPException called as a WSGI app generates an HTML body
    when the client accepts text/html (titles render as None here because
    the base class defines no code/title).
    """
    def start_response(status, headers, exc_info=None):
        pass
    environ = {
        'wsgi.url_scheme': 'HTTP',
        'SERVER_NAME': 'localhost',
        'SERVER_PORT': '80',
        'REQUEST_METHOD': 'PUT',
        'HTTP_ACCEPT': 'text/html'
    }
    excep = WSGIHTTPException()
    assert_equal( excep(environ,start_response), [
        b'<html>\n'
        b' <head>\n'
        b'  <title>None None</title>\n'
        b' </head>\n'
        b' <body>\n'
        b'  <h1>None None</h1>\n'
        b'  <br /><br />\n'
        b'\n'
        b'\n\n'
        b' </body>\n'
        b'</html>' ]
    )
def test_WSGIHTTPException_call_w_body():
    """An explicitly-set body is returned as-is by the WSGI call."""
    def start_response(status, headers, exc_info=None):
        pass
    environ = {
        'wsgi.url_scheme': 'HTTP',
        'SERVER_NAME': 'localhost',
        'SERVER_PORT': '80',
        'REQUEST_METHOD': 'PUT'
    }
    excep = WSGIHTTPException()
    excep.body = b'test'
    assert_equal( excep(environ,start_response), [b'test'] )
def test_WSGIHTTPException_wsgi_response():
    """HEAD requests get an empty body from wsgi_response()."""
    def start_response(status, headers, exc_info=None):
        pass
    environ = {
        'wsgi.url_scheme': 'HTTP',
        'SERVER_NAME': 'localhost',
        'SERVER_PORT': '80',
        'REQUEST_METHOD': 'HEAD'
    }
    excep = WSGIHTTPException()
    assert_equal( excep.wsgi_response(environ,start_response), [] )
def test_WSGIHTTPException_exception_newstyle():
    """Calling the exception works with exc.newstyle_exceptions enabled.

    NOTE(review): this mutates module-global state (webob.exc) and does
    not restore it, so test order matters.
    """
    def start_response(status, headers, exc_info=None):
        pass
    environ = {
        'wsgi.url_scheme': 'HTTP',
        'SERVER_NAME': 'localhost',
        'SERVER_PORT': '80',
        'REQUEST_METHOD': 'HEAD'
    }
    excep = WSGIHTTPException()
    from webob import exc
    exc.newstyle_exceptions = True
    assert_equal( excep(environ,start_response), [] )
def test_WSGIHTTPException_exception_no_newstyle():
    """Calling the exception works with exc.newstyle_exceptions disabled.

    NOTE(review): mutates webob.exc module state without restoring it.
    """
    def start_response(status, headers, exc_info=None):
        pass
    environ = {
        'wsgi.url_scheme': 'HTTP',
        'SERVER_NAME': 'localhost',
        'SERVER_PORT': '80',
        'REQUEST_METHOD': 'HEAD'
    }
    excep = WSGIHTTPException()
    from webob import exc
    exc.newstyle_exceptions = False
    assert_equal( excep(environ,start_response), [] )
def test_HTTPOk_head_of_proxied_head():
    """Copying a HEAD response proxy-style keeps content-type/length intact
    and still produces an empty body when re-evaluated.
    """
    # first set up a response to a HEAD request
    HELLO_WORLD = "Hi!\n"
    CONTENT_TYPE = "application/hello"
    def head_app(environ, start_response):
        """An application object that understands HEAD"""
        status = '200 OK'
        response_headers = [('Content-Type', CONTENT_TYPE),
                            ('Content-Length', len(HELLO_WORLD))]
        start_response(status, response_headers)
        if environ['REQUEST_METHOD'] == 'HEAD':
            return []
        else:
            return [HELLO_WORLD]
    def verify_response(resp, description):
        assert_equal(resp.content_type, CONTENT_TYPE, description)
        assert_equal(resp.content_length, len(HELLO_WORLD), description)
        assert_equal(resp.body, b'', description)
    req = Request.blank('/', method='HEAD')
    resp1 = req.get_response(head_app)
    verify_response(resp1, "first response")
    # Copy the response like a proxy server would.
    # Copying an empty body has set content_length
    # so copy the headers only afterwards.
    resp2 = status_map[resp1.status_int](request=req)
    resp2.body = resp1.body
    resp2.headerlist = resp1.headerlist
    verify_response(resp2, "copied response")
    # evaluate it again
    resp3 = req.get_response(resp2)
    verify_response(resp3, "evaluated copy")
def test_HTTPMove():
    """A bare _HTTPMove answers a HEAD request with an empty body."""
    def start_response(status, headers, exc_info=None):
        pass
    environ = {
        'wsgi.url_scheme': 'HTTP',
        'SERVER_NAME': 'localhost',
        'SERVER_PORT': '80',
        'REQUEST_METHOD': 'HEAD',
        'PATH_INFO': '/',
    }
    m = _HTTPMove()
    assert_equal( m( environ, start_response ), [] )
def test_HTTPMove_location_not_none():
    """_HTTPMove accepts an explicit redirect location."""
    def start_response(status, headers, exc_info=None):
        pass
    environ = {
        'wsgi.url_scheme': 'HTTP',
        'SERVER_NAME': 'localhost',
        'SERVER_PORT': '80',
        'REQUEST_METHOD': 'HEAD',
        'PATH_INFO': '/',
    }
    m = _HTTPMove(location='http://example.com')
    assert_equal( m( environ, start_response ), [] )
def test_HTTPMove_add_slash_and_location():
    """location= and add_slash=True are mutually exclusive (TypeError)."""
    def start_response(status, headers, exc_info=None):
        pass
    assert_raises( TypeError, _HTTPMove, location='http://example.com',
                   add_slash=True )
def test_HTTPMove_call_add_slash():
    """_HTTPMove with add_slash set post-construction still responds."""
    def start_response(status, headers, exc_info=None):
        pass
    environ = {
        'wsgi.url_scheme': 'HTTP',
        'SERVER_NAME': 'localhost',
        'SERVER_PORT': '80',
        'REQUEST_METHOD': 'HEAD',
        'PATH_INFO': '/',
    }
    m = _HTTPMove()
    m.add_slash = True
    assert_equal( m( environ, start_response ), [] )
def test_HTTPMove_call_query_string():
    """add_slash handling also works when a QUERY_STRING is present."""
    def start_response(status, headers, exc_info=None):
        pass
    environ = {
        'wsgi.url_scheme': 'HTTP',
        'SERVER_NAME': 'localhost',
        'SERVER_PORT': '80',
        'REQUEST_METHOD': 'HEAD'
    }
    m = _HTTPMove()
    m.add_slash = True
    environ[ 'QUERY_STRING' ] = 'querystring'
    environ['PATH_INFO'] = '/'
    assert_equal( m( environ, start_response ), [] )
def test_HTTPExceptionMiddleware_ok():
    """When the wrapped app does not raise, its result passes through."""
    def app( environ, start_response ):
        return '123'
    application = app
    m = HTTPExceptionMiddleware(application)
    environ = {}
    start_response = None
    res = m( environ, start_response )
    assert_equal( res, '123' )
def test_HTTPExceptionMiddleware_exception():
    """A raised HTTPException is converted into its wsgi_response result."""
    def wsgi_response( environ, start_response):
        return '123'
    def app( environ, start_response ):
        raise HTTPException( None, wsgi_response )
    application = app
    m = HTTPExceptionMiddleware(application)
    environ = {}
    start_response = None
    res = m( environ, start_response )
    assert_equal( res, '123' )
def test_HTTPExceptionMiddleware_exception_exc_info_none():
    """Middleware must cope with sys.exc_info() returning None.

    Temporarily replaces the ``sys`` module reference inside webob.exc
    with a stub whose exc_info() returns None, then restores it.
    """
    class DummySys:
        def exc_info(self):
            return None
    def wsgi_response( environ, start_response):
        return start_response('200 OK', [], exc_info=None)
    def app( environ, start_response ):
        raise HTTPException( None, wsgi_response )
    application = app
    m = HTTPExceptionMiddleware(application)
    environ = {}
    def start_response(status, headers, exc_info):
        pass
    try:
        from webob import exc
        old_sys = exc.sys
        # BUG FIX: previously this was ``sys = DummySys()``, which bound a
        # dead local name and never installed the stub on the module — the
        # exc_info()-is-None branch was never exercised.  The ``finally``
        # restore below shows the intended monkeypatch target.
        exc.sys = DummySys()
        res = m( environ, start_response )
        assert_equal( res, None )
    finally:
        exc.sys = old_sys
| {
"content_hash": "81d341ecf0c1f914c112c03e62296abc",
"timestamp": "",
"source": "github",
"line_count": 352,
"max_line_length": 72,
"avg_line_length": 30.696022727272727,
"alnum_prop": 0.6090698750578436,
"repo_name": "ioram7/keystone-federado-pgid2013",
"id": "ba4edeebc90d58e4cf1b26b5a5aecb2b4f41b4e7",
"size": "10805",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "build/WebOb/tests/test_exc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1841"
},
{
"name": "C",
"bytes": "10584735"
},
{
"name": "C++",
"bytes": "19231"
},
{
"name": "CSS",
"bytes": "172341"
},
{
"name": "JavaScript",
"bytes": "530938"
},
{
"name": "Python",
"bytes": "26306359"
},
{
"name": "Shell",
"bytes": "38138"
},
{
"name": "XSLT",
"bytes": "306125"
}
],
"symlink_target": ""
} |
# Packaging script for Pizco (Python remote objects over ZMQ).
# NOTE(review): distutils is deprecated since Python 3.10; consider
# migrating to setuptools when this package is next touched.
from distutils.core import setup
# The README doubles as the PyPI long description.
with open('README') as file:
    long_description = file.read()
setup(
    name='Pizco',
    version='0.1',
    description='Python remote objects with ZMQ',
    long_description=long_description,
    author='Hernan E. Grecco',
    author_email='hernan.grecco@gmail.com',
    url='https://github.com/hgrecco/pizco',
    packages=['pizco'],
    package_data={},
    # Optional runtime dependency; install as `pizco[pyzmq]`.
    extras_require = {
        'pyzmq': ['pyzmq'],
    },
    license='BSD',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX',
        'Programming Language :: Python',
        'Topic :: Scientific/Engineering',
        'Topic :: Software Development :: Libraries',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.0',
        'Programming Language :: Python :: 3.1',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
    ])
| {
"content_hash": "24548ee5b820b525040bd6dee3a03dd1",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 51,
"avg_line_length": 32.72222222222222,
"alnum_prop": 0.6001697792869269,
"repo_name": "hgrecco/pizco",
"id": "dfb394616458aedd145bb685e84d0ea20fe9f0bb",
"size": "1201",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "976"
},
{
"name": "Python",
"bytes": "84308"
},
{
"name": "Shell",
"bytes": "5094"
}
],
"symlink_target": ""
} |
from google.appengine.ext import ndb
class Article(ndb.Model):
    """Datastore model for an article; ``tags`` is a repeated (multi-valued)
    string property."""
    title = ndb.StringProperty()
    author = ndb.StringProperty()
    tags = ndb.StringProperty(repeated=True)
def print_author_tags():
    """Fetch up to 20 articles projecting only author and tags."""
    query = Article.query()
    articles = query.fetch(20, projection=[Article.author, Article.tags])
    for article in articles:
        print(article.author)
        print(article.tags)
        # article.title will raise a ndb.UnprojectedPropertyError
class Address(ndb.Model):
    """Structured sub-model embedded in Contact."""
    type = ndb.StringProperty() # E.g., 'home', 'work'
    street = ndb.StringProperty()
    city = ndb.StringProperty()
class Contact(ndb.Model):
    """A contact with a name and zero or more embedded addresses."""
    name = ndb.StringProperty()
    addresses = ndb.StructuredProperty(Address, repeated=True)
def fetch_sub_properties():
    """Project into structured-property sub-fields, by string or by property object."""
    Contact.query().fetch(projection=["name", "addresses.city"])
    Contact.query().fetch(projection=[Contact.name, Contact.addresses.city])
def demonstrate_ndb_grouping():
    """Two equivalent ways of de-duplicating a projection: group_by and distinct."""
    Article.query(projection=[Article.author], group_by=[Article.author])
    Article.query(projection=[Article.author], distinct=True)
class Foo(ndb.Model):
    """Model with two repeated (multi-valued) properties."""
    A = ndb.IntegerProperty(repeated=True)
    B = ndb.StringProperty(repeated=True)
def declare_multiple_valued_property():
    """Construct and return a Foo entity with multiple values stored in
    each of its repeated properties."""
    int_values = [1, 1, 2, 3]
    str_values = ['x', 'y', 'x']
    return Foo(A=int_values, B=str_values)
| {
"content_hash": "f893fbd0c33ee9e6f3a37a9c138679d9",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 76,
"avg_line_length": 27.5531914893617,
"alnum_prop": 0.6911196911196911,
"repo_name": "amboutin/GCP",
"id": "fb0c325435110539682d49fab0edc84025162ff2",
"size": "1892",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "appengine/standard/ndb/projection_queries/snippets.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2924"
},
{
"name": "HTML",
"bytes": "24309"
},
{
"name": "JavaScript",
"bytes": "11222"
},
{
"name": "Makefile",
"bytes": "881"
},
{
"name": "Protocol Buffer",
"bytes": "10818"
},
{
"name": "Python",
"bytes": "1185674"
},
{
"name": "Shell",
"bytes": "8369"
}
],
"symlink_target": ""
} |
from django import forms
# import autocomplete_light
from .models import Organisation, User
class OrganisationForm(forms.ModelForm):
    """ModelForm for creating/editing an Organisation; audit and ownership
    fields are excluded and set elsewhere."""
    def __init__(self, *args, **kwargs):
        super(OrganisationForm, self).__init__(*args, **kwargs)
    class Meta:
        model = Organisation
        exclude = ('user', 'created_at', 'modified_at',
                   'active', 'created_by', 'modified_by')
        # widgets = {
        #     'name': autocomplete_light.TextWidget('OrganisationAutocomplete'),
        # }
class OrganisationMemberAddForm(forms.ModelForm):
    """Form for adding a member: pick an existing user or invite a new one
    by e-mail.  All Organisation model fields are excluded — only the two
    extra form fields below are presented."""
    def __init__(self, *args, **kwargs):
        super(OrganisationMemberAddForm, self).__init__(*args, **kwargs)
    class Meta:
        model = Organisation
        exclude = ('user', 'created_at', 'modified_at',
                   'name', 'organisation_type', 'description',
                   'location', 'organisation_role',
                   'active', 'created_by', 'modified_by')
    # Either select an existing account ...
    existing_user = forms.ModelChoiceField(queryset=User.objects.all(), required=False)
    # ... or invite a new one by e-mail address.
    new_user = forms.EmailField(label='Invite New User', required=False)
class UserRegistrationForm(forms.ModelForm):
    """
    Form class for completing a user's registration and activating the
    User.
    The class operates on a user model which is assumed to have the required
    fields of a BaseUserModel

    NOTE(review): password and password_confirm are declared here but no
    clean() comparing them is visible in this class — presumably validated
    elsewhere; confirm.
    """
    first_name = forms.CharField(max_length=30)
    last_name = forms.CharField(max_length=30)
    password = forms.CharField(max_length=30, widget=forms.PasswordInput)
    password_confirm = forms.CharField(max_length=30,
                                       widget=forms.PasswordInput)
    def __init__(self, *args, **kwargs):
        super(UserRegistrationForm, self).__init__(*args, **kwargs)
        # Force the user to type a username rather than editing one.
        self.initial['username'] = ''
    class Meta:
        model = User
        exclude = ('is_staff', 'is_superuser', 'is_active', 'last_login',
                   'date_joined', 'groups', 'user_permissions')
| {
"content_hash": "7f4b65e05b6250fcd0ef1db783009cd8",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 87,
"avg_line_length": 33.69491525423729,
"alnum_prop": 0.6167002012072434,
"repo_name": "shankig/wye",
"id": "2b2d30374feb60d26b49b7057358cf9ed4fa37f4",
"size": "1988",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "wye/organisations/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "19955"
},
{
"name": "HTML",
"bytes": "226222"
},
{
"name": "JavaScript",
"bytes": "26324"
},
{
"name": "Python",
"bytes": "105039"
},
{
"name": "Shell",
"bytes": "248"
}
],
"symlink_target": ""
} |
import configparser
import hashlib
import os
import platform
import random
import tempfile
from typing import Any, Dict, Optional
# download delegate
# Option-name tuples: the config keys each download backend understands.
__wget__ = ("WGET_PATH",)
__transmission__ = (
    "TRANSMISSION_RPC_URL",
    "TRANSMISSION_RPC_PORT",
    "TRANSMISSION_RPC_USERNAME",
    "TRANSMISSION_RPC_PASSWORD",
)
__qbittorrent__ = (
    "QBITTORRENT_HOST",
    "QBITTORRENT_PORT",
    "QBITTORRENT_USERNAME",
    "QBITTORRENT_PASSWORD",
    "QBITTORRENT_CATEGORY",
)
__aria2__ = (
    "ARIA2_RPC_URL",
    "ARIA2_RPC_TOKEN",
)
__deluge__ = ("DELUGE_RPC_URL", "DELUGE_RPC_PASSWORD")
__download_delegate__ = (
    __wget__ + __aria2__ + __transmission__ + __deluge__ + __qbittorrent__
)
# fake __all__
__all__ = (
    "BANGUMI_MOE_URL",
    "SAVE_PATH",
    "DOWNLOAD_DELEGATE",
    "MAX_PAGE",
    "DATA_SOURCE",
    "TMP_PATH",
    "DANMAKU_API_URL",
    "LANG",
    "FRONT_STATIC_PATH",
    "ADMIN_TOKEN",
    "SHARE_DMHY_URL",
    "GLOBAL_FILTER",
    "ENABLE_GLOBAL_FILTER",
    "TORNADO_SERVE_STATIC_FILES",
)
# cannot be rewrite
__readonly__ = (
    "BGMI_PATH",
    "DB_PATH",
    "CONFIG_FILE_PATH",
    "TOOLS_PATH",
    "SCRIPT_PATH",
    "SCRIPT_DB_PATH",
    "FRONT_STATIC_PATH",
)
# writeable
__writeable__ = tuple(i for i in __all__ if i not in __readonly__)
# the real __all__
__all__ = __all__ + __download_delegate__ + __readonly__ # type: ignore
# Maps the DOWNLOAD_DELEGATE setting to its option tuple.
# NOTE(review): the "rr!" key routes to the wget options — looks
# intentional but verify against the delegate implementations.
DOWNLOAD_DELEGATE_MAP = {
    "rr!": __wget__,
    "aria2-rpc": __aria2__,
    "transmission-rpc": __transmission__,
    "deluge-rpc": __deluge__,
    "qbittorrent-webapi": __qbittorrent__,
}
# Resolve the BGmi home directory: explicit $BGMI_PATH wins, otherwise a
# platform-specific default under the user profile / $HOME.
if not os.environ.get("BGMI_PATH"): # pragma: no cover
    if platform.system() == "Windows":
        BGMI_PATH = os.path.join(
            os.environ.get("USERPROFILE", tempfile.gettempdir()), ".bgmi"
        )
        if not BGMI_PATH:
            raise SystemExit
    else:
        BGMI_PATH = os.path.join(os.environ.get("HOME", "/tmp"), ".bgmi")
else:
    BGMI_PATH = os.environ["BGMI_PATH"]
# Derived read-only paths rooted at BGMI_PATH.
DB_PATH = os.path.join(BGMI_PATH, "bangumi.db")
CONFIG_FILE_PATH = os.path.join(BGMI_PATH, "bgmi.cfg")
SCRIPT_DB_PATH = os.path.join(BGMI_PATH, "script.db")
SCRIPT_PATH = os.path.join(BGMI_PATH, "scripts")
TOOLS_PATH = os.path.join(BGMI_PATH, "tools")
def read_config() -> None:
    """Load ``bgmi.cfg`` into this module's globals.

    If the file is missing, write the defaults instead.  Every writable
    option present in the [bgmi] section overwrites the module-level
    variable of the same name (MAX_PAGE is coerced to int); the active
    delegate's section is then applied the same way.
    """
    c = configparser.ConfigParser()
    if not os.path.exists(CONFIG_FILE_PATH):
        write_default_config()
        return
    c.read(CONFIG_FILE_PATH, encoding="utf-8")
    for i in __writeable__:
        if c.has_option("bgmi", i):
            v: Any = c.get("bgmi", i)
            if i == "MAX_PAGE":
                v = int(v)
            # Mutate module globals in place — callers read these names
            # directly from this module.
            globals().update({i: v})
    for i in DOWNLOAD_DELEGATE_MAP.get(DOWNLOAD_DELEGATE, []):
        if c.has_option(DOWNLOAD_DELEGATE, i):
            globals().update({i: c.get(DOWNLOAD_DELEGATE, i)})
def print_config() -> Optional[str]:
    """Render the on-disk config file as display text.

    Returns the text of the [bgmi] section followed by the active
    download delegate's section, or None when the file does not exist.
    """
    parser = configparser.ConfigParser()
    if not os.path.exists(CONFIG_FILE_PATH):
        return None
    parser.read(CONFIG_FILE_PATH, encoding="utf-8")
    parts = ["[bgmi]\n"]
    parts.extend(
        "{}={}\n".format(key, parser.get("bgmi", key)) for key in __writeable__
    )
    parts.append(f"\n[{DOWNLOAD_DELEGATE}]\n")
    parts.extend(
        f"{key}={parser.get(DOWNLOAD_DELEGATE, key)}\n"
        for key in DOWNLOAD_DELEGATE_MAP.get(DOWNLOAD_DELEGATE, [])
    )
    return "".join(parts)
def write_default_config() -> None:
    """Write the current in-memory defaults to CONFIG_FILE_PATH.

    The [bgmi] section gets every writable option; the active download
    delegate gets its own section.  A missing ADMIN_TOKEN is replaced by
    a freshly generated random hex token.  Write errors are reported and
    ignored (best-effort).
    """
    c = configparser.ConfigParser()
    if not c.has_section("bgmi"):
        c.add_section("bgmi")
    for k in __writeable__:
        v = globals().get(k, "0")
        if k == "ADMIN_TOKEN" and v is None:
            # NOTE(review): md5(random.random()) is not cryptographically
            # secure; consider secrets.token_hex(16) for the admin token.
            v = hashlib.md5(str(random.random()).encode("utf-8")).hexdigest()
        c.set("bgmi", k, str(v))
    if not c.has_section(DOWNLOAD_DELEGATE):
        c.add_section(DOWNLOAD_DELEGATE)
    for k in DOWNLOAD_DELEGATE_MAP.get(DOWNLOAD_DELEGATE, []):
        # BUG FIX: previously ``v = globals().get(k, None)`` was passed to
        # c.set() without str(); configparser.set raises TypeError on
        # non-string values, so a missing/None delegate option crashed the
        # write.  Default to "" and stringify, mirroring the [bgmi] loop.
        v = globals().get(k, "")
        c.set(DOWNLOAD_DELEGATE, k, str(v))
    try:
        with open(CONFIG_FILE_PATH, "w+", encoding="utf-8") as f:
            c.write(f)
    except OSError:
        print("[-] Error writing to config file and ignored")
def write_config(
    config: Optional[str] = None, value: Optional[str] = None
) -> Dict[str, Any]:
    """Read or update one config option, returning a status dict.

    Behaviour by arguments:
      * (None, None)   — dump the whole config ("info").
      * (config, None) — report the single option's current value ("info").
      * (config, value)— set the option, persist the file, and reload the
        module globals ("success"), or report "error" for unknown /
        read-only options.

    The returned dict always carries a ``data`` list describing every
    option and whether it is writable.
    """
    if not os.path.exists(CONFIG_FILE_PATH):
        write_default_config()
        return {
            "status": "error",
            "message": "Config file does not exists, writing default config file",
            "data": [],
        }
    c = configparser.ConfigParser()
    c.read(CONFIG_FILE_PATH, encoding="utf-8")
    result = {} # type: Dict[str, Any]
    try:
        if config is None:
            result = {"status": "info", "message": print_config()}
        elif value is None: # config(config, None)
            result = {"status": "info"}
            if config in __download_delegate__:
                result["message"] = "{}={}".format(
                    config, c.get(DOWNLOAD_DELEGATE, config)
                )
            else:
                result["message"] = "{}={}".format(config, c.get("bgmi", config))
        else: # config(config, Value)
            if config in __writeable__:
                c.set("bgmi", config, value)
                with open(CONFIG_FILE_PATH, "w", encoding="utf-8") as f:
                    c.write(f)
                # Reload so DOWNLOAD_DELEGATE (and friends) reflect the
                # value just written before the delegate section check.
                read_config()
                if config == "DOWNLOAD_DELEGATE" and not c.has_section(
                    DOWNLOAD_DELEGATE
                ):
                    # Switching delegates: seed the new delegate's section
                    # from the current in-memory defaults.
                    c.add_section(DOWNLOAD_DELEGATE)
                    for i in DOWNLOAD_DELEGATE_MAP.get(DOWNLOAD_DELEGATE, []):
                        v = globals().get(i, "")
                        c.set(DOWNLOAD_DELEGATE, i, v)
                    with open(CONFIG_FILE_PATH, "w", encoding="utf-8") as f:
                        c.write(f)
                result = {
                    "status": "success",
                    "message": f"{config} has been set to {value}",
                }
            elif config in DOWNLOAD_DELEGATE_MAP.get(DOWNLOAD_DELEGATE, []):
                c.set(DOWNLOAD_DELEGATE, config, value)
                with open(CONFIG_FILE_PATH, "w", encoding="utf-8") as f:
                    c.write(f)
                result = {
                    "status": "success",
                    "message": f"{config} has been set to {value}",
                }
            else:
                result = {
                    "status": "error",
                    "message": f"{config} does not exist or not writeable",
                }
    except configparser.NoOptionError:
        # Corrupt/outdated file: regenerate defaults and report the error.
        write_default_config()
        result = {
            "status": "error",
            "message": "Error in config file, try rerun `bgmi config`",
        }
    result["data"] = [
        {"writable": True, "name": x, "value": globals()[x]} for x in __writeable__
    ] + [{"writable": False, "name": x, "value": globals()[x]} for x in __readonly__]
    return result
# --------- Writeable ---------- #
# Default values for every writable option; read_config() below may
# overwrite any of these from bgmi.cfg.
# Setting bangumi.moe url
BANGUMI_MOE_URL = "https://bangumi.moe"
# Setting share.dmhy.org url
SHARE_DMHY_URL = "https://share.dmhy.org"
# Setting bangumi.moe url
DATA_SOURCE = "bangumi_moe"
# BGmi user path
SAVE_PATH = os.path.join(BGMI_PATH, "bangumi")
FRONT_STATIC_PATH = os.path.join(BGMI_PATH, "front_static")
# admin token
ADMIN_TOKEN = None
# temp path
TMP_PATH = os.path.join(BGMI_PATH, "tmp")
# log path
LOG_PATH = os.path.join(TMP_PATH, "bgmi.log")
# Download delegate
DOWNLOAD_DELEGATE = "aria2-rpc"
# danmaku api url, https://github.com/DIYgod/DPlayer#related-projects
DANMAKU_API_URL = ""
# language
LANG = "zh_cn"
# max page
MAX_PAGE = 3
# aria2
ARIA2_RPC_URL = "http://localhost:6800/rpc"
ARIA2_RPC_TOKEN = "token:"
# deluge
DELUGE_RPC_URL = "http://127.0.0.1:8112/json"
DELUGE_RPC_PASSWORD = "deluge"
# path of wget
WGET_PATH = "/usr/bin/wget"
# transmission-rpc
TRANSMISSION_RPC_URL = "127.0.0.1"
TRANSMISSION_RPC_PORT = "9091"
TRANSMISSION_RPC_USERNAME = "your_username"
TRANSMISSION_RPC_PASSWORD = "your_password"
# qbittorrent-webapi
QBITTORRENT_HOST = "127.0.0.1"
QBITTORRENT_PORT = "8080"
QBITTORRENT_USERNAME = "admin"
QBITTORRENT_PASSWORD = "adminadmin"
QBITTORRENT_CATEGORY = ""
# tag of bangumi on bangumi.moe
BANGUMI_TAG = "549ef207fe682f7549f1ea90"
# Global blocked keyword
GLOBAL_FILTER = "Leopard-Raws, hevc, x265, c-a Raws, U3-Web"
# enable global filter
ENABLE_GLOBAL_FILTER = "1"
# use tornado serving video files
TORNADO_SERVE_STATIC_FILES = "0"
# ------------------------------ #
# !!! Read config from file and write to globals() !!!
read_config()
# ------------------------------ #
# will be used in other other models
__all_writable_now__ = __writeable__ + DOWNLOAD_DELEGATE_MAP.get(DOWNLOAD_DELEGATE, ())
# --------- Read-Only ---------- #
# platform
IS_WINDOWS = platform.system() == "Windows"
# Running this module directly (re)writes the default config file.
if __name__ == "__main__":
    write_default_config()
| {
"content_hash": "53617c121e0c2197ea802fefd8ea98b6",
"timestamp": "",
"source": "github",
"line_count": 321,
"max_line_length": 87,
"avg_line_length": 27.50778816199377,
"alnum_prop": 0.5629671574178935,
"repo_name": "codysk/BGmi",
"id": "433d25cbf72398c9a6cf1aa7bcec4ea3030fc83a",
"size": "8830",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bgmi/config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "181419"
},
{
"name": "Shell",
"bytes": "4874"
},
{
"name": "VBScript",
"bytes": "163"
}
],
"symlink_target": ""
} |
# Copyright (c) 1999-2008 Mark D. Hill and David A. Wood
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from m5.util import orderdict
from slicc.util import PairContainer
from slicc.symbols.Symbol import Symbol
from slicc.symbols.Var import Var
class DataMember(PairContainer):
    """A single field of a SLICC Type.

    Carries the field name, its Type, the key/value pairs attached to
    the declaration, and optional C++ initialization code.
    """
    def __init__(self, ident, type, pairs, init_code):
        super(DataMember, self).__init__(pairs)
        self.ident, self.type, self.init_code = ident, type, init_code
class Enumeration(PairContainer):
    """A single member of a SLICC enumeration type."""
    def __init__(self, ident, pairs):
        super(Enumeration, self).__init__(pairs)
        self.ident = ident
class Type(Symbol):
    """A type declared in SLICC (primitive, enumeration, struct, or
    message) together with the generators that emit its C++ code."""
    def __init__(self, table, ident, location, pairs, machine=None):
        super(Type, self).__init__(table, ident, location, pairs)
        # C++ identifier; may be overridden below for machine-scoped
        # or externally named types.
        self.c_ident = ident
        self.abstract_ident = ""
        if machine:
            if self.isExternal or self.isPrimitive:
                if "external_name" in self:
                    self.c_ident = self["external_name"]
            else:
                # Append with machine name
                self.c_ident = "%s_%s" % (machine, ident)
        self.pairs.setdefault("desc", "No description avaliable")
        # check for interface that this Type implements
        if "interface" in self:
            interface = self["interface"]
            # Bug fix: the original used `interface in ("Message")`,
            # which is a *substring* test against the string "Message"
            # (the parentheses do not form a tuple), so e.g. "sage"
            # would have matched.  Use equality, matching the checks
            # on self.ident below.
            if interface == "Message":
                self["message"] = "yes"
        # FIXME - all of the following id comparisons are fragile hacks
        if self.ident == "CacheMemory":
            self["cache"] = "yes"
        if self.ident == "TBETable":
            self["tbe"] = "yes"
        if self.ident == "TimerTable":
            self["timer"] = "yes"
        if self.ident == "DirectoryMemory":
            self["dir"] = "yes"
        if self.ident == "PersistentTable":
            self["persistent"] = "yes"
        if self.ident == "Prefetcher":
            self["prefetcher"] = "yes"
        self.isMachineType = (ident == "MachineType")
        self.isStateDecl = ("state_decl" in self)
        self.statePermPairs = []
        # insertion-ordered containers so generated code keeps
        # declaration order
        self.data_members = orderdict()
        self.methods = {}
        self.enums = orderdict()
    # Flag accessors: each reports whether the SLICC parser attached
    # the corresponding key/value pair to this type declaration.
    @property
    def isPrimitive(self):
        return "primitive" in self
    @property
    def isMessage(self):
        return "message" in self
    @property
    def isBuffer(self):
        return "buffer" in self
    @property
    def isInPort(self):
        return "inport" in self
    @property
    def isOutPort(self):
        return "outport" in self
    @property
    def isEnumeration(self):
        return "enumeration" in self
    @property
    def isExternal(self):
        return "external" in self
    @property
    def isGlobal(self):
        return "global" in self
    @property
    def isInterface(self):
        return "interface" in self
# Return false on error
def addDataMember(self, ident, type, pairs, init_code):
if ident in self.data_members:
return False
member = DataMember(ident, type, pairs, init_code)
self.data_members[ident] = member
var = Var(self.symtab, ident, self.location, type,
"m_%s" % ident, {}, None)
self.symtab.registerSym(ident, var)
return True
    def dataMemberType(self, ident):
        """Return the Type of the data member named `ident`."""
        return self.data_members[ident].type
    def methodId(self, name, param_type_vec):
        """Mangle a method name with its parameter C++ type names."""
        return '_'.join([name] + [ pt.c_ident for pt in param_type_vec ])
    def methodIdAbstract(self, name, param_type_vec):
        """Mangle a method name with its abstract parameter type names."""
        return '_'.join([name] + [ pt.abstract_ident for pt in param_type_vec ])
    def statePermPairAdd(self, state_name, perm_name):
        """Record a state -> access-permission pair for state declarations."""
        self.statePermPairs.append([state_name, perm_name])
def addFunc(self, func):
ident = self.methodId(func.ident, func.param_types)
if ident in self.methods:
return False
self.methods[ident] = func
return True
    def addEnum(self, ident, pairs):
        """Add an enumeration member; return False on duplicates.

        A successful add also establishes the "default" pair as
        "<c_ident>_NUM" unless one was supplied explicitly.
        """
        if ident in self.enums:
            return False
        self.enums[ident] = Enumeration(ident, pairs)
        # Add default
        if "default" not in self:
            self["default"] = "%s_NUM" % self.c_ident
        return True
    def writeCodeFiles(self, path, includes):
        """Emit the generated C++ files for this type.

        External types are declared elsewhere and produce no output;
        enumerations and user-defined structs/messages each get a
        .hh/.cc pair.
        """
        if self.isExternal:
            # Do nothing
            pass
        elif self.isEnumeration:
            self.printEnumHH(path)
            self.printEnumCC(path)
        else:
            # User defined structs and messages
            self.printTypeHH(path)
            self.printTypeCC(path)
    def printTypeHH(self, path):
        """Write the C++ header for a struct/message type.

        Emits includes, constructors (default, copy, full-init),
        clone(), accessors/mutators for every data member, the data
        members themselves, method prototypes, and operator<<.
        """
        code = self.symtab.codeFormatter()
        code('''
/** \\file ${{self.c_ident}}.hh
 *
 *
 * Auto generated C++ code started by $__file__:$__line__
 */
#ifndef __${{self.c_ident}}_HH__
#define __${{self.c_ident}}_HH__
#include <iostream>
#include "mem/ruby/slicc_interface/RubySlicc_Util.hh"
''')
        # Pull in headers for every non-primitive member type.
        for dm in self.data_members.values():
            if not dm.type.isPrimitive:
                code('#include "mem/protocol/$0.hh"', dm.type.c_ident)
        parent = ""
        if "interface" in self:
            code('#include "mem/protocol/$0.hh"', self["interface"])
            parent = " : public %s" % self["interface"]
        code('''
$klass ${{self.c_ident}}$parent
{
  public:
    ${{self.c_ident}}
''', klass="class")
        # Default constructor; messages forward the current tick to the
        # Message base class.
        if self.isMessage:
            code('(Tick curTime) : %s(curTime) {' % self["interface"])
        else:
            code('()\n\t\t{')
        code.indent()
        if not self.isGlobal:
            code.indent()
            for dm in self.data_members.values():
                ident = dm.ident
                if "default" in dm:
                    # look for default value
                    code('m_$ident = ${{dm["default"]}}; // default for this field')
                elif "default" in dm.type:
                    # Look for the type default
                    tid = dm.type.c_ident
                    code('m_$ident = ${{dm.type["default"]}}; // default value of $tid')
                else:
                    code('// m_$ident has no default')
            code.dedent()
        code('}')
        # ******** Copy constructor ********
        if not self.isGlobal:
            code('${{self.c_ident}}(const ${{self.c_ident}}&other)')
            # Call superclass constructor
            if "interface" in self:
                code('    : ${{self["interface"]}}(other)')
            code('{')
            code.indent()
            for dm in self.data_members.values():
                code('m_${{dm.ident}} = other.m_${{dm.ident}};')
            code.dedent()
            code('}')
        # ******** Full init constructor ********
        if not self.isGlobal:
            params = [ 'const %s& local_%s' % (dm.type.c_ident, dm.ident) \
                       for dm in self.data_members.itervalues() ]
            params = ', '.join(params)
            if self.isMessage:
                params = "const Tick curTime, " + params
            code('${{self.c_ident}}($params)')
            # Call superclass constructor
            if "interface" in self:
                if self.isMessage:
                    code('    : ${{self["interface"]}}(curTime)')
                else:
                    code('    : ${{self["interface"]}}()')
            code('{')
            code.indent()
            for dm in self.data_members.values():
                code('m_${{dm.ident}} = local_${{dm.ident}};')
                if "nextLineCallHack" in dm:
                    code('m_${{dm.ident}}${{dm["nextLineCallHack"]}};')
            code.dedent()
            code('}')
        # create a clone member
        if self.isMessage:
            code('''
MsgPtr
clone() const
{
     return std::shared_ptr<Message>(new ${{self.c_ident}}(*this));
}
''')
        else:
            code('''
${{self.c_ident}}*
clone() const
{
     return new ${{self.c_ident}}(*this);
}
''')
        if not self.isGlobal:
            # const Get methods for each field
            code('// Const accessors methods for each field')
            for dm in self.data_members.values():
                code('''
/** \\brief Const accessor method for ${{dm.ident}} field.
 * \\return ${{dm.ident}} field
 */
const ${{dm.type.c_ident}}&
get${{dm.ident}}() const
{
    return m_${{dm.ident}};
}
''')
            # Non-const Get methods for each field
            code('// Non const Accessors methods for each field')
            for dm in self.data_members.values():
                code('''
/** \\brief Non-const accessor method for ${{dm.ident}} field.
 * \\return ${{dm.ident}} field
 */
${{dm.type.c_ident}}&
get${{dm.ident}}()
{
    return m_${{dm.ident}};
}
''')
            #Set methods for each field
            code('// Mutator methods for each field')
            for dm in self.data_members.values():
                code('''
/** \\brief Mutator method for ${{dm.ident}} field */
void
set${{dm.ident}}(const ${{dm.type.c_ident}}& local_${{dm.ident}})
{
    m_${{dm.ident}} = local_${{dm.ident}};
}
''')
        code('void print(std::ostream& out) const;')
        code.dedent()
        code('  //private:')
        code.indent()
        # Data members for each field
        for dm in self.data_members.values():
            if "abstract" not in dm:
                const = ""
                init = ""
                # global structure
                if self.isGlobal:
                    const = "static const "
                # init value
                if dm.init_code:
                    # only global structure can have init value here
                    assert self.isGlobal
                    init = " = %s" % (dm.init_code)
                if "desc" in dm:
                    code('/** ${{dm["desc"]}} */')
                code('$const${{dm.type.c_ident}} m_${{dm.ident}}$init;')
        # Prototypes for methods defined for the Type
        for item in self.methods:
            proto = self.methods[item].prototype
            if proto:
                code('$proto')
        code.dedent()
        code('};')
        code('''
inline std::ostream&
operator<<(std::ostream& out, const ${{self.c_ident}}& obj)
{
    obj.print(out);
    out << std::flush;
    return out;
}
#endif // __${{self.c_ident}}_HH__
''')
        code.write(path, "%s.hh" % self.c_ident)
    def printTypeCC(self, path):
        """Write the C++ source for a struct/message type: print()
        plus the bodies of all methods defined on this type."""
        code = self.symtab.codeFormatter()
        code('''
/** \\file ${{self.c_ident}}.cc
 *
 * Auto generated C++ code started by $__file__:$__line__
 */
#include <iostream>
#include <memory>
#include "mem/protocol/${{self.c_ident}}.hh"
#include "mem/ruby/system/System.hh"
using namespace std;
''')
        code('''
/** \\brief Print the state of this object */
void
${{self.c_ident}}::print(ostream& out) const
{
    out << "[${{self.c_ident}}: ";
''')
        # For each field
        code.indent()
        for dm in self.data_members.values():
            code('out << "${{dm.ident}} = " << m_${{dm.ident}} << " ";''')
        code.dedent()
        # Trailer
        code('''
out << "]";
}''')
        # print the code for the methods in the type
        for item in self.methods:
            code(self.methods[item].generateCode())
        code.write(path, "%s.cc" % self.c_ident)
    def printEnumHH(self, path):
        """Write the C++ header declaring the enumeration and its
        helpers (string conversion, operator++, and, for MachineType,
        the base-index helper functions)."""
        code = self.symtab.codeFormatter()
        code('''
/** \\file ${{self.c_ident}}.hh
 *
 * Auto generated C++ code started by $__file__:$__line__
 */
#ifndef __${{self.c_ident}}_HH__
#define __${{self.c_ident}}_HH__
#include <iostream>
#include <string>
''')
        if self.isStateDecl:
            code('#include "mem/protocol/AccessPermission.hh"')
        if self.isMachineType:
            code('#include "base/misc.hh"')
            code('#include "mem/ruby/common/Address.hh"')
            code('struct MachineID;')
        code('''
// Class definition
/** \\enum ${{self.c_ident}}
 * \\brief ${{self.desc}}
 */
enum ${{self.c_ident}} {
    ${{self.c_ident}}_FIRST,
''')
        code.indent()
        # For each field
        for i,(ident,enum) in enumerate(self.enums.iteritems()):
            desc = enum.get("desc", "No description avaliable")
            if i == 0:
                # Anchor the first member to the _FIRST sentinel.
                init = ' = %s_FIRST' % self.c_ident
            else:
                init = ''
            code('${{self.c_ident}}_${{enum.ident}}$init, /**< $desc */')
        code.dedent()
        code('''
    ${{self.c_ident}}_NUM
};
// Code to convert from a string to the enumeration
${{self.c_ident}} string_to_${{self.c_ident}}(const std::string& str);
// Code to convert state to a string
std::string ${{self.c_ident}}_to_string(const ${{self.c_ident}}& obj);
// Code to increment an enumeration type
${{self.c_ident}} &operator++(${{self.c_ident}} &e);
''')
        # MachineType hack used to set the base component id for each Machine
        if self.isMachineType:
            code('''
int ${{self.c_ident}}_base_level(const ${{self.c_ident}}& obj);
MachineType ${{self.c_ident}}_from_base_level(int);
int ${{self.c_ident}}_base_number(const ${{self.c_ident}}& obj);
int ${{self.c_ident}}_base_count(const ${{self.c_ident}}& obj);
''')
            for enum in self.enums.itervalues():
                if enum.ident == "DMA":
                    code('''
MachineID map_Address_to_DMA(const Address &addr);
''')
                code('''
MachineID get${{enum.ident}}MachineID(NodeID RubyNode);
''')
        if self.isStateDecl:
            code('''
// Code to convert the current state to an access permission
AccessPermission ${{self.c_ident}}_to_permission(const ${{self.c_ident}}& obj);
''')
        # Trailer
        code('''
std::ostream& operator<<(std::ostream& out, const ${{self.c_ident}}& obj);
#endif // __${{self.c_ident}}_HH__
''')
        code.write(path, "%s.hh" % self.c_ident)
    def printEnumCC(self, path):
        """Write the C++ source implementing the enumeration helpers
        declared by printEnumHH."""
        code = self.symtab.codeFormatter()
        code('''
/** \\file ${{self.c_ident}}.hh
 *
 * Auto generated C++ code started by $__file__:$__line__
 */
#include <cassert>
#include <iostream>
#include <string>
#include "base/misc.hh"
#include "mem/protocol/${{self.c_ident}}.hh"
using namespace std;
''')
        if self.isStateDecl:
            code('''
// Code to convert the current state to an access permission
AccessPermission ${{self.c_ident}}_to_permission(const ${{self.c_ident}}& obj)
{
    switch(obj) {
''')
            # For each case
            code.indent()
            for statePerm in self.statePermPairs:
                code('  case ${{self.c_ident}}_${{statePerm[0]}}:')
                code('    return AccessPermission_${{statePerm[1]}};')
            code.dedent()
            code ('''
      default:
        panic("Unknown state access permission converstion for ${{self.c_ident}}");
    }
}
''')
        if self.isMachineType:
            # Controllers are only generated for "Primary" machines.
            for enum in self.enums.itervalues():
                if enum.get("Primary"):
                    code('#include "mem/protocol/${{enum.ident}}_Controller.hh"')
            code('#include "mem/ruby/common/MachineID.hh"')
        code('''
// Code for output operator
ostream&
operator<<(ostream& out, const ${{self.c_ident}}& obj)
{
    out << ${{self.c_ident}}_to_string(obj);
    out << flush;
    return out;
}
// Code to convert state to a string
string
${{self.c_ident}}_to_string(const ${{self.c_ident}}& obj)
{
    switch(obj) {
''')
        # For each field
        code.indent()
        for enum in self.enums.itervalues():
            code('  case ${{self.c_ident}}_${{enum.ident}}:')
            code('    return "${{enum.ident}}";')
        code.dedent()
        # Trailer
        code('''
      default:
        panic("Invalid range for type ${{self.c_ident}}");
    }
}
// Code to convert from a string to the enumeration
${{self.c_ident}}
string_to_${{self.c_ident}}(const string& str)
{
''')
        # For each field
        start = ""
        code.indent()
        for enum in self.enums.itervalues():
            code('${start}if (str == "${{enum.ident}}") {')
            code('    return ${{self.c_ident}}_${{enum.ident}};')
            start = "} else "
        code.dedent()
        code('''
    } else {
        panic("Invalid string conversion for %s, type ${{self.c_ident}}", str);
    }
}
// Code to increment an enumeration type
${{self.c_ident}}&
operator++(${{self.c_ident}}& e)
{
    assert(e < ${{self.c_ident}}_NUM);
    return e = ${{self.c_ident}}(e+1);
}
''')
        # MachineType hack used to set the base level and number of
        # components for each Machine
        if self.isMachineType:
            code('''
/** \\brief returns the base vector index for each machine type to be
 * used by NetDest
 *
 * \\return the base vector index for each machine type to be used by NetDest
 * \\see NetDest.hh
 */
int
${{self.c_ident}}_base_level(const ${{self.c_ident}}& obj)
{
    switch(obj) {
''')
            # For each field
            code.indent()
            for i,enum in enumerate(self.enums.itervalues()):
                code('  case ${{self.c_ident}}_${{enum.ident}}:')
                code('    return $i;')
            code.dedent()
            # total num
            code('''
      case ${{self.c_ident}}_NUM:
        return ${{len(self.enums)}};
      default:
        panic("Invalid range for type ${{self.c_ident}}");
    }
}
/** \\brief returns the machine type for each base vector index used by NetDest
 *
 * \\return the MachineType
 */
MachineType
${{self.c_ident}}_from_base_level(int type)
{
    switch(type) {
''')
            # For each field
            code.indent()
            for i,enum in enumerate(self.enums.itervalues()):
                code('  case $i:')
                code('    return ${{self.c_ident}}_${{enum.ident}};')
            code.dedent()
            # Trailer
            code('''
      default:
        panic("Invalid range for type ${{self.c_ident}}");
    }
}
/** \\brief The return value indicates the number of components created
 * before a particular machine\'s components
 *
 * \\return the base number of components for each machine
 */
int
${{self.c_ident}}_base_number(const ${{self.c_ident}}& obj)
{
    int base = 0;
    switch(obj) {
''')
            # For each field
            # NOTE: the generated switch falls through on purpose so
            # each machine accumulates the counts of those after it.
            code.indent()
            code('  case ${{self.c_ident}}_NUM:')
            for enum in reversed(self.enums.values()):
                # Check if there is a defined machine with this type
                if enum.get("Primary"):
                    code('    base += ${{enum.ident}}_Controller::getNumControllers();')
                else:
                    code('    base += 0;')
                code('  case ${{self.c_ident}}_${{enum.ident}}:')
                code('    break;')
            code.dedent()
            code('''
      default:
        panic("Invalid range for type ${{self.c_ident}}");
    }
    return base;
}
/** \\brief returns the total number of components for each machine
 * \\return the total number of components for each machine
 */
int
${{self.c_ident}}_base_count(const ${{self.c_ident}}& obj)
{
    switch(obj) {
''')
            # For each field
            for enum in self.enums.itervalues():
                code('case ${{self.c_ident}}_${{enum.ident}}:')
                if enum.get("Primary"):
                    code('return ${{enum.ident}}_Controller::getNumControllers();')
                else:
                    code('return 0;')
            # total num
            code('''
      case ${{self.c_ident}}_NUM:
      default:
        panic("Invalid range for type ${{self.c_ident}}");
    }
}
''')
            for enum in self.enums.itervalues():
                if enum.ident == "DMA":
                    code('''
MachineID
map_Address_to_DMA(const Address &addr)
{
      MachineID dma = {MachineType_DMA, 0};
      return dma;
}
''')
                code('''
MachineID
get${{enum.ident}}MachineID(NodeID RubyNode)
{
      MachineID mach = {MachineType_${{enum.ident}}, RubyNode};
      return mach;
}
''')
        # Write the file
        code.write(path, "%s.cc" % self.c_ident)
# Public API of this module.
__all__ = [ "Type" ]
| {
"content_hash": "f859062405711f888365aaf30707f353",
"timestamp": "",
"source": "github",
"line_count": 773,
"max_line_length": 88,
"avg_line_length": 27.927554980595083,
"alnum_prop": 0.5432647767278117,
"repo_name": "alianmohammad/pd-gem5-latest",
"id": "73d6f9c699947e22fb5424db1ccd8253621e5dd7",
"size": "21588",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/mem/slicc/symbols/Type.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "239800"
},
{
"name": "C",
"bytes": "1025519"
},
{
"name": "C++",
"bytes": "14964072"
},
{
"name": "CMake",
"bytes": "2202"
},
{
"name": "Emacs Lisp",
"bytes": "1969"
},
{
"name": "HTML",
"bytes": "136898"
},
{
"name": "Hack",
"bytes": "2489"
},
{
"name": "Java",
"bytes": "3096"
},
{
"name": "Makefile",
"bytes": "38436"
},
{
"name": "Perl",
"bytes": "33602"
},
{
"name": "Protocol Buffer",
"bytes": "7033"
},
{
"name": "Python",
"bytes": "4014196"
},
{
"name": "Shell",
"bytes": "91025"
},
{
"name": "VimL",
"bytes": "4335"
},
{
"name": "Visual Basic",
"bytes": "2884"
}
],
"symlink_target": ""
} |
"""Support for Daikin AC sensors."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from pydaikin.daikin_base import Appliance
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ENERGY_KILO_WATT_HOUR,
FREQUENCY_HERTZ,
PERCENTAGE,
POWER_KILO_WATT,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from . import DOMAIN as DAIKIN_DOMAIN, DaikinApi
from .const import (
ATTR_COMPRESSOR_FREQUENCY,
ATTR_COOL_ENERGY,
ATTR_HEAT_ENERGY,
ATTR_HUMIDITY,
ATTR_INSIDE_TEMPERATURE,
ATTR_OUTSIDE_TEMPERATURE,
ATTR_TARGET_HUMIDITY,
ATTR_TOTAL_ENERGY_TODAY,
ATTR_TOTAL_POWER,
)
@dataclass
class DaikinRequiredKeysMixin:
    """Mixin for required keys."""
    # Callable that extracts this sensor's reading from the pydaikin
    # Appliance object.
    value_func: Callable[[Appliance], float | None]
# Combines Home Assistant's standard sensor description with the
# required Daikin value extractor.
@dataclass
class DaikinSensorEntityDescription(SensorEntityDescription, DaikinRequiredKeysMixin):
    """Describes Daikin sensor entity."""
# All sensor descriptions this integration can create; async_setup_entry
# filters them by the features the connected unit actually supports.
SENSOR_TYPES: tuple[DaikinSensorEntityDescription, ...] = (
    DaikinSensorEntityDescription(
        key=ATTR_INSIDE_TEMPERATURE,
        name="Inside Temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=TEMP_CELSIUS,
        value_func=lambda device: device.inside_temperature,
    ),
    DaikinSensorEntityDescription(
        key=ATTR_OUTSIDE_TEMPERATURE,
        name="Outside Temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=TEMP_CELSIUS,
        value_func=lambda device: device.outside_temperature,
    ),
    DaikinSensorEntityDescription(
        key=ATTR_HUMIDITY,
        name="Humidity",
        device_class=SensorDeviceClass.HUMIDITY,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=PERCENTAGE,
        value_func=lambda device: device.humidity,
    ),
    DaikinSensorEntityDescription(
        key=ATTR_TARGET_HUMIDITY,
        name="Target Humidity",
        device_class=SensorDeviceClass.HUMIDITY,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=PERCENTAGE,
        # NOTE(review): identical to the ATTR_HUMIDITY entry above, so
        # both sensors report the same value. A target-humidity sensor
        # would be expected to read `device.target_humidity` — confirm
        # against the pydaikin Appliance API before changing.
        value_func=lambda device: device.humidity,
    ),
    DaikinSensorEntityDescription(
        key=ATTR_TOTAL_POWER,
        name="Estimated Power Consumption",
        device_class=SensorDeviceClass.POWER,
        native_unit_of_measurement=POWER_KILO_WATT,
        value_func=lambda device: round(device.current_total_power_consumption, 2),
    ),
    DaikinSensorEntityDescription(
        key=ATTR_COOL_ENERGY,
        name="Cool Energy Consumption",
        icon="mdi:snowflake",
        device_class=SensorDeviceClass.ENERGY,
        native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
        value_func=lambda device: round(device.last_hour_cool_energy_consumption, 2),
    ),
    DaikinSensorEntityDescription(
        key=ATTR_HEAT_ENERGY,
        name="Heat Energy Consumption",
        icon="mdi:fire",
        device_class=SensorDeviceClass.ENERGY,
        native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
        value_func=lambda device: round(device.last_hour_heat_energy_consumption, 2),
    ),
    DaikinSensorEntityDescription(
        key=ATTR_COMPRESSOR_FREQUENCY,
        name="Compressor Frequency",
        icon="mdi:fan",
        native_unit_of_measurement=FREQUENCY_HERTZ,
        value_func=lambda device: device.compressor_frequency,
    ),
    DaikinSensorEntityDescription(
        key=ATTR_TOTAL_ENERGY_TODAY,
        name="Today's Total Energy Consumption",
        device_class=SensorDeviceClass.ENERGY,
        state_class=SensorStateClass.TOTAL_INCREASING,
        native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
        value_func=lambda device: round(device.today_total_energy_consumption, 2),
    ),
)
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Old way of setting up the Daikin sensors.
    Can only be called when a user accidentally mentions the platform in their
    config. But even in that case it would have been ignored.
    """
    # Intentionally a no-op: setup happens via config entries only.
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up Daikin sensors based on config_entry."""
    daikin_api = hass.data[DAIKIN_DOMAIN].get(entry.entry_id)
    device = daikin_api.device
    # Collect the keys of the sensors this particular unit supports.
    supported = {ATTR_INSIDE_TEMPERATURE}
    if device.support_outside_temperature:
        supported.add(ATTR_OUTSIDE_TEMPERATURE)
    if device.support_energy_consumption:
        supported.update(
            {
                ATTR_TOTAL_POWER,
                ATTR_COOL_ENERGY,
                ATTR_HEAT_ENERGY,
                ATTR_TOTAL_ENERGY_TODAY,
            }
        )
    if device.support_humidity:
        supported.update({ATTR_HUMIDITY, ATTR_TARGET_HUMIDITY})
    if device.support_compressor_frequency:
        supported.add(ATTR_COMPRESSOR_FREQUENCY)
    async_add_entities(
        [
            DaikinSensor(daikin_api, description)
            for description in SENSOR_TYPES
            if description.key in supported
        ]
    )
class DaikinSensor(SensorEntity):
    """Daikin sensor entity backed by a shared DaikinApi instance."""
    entity_description: DaikinSensorEntityDescription
    def __init__(
        self, api: DaikinApi, description: DaikinSensorEntityDescription
    ) -> None:
        """Store the API handle and build the entity name."""
        self._api = api
        self.entity_description = description
        self._attr_name = f"{api.name} {description.name}"
    @property
    def unique_id(self):
        """Unique ID derived from the device MAC and the sensor key."""
        return f"{self._api.device.mac}-{self.entity_description.key}"
    @property
    def native_value(self) -> float | None:
        """Current reading, extracted via the description's value_func."""
        device = self._api.device
        return self.entity_description.value_func(device)
    async def async_update(self):
        """Ask the shared API object to refresh the device state."""
        await self._api.async_update()
    @property
    def device_info(self):
        """Device registry description provided by the API wrapper."""
        return self._api.device_info
| {
"content_hash": "e00fe7e88bc4f6ead62ef3e2b25cf23d",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 86,
"avg_line_length": 33.36548223350254,
"alnum_prop": 0.6963334854708657,
"repo_name": "GenericStudent/home-assistant",
"id": "39a6f4aa853474c5951792905ecefdf03865b5e8",
"size": "6573",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/daikin/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3070"
},
{
"name": "Python",
"bytes": "44491729"
},
{
"name": "Shell",
"bytes": "5092"
}
],
"symlink_target": ""
} |
from bluesky.callbacks.olog import logbook_cb_factory
from functools import partial
from pyOlog import SimpleOlogClient
import queue
import threading
from warnings import warn
import nslsii
# Set up the logbook. This configures bluesky's summaries of
# data acquisition (scan type, ID, etc.).
LOGBOOKS = ['Data Acquisition'] # list of logbook names to publish to
# Shared Olog client; also exposed below as `logbook`.
simple_olog_client = SimpleOlogClient()
generic_logbook_func = simple_olog_client.log
# Pre-bind the destination logbooks so the bluesky callback only needs
# to supply the entry text.
configured_logbook_func = partial(generic_logbook_func, logbooks=LOGBOOKS)
# This is for ophyd.commands.get_logbook, which simply looks for
# a variable called 'logbook' in the global IPython namespace.
logbook = simple_olog_client
# Wrap the logging function as a RunEngine callback and register it
# with the NSLS-II profile configuration.
cb = logbook_cb_factory(configured_logbook_func)
nslsii.configure_olog(get_ipython().user_ns, callback=cb)
| {
"content_hash": "3c865248db890e91b7f2ba5c4d161b84",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 74,
"avg_line_length": 33.125,
"alnum_prop": 0.7924528301886793,
"repo_name": "NSLS-II-XPD/ipython_ophyd",
"id": "dcf4891d1f48bba0e6a1e9adcbf1e1afef76d7e3",
"size": "795",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "profile_collection/startup/01-olog-integration.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "864"
},
{
"name": "JavaScript",
"bytes": "10578"
},
{
"name": "Python",
"bytes": "776274"
}
],
"symlink_target": ""
} |
import multiprocessing as mp
import numpy as np
import pyfftw
import scipy.ndimage as ndi
def apple_core_3d(shape, res, nm):
    r"""Return a binary array with the apple core in 3D

    Parameters
    ----------
    shape: list-like, length 3
        Shape of the reconstruction volume; the second (y-) axis is
        the axis of symmetry and `shape[0]` must equal `shape[2]`.
    res: float
        Size of the vacuum wave length :math:`\lambda` in pixels
    nm: float
        Refractive index of the medium :math:`n_\mathrm{m}`

    Returns
    -------
    core: 3D ndarray
        Boolean mask, `True` for positions within the apple core
    """
    km = (2 * np.pi * nm) / res
    nx, ny, nz = shape
    if nx != nz:
        raise ValueError("`shape[0]` and `shape[2]` must be identical!")
    # Broadcastable frequency axes in unshifted FFT layout.
    fx = np.fft.fftfreq(nx).reshape(-1, 1, 1)
    fy = np.fft.fftfreq(ny).reshape(1, -1, 1)
    fz = np.fft.fftfreq(nz).reshape(1, 1, -1)
    ky = 2*np.pi * fy
    kxz = 2*np.pi * np.sqrt(fx**2 + fz**2)
    kr = 2*np.pi * np.sqrt(fx**2 + fy**2 + fz**2)
    # Apple-core boundary curve in the (kxz, ky) plane; clip negative
    # radicands before taking the square root.
    root = 2*km*kxz - kxz**2
    root[root < 0] = 0
    core = np.abs(ky) > np.sqrt(root)
    # Remove everything beyond the resolution-limit sphere.
    core[kr > np.sqrt(2) * km] = False
    return core
def constraint_nn(data, mask=None, bg_shell=None):
    """Enforce the non-negativity constraint on `data` in place.

    The imaginary part (absorption) is zeroed, known background
    regions (spill-over margin and optional `bg_shell`) are reset to
    the medium value 0, and negative real parts are clipped to zero,
    optionally restricted to `mask`.
    """
    if np.iscomplexobj(data):
        # no absorption allowed
        data.imag[:] = 0
    if bg_shell is not None:
        # user-supplied background shell is medium
        data.real[bg_shell] = 0
    # the outer spill-over margin is background as well
    data.real[spillover_region(data.shape)] = 0
    negative = data.real < 0
    if mask is not None:
        # restrict the constraint to the given background region(s)
        negative *= mask
    data.real[negative] = 0
def constraint_sh(data, mask=None, bg_shell=None):
    """Symmetric histogram background data constraint

    Builds a histogram of the real part over the background support,
    locates its peak and the half-maximum interval around it, and
    damps (halves) all values outside that interval, optionally
    restricted to `mask`. Operates on `data` in place.
    """
    # No imaginary RI (no absorption)
    if np.iscomplexobj(data):
        data.imag[:] = 0
    # determine range of medium RI (using background support)
    spov = spillover_region(data.shape)
    if bg_shell is not None:
        spov |= bg_shell
    fmin = np.min(data.real[spov])
    fmax = np.max(data.real[spov])
    # center
    full_hist, full_edge = np.histogram(
        data.real, bins=100, range=(fmin, fmax))
    # bin centers (edges shifted by half a bin width)
    de = full_edge[1] - full_edge[0]
    full_f = full_edge[1:] - de/2
    # center index (actually we would expect f_c==0)
    idx_c = np.argmax(full_hist)
    # half-maximum indices
    idx_start = idx_c - count_to_half(full_hist[:idx_c][::-1])
    idx_end = idx_c + count_to_half(full_hist[idx_c:])
    # RI values outside
    below = (data.real > fmin) * (data.real < full_f[idx_start])
    above = (data.real > full_f[idx_end]) * (data.real < fmax)
    out = below | above
    if mask is not None:
        # honor given mask
        out *= mask
    # push RI values to zero
    data.real[out] *= .5
    if bg_shell is not None:
        # push known background data to zero
        data.real[bg_shell] *= .5
def correct(f, res, nm, method="nn", mask=None, bg_shell_width=None,
            enforce_envelope=0.95, max_iter=100, min_diff=.01,
            count=None, max_count=None):
    r"""Fill the missing apple core of the object function
    Parameters
    ----------
    f: 3D ndarray
        Complex object function :math:`f(\mathbf{r})`
    res: float
        Size of the vacuum wave length :math:`\lambda` in pixels
    nm: float
        Refractive index of the medium :math:`n_\mathrm{m}` that
        surrounds the object in :math:`n(\mathbf{r})`
    method: str
        One of:
        - "nn": non-negativity constraint (`f > 0`). This method
          resembles classic missing apple core correction.
        - "sh": symmetric histogram constraint (background data in
          `f`). This method works well for sparse-gradient data (e.g.
          works better than "nn" for simulated data), but might result
          in stripe-like artifacts when applied to experimental data.
        The imaginary part of the refractive index is suppressed
        in both cases.
        Note that these constraints are soft, i.e. after the final
        inverse Fourier transform, the conditions might not be met.
    mask: 3D boolean ndarray, or None
        Optional, defines background region(s) used for enforcing
        `method`. If a boolean ndarray, the values set to `True` define
        the used background regions.
    bg_shell_width: float
        Optional, defines the width of an ellipsoid shell (outer radii
        matching image shape) that is used additionally for enforcing
        `method`.
    enforce_envelope: float in interval [0,1] or False
        Set the suppression factor for frequencies that are above
        the envelope function; disabled if set to False or 0
    max_iter: int
        Maximum number of iterations to perform
    min_diff: float
        Stopping criterion computed as the relative difference
        (relative to the first iteration `norm`) of the changes applied
        during the current iteration `cur_diff`:
        ``np.abs(cur_diff/norm) < min_diff``
    count: multiprocessing.Value
        May be used for tracking progress. At each iteration
        `count.value` is incremented by one.
    max_count: multiprocessing.Value
        May be used for tracking progress; is incremented initially.
    Notes
    -----
    Internally, the Fourier transform is performed with single-precision
    floating point values (complex64).
    """
    if enforce_envelope < 0 or enforce_envelope > 1:
        raise ValueError("`enforce_envelope` must be in interval [0, 1]")
    if max_count is not None:
        with max_count.get_lock():
            max_count.value += max_iter + 2
    # Location of the apple core
    core = apple_core_3d(shape=f.shape, res=res, nm=nm)
    if count is not None:
        with count.get_lock():
            count.value += 1
    # Reusable FFTW buffers and plans (forward and backward share them).
    data = pyfftw.empty_aligned(f.shape, dtype='complex64')
    ftdata = pyfftw.empty_aligned(f.shape, dtype='complex64')
    fftw_forw = pyfftw.FFTW(data, ftdata,
                            axes=(0, 1, 2),
                            direction="FFTW_FORWARD",
                            flags=["FFTW_MEASURE"],
                            threads=mp.cpu_count())
    # Note: input array `ftdata` is destroyed when invoking `fftw_back`
    fftw_back = pyfftw.FFTW(ftdata, data,
                            axes=(0, 1, 2),
                            direction="FFTW_BACKWARD",
                            flags=["FFTW_MEASURE"],
                            threads=mp.cpu_count())
    # Start from the real part of f; absorption is suppressed.
    data.real[:] = f.real
    data.imag[:] = 0
    fftw_forw.execute()
    # Measured Fourier data outside the core is kept fixed each round.
    ftdata_orig = ftdata.copy()
    if count is not None:
        with count.get_lock():
            count.value += 1
    if enforce_envelope:
        # Envelope function of Fourier amplitude
        ftevlp = envelope_gauss(ftdata_orig, core)
    init_state = np.sum(np.abs(ftdata_orig[core])) / data.size
    prev_state = init_state
    if bg_shell_width is not None:
        bg_shell = ellipsoid_shell(data.shape, width=bg_shell_width)
    else:
        bg_shell = None
    # Iterative projection loop: apply the real-space constraint, go to
    # Fourier space, re-impose the measured data outside the core, and
    # transform back; stop when the core energy stops changing.
    for ii in range(max_iter):
        if method == "nn":
            # non-negativity
            constraint_nn(data=data, mask=mask, bg_shell=bg_shell)
        elif method == "sh":
            # symmetric histogram
            constraint_sh(data=data, mask=mask, bg_shell=bg_shell)
        # Go into Fourier domain
        fftw_forw.execute()
        if enforce_envelope:
            # Suppress large frequencies with the envelope
            high = np.abs(ftdata) > ftevlp
            ftdata[high] *= enforce_envelope
        if method == "sh":
            # update dc term
            ftdata_orig[0, 0, 0] = (ftdata_orig[0, 0, 0] + ftdata[0, 0, 0])/2
        # Enforce original data
        ftdata[~core] = ftdata_orig[~core]
        fftw_back.execute()
        # pyfftw's backward transform is unnormalized; divide by N.
        data[:] /= fftw_forw.N
        if count is not None:
            with count.get_lock():
                count.value += 1
        cur_state = np.sum(np.abs(ftdata[core])) / data.size
        cur_diff = cur_state - prev_state
        if ii == 0:
            norm = cur_diff
        else:
            if np.abs(cur_diff/norm) < min_diff:
                break
        prev_state = cur_state
    if count is not None:
        with count.get_lock():
            # add skipped counts (due to stopping criterion)
            count.value += max_iter - ii - 1
    return data
def count_to_half(array):
    """Determination of half-initial value index

    Return the number of consecutive items after ``array[0]`` that are
    still at least half of the initial value, i.e. the first index at
    which the values decrease below 1/2 of ``array[0]``.

    Parameters
    ----------
    array: 1D sequence of numbers
        Values to scan; may be empty.

    Returns
    -------
    num: int
        Count of leading items in ``array[1:]`` that are
        >= ``array[0] / 2``; 0 for empty or single-element input.
    """
    # Guard against empty input: callers such as constraint_sh pass
    # histogram slices that are empty when the peak sits at index 0,
    # which previously raised IndexError on `array[0]`.
    if len(array) == 0:
        return 0
    half = array[0] / 2  # hoisted loop invariant
    num = 0
    for item in array[1:]:
        if item < half:
            break
        num += 1
    return num
def ellipsoid_shell(shape, width=20):
    """Return the background ellipsoid shell as a boolean mask.

    The shell is the symmetric difference between the full spill-over
    region and the same region shrunk inward by `width` pixels.
    """
    outer = spillover_region(shape, shell=0)
    inner = spillover_region(shape, shell=width)
    return outer ^ inner
def envelope_gauss(ftdata, core):
    r"""Compute a gaussian-filtered envelope, without apple core

    The apple-core region of ``|ftdata|`` is first filled with estimates
    taken from the other axes and via nearest-neighbor interpolation,
    then the result is blurred with a Gaussian filter.

    Parameters
    ----------
    ftdata: 3D ndarray
        Fourier transform of the object function data
        (zero frequency not shifted to center of array)
    core: 3D ndarray (same shape as ftdata)
        Apple core (as defined by :func:`apple_core_3d`)

    Returns
    -------
    envelope: 3D ndarray
        Envelope function in Fourier space
    """
    hull = np.abs(ftdata)
    hull[core] = np.nan  # label core data with nans
    # Fill the apple core region with data from known regions from
    # the other axes (we only need an estimate of the envelope, so
    # this is a very good estimation of the Fourier amplitudes).
    shx, shy, _ = hull.shape
    maxsh = max(shx, shy)
    dsh = abs(shy - shx) // 2
    # Determine the slice that embeds the (shifted) data in a cube of
    # edge length maxsh, centered along the shorter of the two axes.
    # NOTE(review): the padded cube is sized by max(shx, shy) only;
    # this assumes the third axis extent matches — confirm upstream.
    if shx > shy:
        theslice = (slice(0, shx),
                    slice(dsh, shy+dsh),
                    slice(0, shx))
    else:
        theslice = (slice(dsh, shx+dsh),
                    slice(0, shy),
                    slice(dsh, shx+dsh),
                    )
    # 1. Create padded versions of the arrays, because shx and shy
    #    can be different and inserting a transposed array will not work.
    hull_pad = np.zeros((maxsh, maxsh, maxsh), dtype=float)
    hull_pad[theslice] = np.fft.fftshift(hull)
    core_pad = np.zeros((maxsh, maxsh, maxsh), dtype=bool)
    core_pad[theslice] = np.fft.fftshift(core)
    # 2. Fill values from other axes where data are missing: the core
    #    voxels take the amplitude of the x/y-transposed array.
    hull_pad[core_pad] = np.transpose(hull_pad, (1, 0, 2))[core_pad]
    # 3. Fill any remaining nan-values (due to different shape or tilt)
    #    with nearest neighbors. Use a distance transform for nearest
    #    neighbor interpolation (return_indices yields, per voxel, the
    #    index of the closest valid voxel).
    invalid = np.isnan(hull_pad)
    ind = ndi.distance_transform_edt(invalid,
                                     return_distances=False,
                                     return_indices=True)
    hull_pad[:] = hull_pad[tuple(ind)]
    # 4. Write the data back to the original array.
    hull[:] = np.fft.ifftshift(hull_pad[theslice])
    # Perform gaussian blurring (shift data to make it smooth)
    gauss = ndi.gaussian_filter(input=np.fft.fftshift(hull),
                                sigma=np.max(ftdata.shape)/100,
                                mode="constant",
                                cval=0,
                                truncate=4.0)
    # Shift back gauss so zero frequency is at index 0 again
    shifted_gauss = np.fft.ifftshift(gauss)
    return shifted_gauss
def spillover_region(shape, shell=0):
    """Boolean mask selecting voxels outside the centered ellipsoid.

    The ellipsoid is centered in the array with semi-axes of
    ``shape[i]/2 - shell`` along each dimension; entries outside it
    are True.
    """
    centers = np.array(shape) / 2
    axes = [np.arange(num) for num in shape]
    grids = np.meshgrid(*axes, indexing="ij", sparse=True)
    # Normalized squared distance from the center; > 1 means outside.
    dist_sq = sum(((grid - c) / (c - shell)) ** 2
                  for grid, c in zip(grids, centers))
    return dist_sq > 1
| {
"content_hash": "4566fe42057846357cf7675d9b1c26c1",
"timestamp": "",
"source": "github",
"line_count": 363,
"max_line_length": 77,
"avg_line_length": 33.47933884297521,
"alnum_prop": 0.5887435201184893,
"repo_name": "paulmueller/ODTbrain",
"id": "9d9b7e7f8896ef9eeed70237dd27f2616df814ad",
"size": "12153",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "odtbrain/apple.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "150751"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration for the ``scrum`` app.

    Drops ``Story.time`` in favor of an auto-updated ``last_modified``
    timestamp, and adds ``estimated_time`` float columns to ``Story``
    and ``Task``; ``Sprint`` also gains ``last_modified``.
    """
    def forwards(self, orm):
        """Apply the migration (drop ``Story.time``, add new columns)."""
        # Deleting field 'Story.time'
        db.delete_column(u'scrum_story', 'time')
        # Adding field 'Story.last_modified'
        # (static default datetime backfills existing rows; auto_now
        # takes over on subsequent saves)
        db.add_column(u'scrum_story', 'last_modified',
                      self.gf('django.db.models.fields.DateTimeField')(auto_now=True, default=datetime.datetime(2013, 9, 28, 0, 0), blank=True),
                      keep_default=False)
        # Adding field 'Story.estimated_time'
        db.add_column(u'scrum_story', 'estimated_time',
                      self.gf('django.db.models.fields.FloatField')(default=5),
                      keep_default=False)
        # Adding field 'Task.estimated_time'
        db.add_column(u'scrum_task', 'estimated_time',
                      self.gf('django.db.models.fields.FloatField')(default=5),
                      keep_default=False)
        # Adding field 'Sprint.last_modified'
        db.add_column(u'scrum_sprint', 'last_modified',
                      self.gf('django.db.models.fields.DateTimeField')(auto_now=True, default=datetime.datetime(2013, 9, 28, 0, 0), blank=True),
                      keep_default=False)
    def backwards(self, orm):
        """Revert the migration (restore ``Story.time``, drop new columns)."""
        # Adding field 'Story.time'
        db.add_column(u'scrum_story', 'time',
                      self.gf('django.db.models.fields.DateTimeField')(auto_now=True, default=5, blank=True),
                      keep_default=False)
        # Deleting field 'Story.last_modified'
        db.delete_column(u'scrum_story', 'last_modified')
        # Deleting field 'Story.estimated_time'
        db.delete_column(u'scrum_story', 'estimated_time')
        # Deleting field 'Task.estimated_time'
        db.delete_column(u'scrum_task', 'estimated_time')
        # Deleting field 'Sprint.last_modified'
        db.delete_column(u'scrum_sprint', 'last_modified')
    # Frozen ORM snapshot generated by South; describes the model state
    # at the time of this migration. Do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'scrum.project': {
            'Meta': {'ordering': "('id',)", 'object_name': 'Project'},
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'Project_users'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"})
        },
        u'scrum.sprint': {
            'Meta': {'ordering': "('number',)", 'unique_together': "(('number', 'project'),)", 'object_name': 'Sprint'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'number': ('django.db.models.fields.IntegerField', [], {}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Sprint_project'", 'to': u"orm['scrum.Project']"}),
            'tasks': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['scrum.Task']", 'null': 'True', 'through': u"orm['scrum.SprintTasks']", 'blank': 'True'})
        },
        u'scrum.sprinttasks': {
            'Meta': {'object_name': 'SprintTasks'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'sprint': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sprints'", 'to': u"orm['scrum.Sprint']"}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tasks'", 'to': u"orm['scrum.Task']"}),
            'task_end_status': ('django.db.models.fields.CharField', [], {'max_length': '10'})
        },
        u'scrum.story': {
            'Meta': {'ordering': "('title',)", 'object_name': 'Story'},
            'estimated_time': ('django.db.models.fields.FloatField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Story_project'", 'to': u"orm['scrum.Project']"}),
            'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
        },
        u'scrum.task': {
            'Meta': {'ordering': "('id',)", 'object_name': 'Task'},
            'assigned_to': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'Task_users'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
            'estimated_time': ('django.db.models.fields.FloatField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            'story': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Task_story'", 'to': u"orm['scrum.Story']"}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        }
    }
    complete_apps = ['scrum']
"content_hash": "893dd090339ec530d8f4920b96589626",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 203,
"avg_line_length": 65.45185185185186,
"alnum_prop": 0.5552286102308737,
"repo_name": "OpenSourceSag/django-dash2013",
"id": "29eed5b98414069f8b0bf15b8448ad2c22b1c35e",
"size": "8860",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scrum/migrations/0004_auto__del_field_story_time__add_field_story_last_modified__add_field_s.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "119635"
},
{
"name": "JavaScript",
"bytes": "66107"
},
{
"name": "Python",
"bytes": "80188"
}
],
"symlink_target": ""
} |
import re
def variableName(name):
    """Check whether `name` is a valid variable name.

    A valid name starts with an ASCII letter or underscore, followed
    by any number of ASCII letters, digits or underscores.

    Bug fix: the previous ``re.match("^...$", name)`` accepted a
    trailing newline, because ``$`` also matches just before a final
    ``"\n"``; ``re.fullmatch`` requires the entire string to match.
    """
    return re.fullmatch("[a-zA-Z_][a-zA-Z_0-9]*", name) is not None
| {
"content_hash": "7a73856e03f0095df98f453ab1e641d9",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 65,
"avg_line_length": 47.833333333333336,
"alnum_prop": 0.7038327526132404,
"repo_name": "zubie7a/CPP",
"id": "8b0c1e3a6813af2f06a6b2defec405ace584ffb8",
"size": "351",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "CodeSignal/Arcade/Intro/Level_06/03_Variable_Name.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "290798"
}
],
"symlink_target": ""
} |
'''
Created on Nov 2, 2013
@author: peterb
'''
import logging
class Shy(object):
    """Minimal publish/subscribe hub.

    Listeners subscribe under an optional queue name; :meth:`broadcast`
    delivers a message to every listener whose name matches the
    ``"queue"`` entry of the options dict, plus all listeners that
    subscribed without a name.
    """
    # Key looked up in the broadcast ``options`` dict to select a queue.
    QUEUE = "queue"
    def __init__(self):
        # List of (name, listener) tuples; duplicates are allowed.
        self.subscriptions = []
    def subscribe(self, listener, name=None):
        """Register `listener` for messages on queue `name`."""
        self.subscriptions.append((name, listener))
        logging.info("subscription to %s", name)
    def unsubscribe(self, listener, name=None):
        """Remove every registration of (`name`, `listener`).

        Bug fix: the previous implementation assigned ``filter(...)``,
        which on Python 3 is a lazy iterator, so a subsequent
        ``subscribe`` failed with AttributeError on ``.append``. A list
        comprehension keeps ``subscriptions`` a real list.
        """
        self.subscriptions = [s for s in self.subscriptions
                              if s != (name, listener)]
        logging.info("unsubscribed to %s", name)
    def broadcast(self, message, options=None):
        """Deliver `message` (and `options`) to all matching listeners."""
        queue = options.get(self.QUEUE) if options else None
        for name, listener in self.subscriptions:
            if name is None or name == queue:
                listener(message, options)
| {
"content_hash": "f1ad077ff7d85f9beda05b1b0b55fdfb",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 85,
"avg_line_length": 24.970588235294116,
"alnum_prop": 0.552414605418139,
"repo_name": "blueshed/blueshed-py",
"id": "83718cd9cfc33bf4f957ed2fe05c311ec7b573f4",
"size": "849",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/blueshed/shy/shy.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "243536"
},
{
"name": "HTML",
"bytes": "12490"
},
{
"name": "JavaScript",
"bytes": "1073125"
},
{
"name": "Python",
"bytes": "193132"
},
{
"name": "Shell",
"bytes": "109"
}
],
"symlink_target": ""
} |
"""OpenStack logging handler.
This module adds to logging functionality by adding the option to specify
a context object when calling the various log methods. If the context object
is not specified, default formatting is used. Additionally, an instance uuid
may be passed as part of the log message, which is intended to make it easier
for admins to find messages related to a specific instance.
It also allows setting of formatting information through conf.
"""
import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import socket
import sys
import traceback
from oslo.config import cfg
import six
from six import moves
_PY26 = sys.version_info[0:2] == (2, 6)
from nova.openstack.common.gettextutils import _
from nova.openstack.common import importutils
from nova.openstack.common import jsonutils
from nova.openstack.common import local
# NOTE(flaper87): Pls, remove when graduating this module
# from the incubator.
from nova.openstack.common.strutils import mask_password # noqa
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
common_cli_opts = [
cfg.BoolOpt('debug',
short='d',
default=False,
help='Print debugging output (set logging level to '
'DEBUG instead of default WARNING level).'),
cfg.BoolOpt('verbose',
short='v',
default=False,
help='Print more verbose output (set logging level to '
'INFO instead of default WARNING level).'),
]
logging_cli_opts = [
cfg.StrOpt('log-config-append',
metavar='PATH',
deprecated_name='log-config',
help='The name of a logging configuration file. This file '
'is appended to any existing logging configuration '
'files. For details about logging configuration files, '
'see the Python logging module documentation.'),
cfg.StrOpt('log-format',
metavar='FORMAT',
help='DEPRECATED. '
'A logging.Formatter log message format string which may '
'use any of the available logging.LogRecord attributes. '
'This option is deprecated. Please use '
'logging_context_format_string and '
'logging_default_format_string instead.'),
cfg.StrOpt('log-date-format',
default=_DEFAULT_LOG_DATE_FORMAT,
metavar='DATE_FORMAT',
help='Format string for %%(asctime)s in log records. '
'Default: %(default)s .'),
cfg.StrOpt('log-file',
metavar='PATH',
deprecated_name='logfile',
help='(Optional) Name of log file to output to. '
'If no default is set, logging will go to stdout.'),
cfg.StrOpt('log-dir',
deprecated_name='logdir',
help='(Optional) The base directory used for relative '
'--log-file paths.'),
cfg.BoolOpt('use-syslog',
default=False,
help='Use syslog for logging. '
'Existing syslog format is DEPRECATED during I, '
'and will change in J to honor RFC5424.'),
cfg.BoolOpt('use-syslog-rfc-format',
# TODO(bogdando) remove or use True after existing
# syslog format deprecation in J
default=False,
help='(Optional) Enables or disables syslog rfc5424 format '
'for logging. If enabled, prefixes the MSG part of the '
'syslog message with APP-NAME (RFC5424). The '
'format without the APP-NAME is deprecated in I, '
'and will be removed in J.'),
cfg.StrOpt('syslog-log-facility',
default='LOG_USER',
help='Syslog facility to receive log lines.')
]
generic_log_opts = [
cfg.BoolOpt('use_stderr',
default=True,
help='Log output to standard error.')
]
DEFAULT_LOG_LEVELS = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN',
'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO',
'oslo.messaging=INFO', 'iso8601=WARN',
'requests.packages.urllib3.connectionpool=WARN',
'urllib3.connectionpool=WARN', 'websocket=WARN',
"keystonemiddleware=WARN", "routes.middleware=WARN",
"stevedore=WARN"]
log_opts = [
cfg.StrOpt('logging_context_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [%(request_id)s %(user_identity)s] '
'%(instance)s%(message)s',
help='Format string to use for log messages with context.'),
cfg.StrOpt('logging_default_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [-] %(instance)s%(message)s',
help='Format string to use for log messages without context.'),
cfg.StrOpt('logging_debug_format_suffix',
default='%(funcName)s %(pathname)s:%(lineno)d',
help='Data to append to log format when level is DEBUG.'),
cfg.StrOpt('logging_exception_prefix',
default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
'%(instance)s',
help='Prefix each line of exception output with this format.'),
cfg.ListOpt('default_log_levels',
default=DEFAULT_LOG_LEVELS,
help='List of logger=LEVEL pairs.'),
cfg.BoolOpt('publish_errors',
default=False,
help='Enables or disables publication of error events.'),
cfg.BoolOpt('fatal_deprecations',
default=False,
help='Enables or disables fatal status of deprecations.'),
# NOTE(mikal): there are two options here because sometimes we are handed
# a full instance (and could include more information), and other times we
# are just handed a UUID for the instance.
cfg.StrOpt('instance_format',
default='[instance: %(uuid)s] ',
help='The format for an instance that is passed with the log '
'message.'),
cfg.StrOpt('instance_uuid_format',
default='[instance: %(uuid)s] ',
help='The format for an instance UUID that is passed with the '
'log message.'),
]
CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)
# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
# module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')
if hasattr(logging, 'NullHandler'):
    NullHandler = logging.NullHandler
else:
    # logging.NullHandler only exists from Python 2.7 on; provide a
    # drop-in replacement for Python 2.6.
    class NullHandler(logging.Handler):
        """Handler that silently discards every log record."""
        def handle(self, record):
            pass
        def emit(self, record):
            pass
        def createLock(self):
            self.lock = None
def _dictify_context(context):
    """Coerce a request context into a plain dict (or None).

    Dicts and None pass through unchanged; any other object exposing a
    truthy ``to_dict`` attribute is converted via ``to_dict()``.
    """
    if context is None:
        return None
    converter = getattr(context, 'to_dict', None)
    if not isinstance(context, dict) and converter:
        return converter()
    return context
def _get_binary_name():
    """Base name of the script at the bottom of the call stack."""
    outermost_frame = inspect.stack()[-1]
    return os.path.basename(outermost_frame[1])
def _get_log_file_path(binary=None):
    """Resolve the configured log file path, or None for stdout/stderr.

    Combines CONF.log_file and CONF.log_dir; when only a directory is
    configured, the binary name (defaulting to the running script)
    plus a ``.log`` suffix is used as the file name.
    """
    logfile = CONF.log_file
    logdir = CONF.log_dir
    if logfile:
        return os.path.join(logdir, logfile) if logdir else logfile
    if logdir:
        binary = binary or _get_binary_name()
        return '%s.log' % (os.path.join(logdir, binary),)
    return None
class BaseLoggerAdapter(logging.LoggerAdapter):
    """LoggerAdapter base class that adds the synthesized AUDIT level."""
    def audit(self, msg, *args, **kwargs):
        """Log `msg` at the AUDIT level (INFO + 1)."""
        self.log(logging.AUDIT, msg, *args, **kwargs)
    def isEnabledFor(self, level):
        # logging.LoggerAdapter.isEnabledFor() only exists from Python
        # 2.7 on; on 2.6 replicate it by delegating to the wrapped
        # logger, which is exactly what the 2.7 implementation does.
        if not _PY26:
            return super(BaseLoggerAdapter, self).isEnabledFor(level)
        return self.logger.isEnabledFor(level)
class LazyAdapter(BaseLoggerAdapter):
    """Adapter that defers creation of the real logger until first use."""
    def __init__(self, name='unknown', version='unknown'):
        self._logger = None
        self.extra = {}
        self.name = name
        self.version = version
    @property
    def logger(self):
        """Create (once) and return the wrapped ContextAdapter."""
        if self._logger is None:
            self._logger = getLogger(self.name, self.version)
            if six.PY3:
                # In Python 3, the code fails because the 'manager'
                # attribute cannot be found when using a LoggerAdapter
                # as the underlying logger; copy it through as a
                # workaround.
                self._logger.manager = self._logger.logger.manager
        return self._logger
class ContextAdapter(BaseLoggerAdapter):
    """LoggerAdapter that injects request-context data into records.

    Adds project/version information and, when available, the
    thread-local request context (request id, user identity, instance
    info) to the ``extra`` dict of every log call so the configured
    format strings can reference them.
    """
    # Keep the deprecated .warn() spelling as an alias of .warning().
    warn = logging.LoggerAdapter.warning
    def __init__(self, logger, project_name, version_string):
        self.logger = logger
        self.project = project_name
        self.version = version_string
        # Maps deprecation message -> list of args already logged, so
        # each unique (msg, args) pair is only warned about once.
        self._deprecated_messages_sent = dict()
    @property
    def handlers(self):
        # Expose the wrapped logger's handlers (LoggerAdapter does not).
        return self.logger.handlers
    def deprecated(self, msg, *args, **kwargs):
        """Call this method when a deprecated feature is used.
        If the system is configured for fatal deprecations then the message
        is logged at the 'critical' level and :class:`DeprecatedConfig` will
        be raised.
        Otherwise, the message will be logged (once) at the 'warn' level.
        :raises: :class:`DeprecatedConfig` if the system is configured for
                 fatal deprecations.
        """
        stdmsg = _("Deprecated: %s") % msg
        if CONF.fatal_deprecations:
            self.critical(stdmsg, *args, **kwargs)
            raise DeprecatedConfig(msg=stdmsg)
        # Using a list because a tuple with dict can't be stored in a set.
        sent_args = self._deprecated_messages_sent.setdefault(msg, list())
        if args in sent_args:
            # Already logged this message, so don't log it again.
            return
        sent_args.append(args)
        self.warn(stdmsg, *args, **kwargs)
    def process(self, msg, kwargs):
        """Merge context, instance and project data into ``kwargs['extra']``."""
        # NOTE(jecarey): If msg is not unicode, coerce it into unicode
        #                before it can get to the python logging and
        #                possibly cause string encoding trouble
        if not isinstance(msg, six.text_type):
            msg = six.text_type(msg)
        if 'extra' not in kwargs:
            kwargs['extra'] = {}
        extra = kwargs['extra']
        context = kwargs.pop('context', None)
        if not context:
            # Fall back to the thread-local request context, if any.
            context = getattr(local.store, 'context', None)
        if context:
            extra.update(_dictify_context(context))
        instance = kwargs.pop('instance', None)
        instance_uuid = (extra.get('instance_uuid') or
                         kwargs.pop('instance_uuid', None))
        instance_extra = ''
        if instance:
            instance_extra = CONF.instance_format % instance
        elif instance_uuid:
            instance_extra = (CONF.instance_uuid_format
                              % {'uuid': instance_uuid})
        extra['instance'] = instance_extra
        extra.setdefault('user_identity', kwargs.pop('user_identity', None))
        extra['project'] = self.project
        extra['version'] = self.version
        # Keep a snapshot of everything under 'extra' so JSONFormatter
        # can serialize it as a single field.
        extra['extra'] = extra.copy()
        return msg, kwargs
class JSONFormatter(logging.Formatter):
    """Formatter that serializes each log record as a JSON object."""
    def __init__(self, fmt=None, datefmt=None):
        # NOTE(jkoelker) we ignore the fmt argument, but its still there
        #                since logging.config.fileConfig passes it.
        self.datefmt = datefmt
    def formatException(self, ei, strip_newlines=True):
        """Format exc_info as a list of traceback lines.

        With `strip_newlines` (the default), each traceback entry is
        split into its individual lines and empty lines are dropped.
        """
        lines = traceback.format_exception(*ei)
        if strip_newlines:
            lines = [moves.filter(
                lambda x: x,
                line.rstrip().splitlines()) for line in lines]
            lines = list(itertools.chain(*lines))
        return lines
    def format(self, record):
        """Serialize the record's attributes into a JSON string.

        The emitted key names form the wire format consumed by log
        processors; do not rename them.
        """
        message = {'message': record.getMessage(),
                   'asctime': self.formatTime(record, self.datefmt),
                   'name': record.name,
                   'msg': record.msg,
                   'args': record.args,
                   'levelname': record.levelname,
                   'levelno': record.levelno,
                   'pathname': record.pathname,
                   'filename': record.filename,
                   'module': record.module,
                   'lineno': record.lineno,
                   'funcname': record.funcName,
                   'created': record.created,
                   'msecs': record.msecs,
                   'relative_created': record.relativeCreated,
                   'thread': record.thread,
                   'thread_name': record.threadName,
                   'process_name': record.processName,
                   'process': record.process,
                   'traceback': None}
        # 'extra' is attached by ContextAdapter.process(), if in use.
        if hasattr(record, 'extra'):
            message['extra'] = record.extra
        if record.exc_info:
            message['traceback'] = self.formatException(record.exc_info)
        return jsonutils.dumps(message)
def _create_logging_excepthook(product_name):
    """Build a sys.excepthook replacement that logs uncaught exceptions.

    The hook logs the one-line exception summary at CRITICAL level on
    the `product_name` logger and attaches the full exc_info so
    formatters can render the traceback.
    """
    def logging_excepthook(exc_type, value, tb):
        summary = "".join(
            traceback.format_exception_only(exc_type, value))
        getLogger(product_name).critical(
            summary, **{'exc_info': (exc_type, value, tb)})
    return logging_excepthook
class LogConfigError(Exception):
    """Raised when a logging configuration file cannot be loaded."""
    message = _('Error loading logging config %(log_config)s: %(err_msg)s')
    def __init__(self, log_config, err_msg):
        self.log_config = log_config
        self.err_msg = err_msg
    def __str__(self):
        return self.message % {'log_config': self.log_config,
                               'err_msg': self.err_msg}
def _load_log_config(log_config_append):
    """Append the given logging config file to the current configuration.

    :raises LogConfigError: if the file cannot be read or parsed.
    """
    try:
        logging.config.fileConfig(log_config_append,
                                  disable_existing_loggers=False)
    except (moves.configparser.Error, KeyError) as exc:
        raise LogConfigError(log_config_append, six.text_type(exc))
def setup(product_name, version='unknown'):
    """Set up logging for `product_name` and install the excepthook.

    When CONF.log_config_append is set, the referenced config file
    wins; otherwise handlers, formatters and levels are derived from
    the individual CONF options.
    """
    if not CONF.log_config_append:
        _setup_logging_from_conf(product_name, version)
    else:
        _load_log_config(CONF.log_config_append)
    sys.excepthook = _create_logging_excepthook(product_name)
def set_defaults(logging_context_format_string=None,
                 default_log_levels=None):
    """Override selected defaults of the registered logging options.

    Either argument may be None, in which case the corresponding
    option default is left untouched.  (default_log_levels was added
    after the first release of this function in a backwards-compatible
    way, hence the per-argument None checks.)
    """
    overrides = {}
    if default_log_levels is not None:
        overrides['default_log_levels'] = default_log_levels
    if logging_context_format_string is not None:
        overrides['logging_context_format_string'] = \
            logging_context_format_string
    if overrides:
        cfg.set_defaults(log_opts, **overrides)
def _find_facility_from_conf():
    """Map CONF.syslog_log_facility to a SysLogHandler facility code.

    Accepts either a SysLogHandler attribute name or a key of its
    ``facility_names`` mapping.

    :raises TypeError: if the configured name is not a known facility.
    """
    facility_names = logging.handlers.SysLogHandler.facility_names
    facility = getattr(logging.handlers.SysLogHandler,
                       CONF.syslog_log_facility,
                       None)
    if facility is None and CONF.syslog_log_facility in facility_names:
        facility = facility_names.get(CONF.syslog_log_facility)
    if facility is None:
        # Bug fix: on Python 3 dict.keys() returns a view object that
        # has no .extend(); materialize it as a list first.
        valid_facilities = list(facility_names)
        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
                  'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
        valid_facilities.extend(consts)
        raise TypeError(_('syslog facility must be one of: %s') %
                        ', '.join("'%s'" % fac
                                  for fac in valid_facilities))
    return facility
class RFCSysLogHandler(logging.handlers.SysLogHandler):
    """SysLogHandler that prefixes messages with APP-NAME (RFC 5424)."""
    def __init__(self, *args, **kwargs):
        self.binary_name = _get_binary_name()
        # Old-style base-class call: on Python 2.6 SysLogHandler is a
        # classic class, so super() cannot be used.
        logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)
    def format(self, record):
        # Same old-style call as in __init__, for the same reason.
        formatted = logging.handlers.SysLogHandler.format(self, record)
        return '%s %s' % (self.binary_name, formatted)
def _setup_logging_from_conf(project, version):
    """Configure root-logger handlers, formatters and levels from CONF."""
    log_root = getLogger(None).logger
    # Start from a clean slate: drop any previously attached handlers.
    for handler in log_root.handlers:
        log_root.removeHandler(handler)
    logpath = _get_log_file_path()
    if logpath:
        filelog = logging.handlers.WatchedFileHandler(logpath)
        log_root.addHandler(filelog)
    if CONF.use_stderr:
        streamlog = ColorHandler()
        log_root.addHandler(streamlog)
    elif not logpath:
        # pass sys.stdout as a positional argument
        # python2.6 calls the argument strm, in 2.7 it's stream
        streamlog = logging.StreamHandler(sys.stdout)
        log_root.addHandler(streamlog)
    if CONF.publish_errors:
        # Prefer the incubator copy of the handler, fall back to the
        # oslo.messaging implementation when it is not available.
        try:
            handler = importutils.import_object(
                "nova.openstack.common.log_handler.PublishErrorsHandler",
                logging.ERROR)
        except ImportError:
            handler = importutils.import_object(
                "oslo.messaging.notify.log_handler.PublishErrorsHandler",
                logging.ERROR)
        log_root.addHandler(handler)
    datefmt = CONF.log_date_format
    for handler in log_root.handlers:
        # NOTE(alaski): CONF.log_format overrides everything currently. This
        # should be deprecated in favor of context aware formatting.
        if CONF.log_format:
            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
                                                   datefmt=datefmt))
            log_root.info('Deprecated: log_format is now deprecated and will '
                          'be removed in the next release')
        else:
            handler.setFormatter(ContextFormatter(project=project,
                                                  version=version,
                                                  datefmt=datefmt))
    # Root level: debug > verbose > default WARNING.
    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)
    # Apply the per-module "logger=LEVEL" overrides.
    for pair in CONF.default_log_levels:
        mod, _sep, level_name = pair.partition('=')
        logger = logging.getLogger(mod)
        # NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name
        # to integer code.
        if sys.version_info < (2, 7):
            level = logging.getLevelName(level_name)
            logger.setLevel(level)
        else:
            logger.setLevel(level_name)
    if CONF.use_syslog:
        try:
            facility = _find_facility_from_conf()
            # TODO(bogdando) use the format provided by RFCSysLogHandler
            #   after existing syslog format deprecation in J
            if CONF.use_syslog_rfc_format:
                syslog = RFCSysLogHandler(facility=facility)
            else:
                syslog = logging.handlers.SysLogHandler(facility=facility)
            log_root.addHandler(syslog)
        except socket.error:
            # NOTE(review): the adjacent literals concatenate to
            # "...syslogis running." — missing space; fix upstream.
            log_root.error('Unable to add syslog handler. Verify that syslog'
                           'is running.')
# Cache of ContextAdapter instances created by getLogger(), keyed by name.
_loggers = {}
def getLogger(name='unknown', version='unknown'):
    """Return the (cached) ContextAdapter for `name`.

    The adapter wraps the standard logger of the same name; repeated
    calls with the same name return the same adapter instance.
    """
    try:
        return _loggers[name]
    except KeyError:
        adapter = ContextAdapter(logging.getLogger(name), name, version)
        _loggers[name] = adapter
        return adapter
def getLazyLogger(name='unknown', version='unknown'):
    """Return a pass-through logger that defers real-logger creation.

    The returned LazyAdapter does not create the underlying
    ContextAdapter until it is first needed, then delegates all calls
    to it.
    """
    return LazyAdapter(name, version)
class WritableLogger(object):
    """A thin wrapper that responds to `write` and logs.

    Lets a logger stand in for a writable file-like object (e.g. as a
    WSGI server's error stream).
    """
    def __init__(self, logger, level=logging.INFO):
        self.logger = logger
        self.level = level
    def write(self, msg):
        """Log `msg` (minus trailing whitespace) at the configured level."""
        stripped = msg.rstrip()
        self.logger.log(self.level, stripped)
class ContextFormatter(logging.Formatter):
"""A context.RequestContext aware formatter configured through flags.
The flags used to set format strings are: logging_context_format_string
and logging_default_format_string. You can also specify
logging_debug_format_suffix to append extra formatting if the log level is
debug.
For information about what variables are available for the formatter see:
http://docs.python.org/library/logging.html#formatter
If available, uses the context value stored in TLS - local.store.context
"""
def __init__(self, *args, **kwargs):
"""Initialize ContextFormatter instance
Takes additional keyword arguments which can be used in the message
format string.
:keyword project: project name
:type project: string
:keyword version: project version
:type version: string
"""
self.project = kwargs.pop('project', 'unknown')
self.version = kwargs.pop('version', 'unknown')
logging.Formatter.__init__(self, *args, **kwargs)
def format(self, record):
"""Uses contextstring if request_id is set, otherwise default."""
# NOTE(jecarey): If msg is not unicode, coerce it into unicode
# before it can get to the python logging and
# possibly cause string encoding trouble
if not isinstance(record.msg, six.text_type):
record.msg = six.text_type(record.msg)
# store project info
record.project = self.project
record.version = self.version
# store request info
context = getattr(local.store, 'context', None)
if context:
d = _dictify_context(context)
for k, v in d.items():
setattr(record, k, v)
# NOTE(sdague): default the fancier formatting params
# to an empty string so we don't throw an exception if
# they get used
for key in ('instance', 'color', 'user_identity'):
if key not in record.__dict__:
record.__dict__[key] = ''
if record.__dict__.get('request_id'):
fmt = CONF.logging_context_format_string
else:
fmt = CONF.logging_default_format_string
if (record.levelno == logging.DEBUG and
CONF.logging_debug_format_suffix):
fmt += " " + CONF.logging_debug_format_suffix
if sys.version_info < (3, 2):
self._fmt = fmt
else:
self._style = logging.PercentStyle(fmt)
self._fmt = self._style._fmt
# Cache this on the record, Logger will respect our formatted copy
if record.exc_info:
record.exc_text = self.formatException(record.exc_info, record)
return logging.Formatter.format(self, record)
    def formatException(self, exc_info, record=None):
        """Format exception output with CONF.logging_exception_prefix.

        Without a record, falls back to stock logging formatting.  With
        one, every traceback line is prefixed with the configured prefix,
        interpolated against the record's attributes.
        """
        if not record:
            return logging.Formatter.formatException(self, exc_info)
        stringbuffer = moves.StringIO()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
                                  None, stringbuffer)
        lines = stringbuffer.getvalue().split('\n')
        stringbuffer.close()
        # The prefix may reference %(asctime)s; compute it only when needed.
        if CONF.logging_exception_prefix.find('%(asctime)') != -1:
            record.asctime = self.formatTime(record, self.datefmt)
        formatted_lines = []
        for line in lines:
            pl = CONF.logging_exception_prefix % record.__dict__
            fl = '%s%s' % (pl, line)
            formatted_lines.append(fl)
        return '\n'.join(formatted_lines)
class ColorHandler(logging.StreamHandler):
    """Stream handler that stamps an ANSI color escape, keyed off the
    record's level, onto each record as ``record.color`` before the
    standard StreamHandler formatting runs.
    """

    # Level -> ANSI escape sequence, consumed by format strings via %(color)s.
    LEVEL_COLORS = {
        logging.DEBUG: '\033[00;32m',  # GREEN
        logging.INFO: '\033[00;36m',  # CYAN
        logging.AUDIT: '\033[01;36m',  # BOLD CYAN
        logging.WARN: '\033[01;33m',  # BOLD YELLOW
        logging.ERROR: '\033[01;31m',  # BOLD RED
        logging.CRITICAL: '\033[01;31m',  # BOLD RED
    }

    def format(self, record):
        """Attach the level's color code, then delegate to StreamHandler."""
        color = self.LEVEL_COLORS[record.levelno]
        record.color = color
        return logging.StreamHandler.format(self, record)
class DeprecatedConfig(Exception):
    """Raised on a fatal call against a deprecated config option.

    :param msg: description of the deprecated usage, interpolated into
                the translated ``message`` template.
    """
    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        # Name the subclass in super() so cooperative initialization starts
        # at the correct point in the MRO; super(Exception, self) skipped
        # Exception itself and jumped straight to BaseException.
        super(DeprecatedConfig, self).__init__(self.message % dict(msg=msg))
| {
"content_hash": "219e6ee43280aa3643e2716c5446c20e",
"timestamp": "",
"source": "github",
"line_count": 696,
"max_line_length": 78,
"avg_line_length": 36.775862068965516,
"alnum_prop": 0.5924753867791842,
"repo_name": "vmthunder/nova",
"id": "5b8c6857912dc49d2314616e8d4a524ad39c895b",
"size": "26367",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "nova/openstack/common/log.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
"""
Python Flight Mechanics Engine (PyFME).
Copyright (c) AeroPython Development Team.
Distributed under the terms of the MIT License.
Wind Models
-----------
"""
import numpy as np
class NoWind(object):
    """Trivial wind model: the atmosphere is perfectly still.

    Exposes zero wind-velocity 3-vectors in both the local-horizon frame
    (``horizon``) and the body frame (``body``); ``update`` is a no-op.
    """

    def __init__(self):
        # Components: FROM North to South, FROM East to West, and in the
        # UPSIDE direction.  Each frame gets its own zero vector.
        self.horizon = np.zeros(3, dtype=float)
        self.body = self.horizon.copy()

    def update(self, state):
        """No-op: still air does not depend on the aircraft state."""
        pass
| {
"content_hash": "e6f61e34c04289d28b8fab97616a0274",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 64,
"avg_line_length": 22.09090909090909,
"alnum_prop": 0.6378600823045267,
"repo_name": "AlexS12/PyFME",
"id": "4d6d51a4e800f5cdca3eae7ad7d9578052aa88ab",
"size": "486",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/pyfme/environment/wind.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "190916"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
from openstack_dashboard.dashboards.project.networks.ports.views \
import DetailView
# URL regex for port sub-pages: captures the port id; %s is filled in
# with the action name (e.g. 'detail') below.
PORTS = r'^(?P<port_id>[^/]+)/%s$'
# Dotted path of the module holding the view callables referenced below.
VIEW_MOD = 'openstack_dashboard.dashboards.project.networks.ports.views'
urlpatterns = patterns(VIEW_MOD,
    url(PORTS % 'detail', DetailView.as_view(), name='detail')
)
| {
"content_hash": "fe6dd4c819f28d07a998ffa3a74eacb5",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 72,
"avg_line_length": 27.785714285714285,
"alnum_prop": 0.7403598971722365,
"repo_name": "rackerlabs/horizon",
"id": "be839b52d88578eef9af8e6d0eac83190b23d5b2",
"size": "1042",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "openstack_dashboard/dashboards/project/networks/ports/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
def divide(numerator, denominator):
    """ function to perform division of two numbers. This should not perform
    integer division

    Returns:
        numerator / denominator as a true (non-truncating) division result
    Raises:
        ZeroDivisionError: raised if denominator is zero
    """
    # operator.truediv guarantees true division on both Python 2 and 3;
    # a bare "/" truncates for int operands on Python 2, violating the
    # documented contract above.
    from operator import truediv
    return truediv(numerator, denominator)
"content_hash": "6c68059cd04b15755f489bf6e0b7964b",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 76,
"avg_line_length": 33,
"alnum_prop": 0.6628787878787878,
"repo_name": "aaronta/illinois",
"id": "8a508385f088d820b6b93aecd5481dc81c6a8a79",
"size": "264",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "unit_testing/src/divide.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "425"
},
{
"name": "CMake",
"bytes": "94"
},
{
"name": "Jupyter Notebook",
"bytes": "421761"
},
{
"name": "Python",
"bytes": "3588"
},
{
"name": "R",
"bytes": "18922"
}
],
"symlink_target": ""
} |
"""Enron person-of-interest (POI) classifier: feature engineering,
imputation, and a grid-searched pipeline, exported for tester.py.
Python 2 / legacy scikit-learn (cross_validation, grid_search) code.
"""
import sys
import pickle
sys.path.append("../tools/")
from feature_format import featureFormat, targetFeatureSplit
from tester import dump_classifier_and_data
### Task 1: Select what features you'll use.
### features_list is a list of strings, each of which is a feature name.
### The first feature must be "poi".
# Commented-out entries are candidates that were tried and dropped.
features_list = [
    'poi',
    'salary',
    # 'deferral_payments',
    # 'total_payments',
    # 'loan_advances',
    'bonus',
    'bonus_salary_ratio',
    # 'restricted_stock_deferred',
    # 'deferred_income',
    'total_stock_value',
    # 'expenses',
    'exercised_stock_options',
    # 'other',
    # 'long_term_incentive',
    # 'restricted_stock',
    # 'director_fees',
    # 'to_messages',
    # 'from_poi_to_this_person',
    # 'from_poi_to_this_person_percentage',
    # 'from_messages',
    # 'from_this_person_to_poi',
    'from_this_person_to_poi_percentage',
    'shared_receipt_with_poi'
]
### Load the dictionary containing the dataset
with open("final_project_dataset.pkl", "r") as data_file:
    data_dict = pickle.load(data_file)
### Task 2: Remove outliers
# Spreadsheet artifacts, not real employees.
data_dict.pop("TOTAL")
data_dict.pop("THE TRAVEL AGENCY IN THE PARK")
### Task 3: Create new feature(s)
# Bonus-salary ratio
for employee, features in data_dict.iteritems():
    if features['bonus'] == "NaN" or features['salary'] == "NaN":
        features['bonus_salary_ratio'] = "NaN"
    else:
        features['bonus_salary_ratio'] = float(features['bonus']) / float(features['salary'])
# from_this_person_to_poi as a percentage of from_messages
for employee, features in data_dict.iteritems():
    if features['from_this_person_to_poi'] == "NaN" or features['from_messages'] == "NaN":
        features['from_this_person_to_poi_percentage'] = "NaN"
    else:
        features['from_this_person_to_poi_percentage'] = float(features['from_this_person_to_poi']) / float(features['from_messages'])
# from_poi_to_this_person as a percentage of to_messages
for employee, features in data_dict.iteritems():
    if features['from_poi_to_this_person'] == "NaN" or features['to_messages'] == "NaN":
        features['from_poi_to_this_person_percentage'] = "NaN"
    else:
        features['from_poi_to_this_person_percentage'] = float(features['from_poi_to_this_person']) / float(features['to_messages'])
### Impute missing email features to mean
# NOTE: means are computed over every employee with a value present
# (POI and non-POI alike) in two passes: sum/count, then fill.
email_features = ['to_messages',
                  'from_poi_to_this_person',
                  'from_poi_to_this_person_percentage',
                  'from_messages',
                  'from_this_person_to_poi',
                  'from_this_person_to_poi_percentage',
                  'shared_receipt_with_poi']
from collections import defaultdict
email_feature_sums = defaultdict(lambda:0)
email_feature_counts = defaultdict(lambda:0)
for employee, features in data_dict.iteritems():
    for ef in email_features:
        if features[ef] != "NaN":
            email_feature_sums[ef] += features[ef]
            email_feature_counts[ef] += 1
email_feature_means = {}
for ef in email_features:
    email_feature_means[ef] = float(email_feature_sums[ef]) / float(email_feature_counts[ef])
for employee, features in data_dict.iteritems():
    for ef in email_features:
        if features[ef] == "NaN":
            features[ef] = email_feature_means[ef]
### Store to my_dataset for easy export below.
my_dataset = data_dict
### Extract features and labels from dataset for local testing
data = featureFormat(my_dataset, features_list, sort_keys=True)
labels, features = targetFeatureSplit(data)
### Task 4: Try a variety of classifiers
### Please name your classifier clf for easy export below.
### Note that if you want to do PCA or other multi-stage operations,
### you'll need to use Pipelines. For more info:
### http://scikit-learn.org/stable/modules/pipeline.html
from sklearn.preprocessing import MinMaxScaler
from sklearn.feature_selection import SelectKBest
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC
from sklearn.neighbors import KNeighborsClassifier
from sklearn.pipeline import Pipeline
# Potential pipeline steps
scaler = MinMaxScaler()
select = SelectKBest()
dtc = DecisionTreeClassifier()
svc = SVC()
knc = KNeighborsClassifier()
# Load pipeline steps into list
# (only SelectKBest + decision tree are active; others kept for experiments)
steps = [
    # Preprocessing
    # ('min_max_scaler', scaler),
    # Feature selection
    ('feature_selection', select),
    # Classifier
    ('dtc', dtc)
    # ('svc', svc)
    # ('knc', knc)
]
# Create pipeline
pipeline = Pipeline(steps)
# Parameters to try in grid search
# NOTE(review): min_samples_split=1 is only accepted by older scikit-learn
# releases; versions >= 0.18 require a value of at least 2 — confirm the
# pinned version before re-running.
parameters = dict(
    feature_selection__k=[2, 3, 5, 6],
    dtc__criterion=['gini', 'entropy'],
    # dtc__splitter=['best', 'random'],
    dtc__max_depth=[None, 1, 2, 3, 4],
    dtc__min_samples_split=[1, 2, 3, 4, 25],
    # dtc__min_samples_leaf=[1, 2, 3, 4],
    # dtc__min_weight_fraction_leaf=[0, 0.25, 0.5],
    dtc__class_weight=[None, 'balanced'],
    dtc__random_state=[42]
    # svc__C=[0.1, 1, 10, 100, 1000],
    # svc__kernel=['rbf'],
    # svc__gamma=[0.001, 0.0001]
    # knc__n_neighbors=[1, 2, 3, 4, 5],
    # knc__leaf_size=[1, 10, 30, 60],
    # knc__algorithm=['auto', 'ball_tree', 'kd_tree', 'brute']
)
### Task 5: Tune your classifier to achieve better than .3 precision and recall
### using our testing script. Check the tester.py script in the final project
### folder for details on the evaluation method, especially the test_classifier
### function. Because of the small size of the dataset, the script uses
### stratified shuffle split cross validation. For more info:
### http://scikit-learn.org/stable/modules/generated/sklearn.cross_validation.StratifiedShuffleSplit.html
from sklearn.cross_validation import train_test_split, StratifiedShuffleSplit
from sklearn.grid_search import GridSearchCV
from sklearn.metrics import classification_report
# Create training sets and test sets
features_train, features_test, labels_train, labels_test = \
    train_test_split(features, labels, test_size=0.3, random_state=42)
# Cross-validation for parameter tuning in grid search
sss = StratifiedShuffleSplit(
    labels_train,
    n_iter = 20,
    test_size = 0.5,
    random_state = 0
    )
# Create, fit, and make predictions with grid search
gs = GridSearchCV(pipeline,
	              param_grid=parameters,
	              scoring="f1",
	              cv=sss,
	              error_score=0)
gs.fit(features_train, labels_train)
labels_predictions = gs.predict(features_test)
# Pick the classifier with the best tuned parameters
clf = gs.best_estimator_
print "\n", "Best parameters are: ", gs.best_params_, "\n"
# Print features selected and their importances
features_selected=[features_list[i+1] for i in clf.named_steps['feature_selection'].get_support(indices=True)]
scores = clf.named_steps['feature_selection'].scores_
importances = clf.named_steps['dtc'].feature_importances_
import numpy as np
indices = np.argsort(importances)[::-1]
print 'The ', len(features_selected), " features selected and their importances:"
for i in range(len(features_selected)):
    print "feature no. {}: {} ({}) ({})".format(i+1,features_selected[indices[i]],importances[indices[i]], scores[indices[i]])
# Print classification report (focus on precision and recall)
report = classification_report( labels_test, labels_predictions )
print(report)
### Task 6: Dump your classifier, dataset, and features_list so anyone can
### check your results. You do not need to change anything below, but make sure
### that the version of poi_id.py that you submit can be run on its own and
### generates the necessary .pkl files for validating your results.
dump_classifier_and_data(clf, my_dataset, features_list)
| {
"content_hash": "9867e263f87e2ca2ad38926b55d0db97",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 128,
"avg_line_length": 37.95813953488372,
"alnum_prop": 0.6403627006494302,
"repo_name": "harish-garg/Machine-Learning",
"id": "715b367af2a70dc2ddc190d5e41d8c264ec0ffe5",
"size": "8180",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "udacity/enron/ud120-projects-master/final_project/poi_id2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "DIGITAL Command Language",
"bytes": "456078"
},
{
"name": "HTML",
"bytes": "573730"
},
{
"name": "Jupyter Notebook",
"bytes": "496068"
},
{
"name": "Python",
"bytes": "87423"
}
],
"symlink_target": ""
} |
'''
Informations Retrieval Library
==============================
SuperList is an alternatice to Python's default lists (arrays)
'''
# Author: Tarek Amr <@gr33ndata>
class SuperList(list):
    ''' SuperList: An alternative to Python's default lists (arrays),
    extended with element-wise arithmetic, zero-padding and
    ordered-insert helper methods.
    '''

    def align_to_list(self, b):
        ''' Make sure self and b are equal in length,
        zero-padding the shorter one in place.
        '''
        if len(self) < len(b):
            self.expand(len(b))
        elif len(b) < len(self):
            b.expand(len(self))

    def add(self, b):
        ''' Add b to the list: real-number scalars are added to every
        item, anything else is added element-wise (see add_list).
        '''
        # isinstance (rather than type(b) == int) also accepts floats and
        # int subclasses, which previously crashed in the list branch.
        if isinstance(b, (int, float)):
            self.add_number(b)
        else:
            self.add_list(b)

    def add_list(self, b):
        ''' Add lists, item to item.
        NOTE: b must itself support expand() (i.e. be a SuperList)
        whenever it is shorter than self.
        '''
        self.align_to_list(b)
        for i in range(len(self)):
            self[i] += b[i]

    def add_number(self, b):
        ''' Add the scalar b to every item in place. '''
        for i in range(len(self)):
            self[i] += b

    def div(self, b):
        ''' Divide the list by b: real-number scalars divide every item,
        anything else divides element-wise (see div_list).
        '''
        if isinstance(b, (int, float)):
            self.div_number(b)
        else:
            self.div_list(b)

    def div_list(self, b):
        ''' Divide item by item; results are floats.
        Raises ZeroDivisionError if b contains (or was padded with) zeros.
        '''
        self.align_to_list(b)
        for i in range(len(self)):
            self[i] = float(self[i]) / b[i]

    def div_number(self, b):
        ''' Divide every item by the scalar b in place (float division). '''
        for i in range(len(self)):
            self[i] = float(self[i]) / b

    def nonzero_count(self):
        ''' Returns number of strictly positive items in the list
        (zeros and negative values are not counted).
        '''
        return sum([1 for item in self if item > 0])

    def unique_append(self, item):
        ''' Only append item to list if not already there,
        in case we want our list to act like a set.
        Returns the index of the added (or already present) item.'''
        if item in self:
            return self.index(item)
        else:
            self.append(item)
            return len(self) - 1

    def _expand(self, new_len=0, padding_data=float(0)):
        ''' /!\ This is an old piece of code,
        We replaced it with more optimized one.
        Underscored and left for testing purpose for now.
        Expand a list size to new_len,
        then fill new cells with padding_data.
        The default padding_data is float(0).
        '''
        for i in range(len(self), new_len):
            self.append(padding_data)

    def expand(self, new_len=0, padding_data=float(0)):
        ''' Expand a list size to new_len,
        then fill new cells with padding_data.
        The default padding_data is float(0).
        No-op when the list is already new_len items or longer.
        '''
        new_tail = [padding_data] * (new_len - len(self))
        self += new_tail

    def insert_after_padding(self, index, item, padding_data=float(0)):
        ''' Add item in specific index location, and expand if needed.
        Notice that the original insert method for lists
        just adds items to end of list if index is bigger than length.
        Also, unlike the original list insert method,
        if there is an existing item at index, it is overwritten.
        The stored value is coerced to float.
        '''
        self.expand(new_len=index + 1, padding_data=padding_data)
        self[index] = float(item)

    def increment_after_padding(self, index, item, padding_data=float(0)):
        ''' Just like insert_after_padding(),
        however, existing items at index are incremented (not coerced).
        '''
        self.expand(new_len=index + 1, padding_data=padding_data)
        self[index] = self[index] + item

    # We need to implement this
    # def populate_in_order(self, item, less_than):

    def populate_in_reverse_order(self, item, greater_than):
        ''' Add items to list, but in order.
        Bigger items are kept at the beginning of the list;
        greater_than is the comparison callable used.
        '''
        if self == []:
            self.append(item)
        elif greater_than(item, self[0]):
            self.insert(0, item)
        else:
            for j in range(0, len(self)):
                if greater_than(item, self[j]):
                    self.insert(j, item)
                    break
            else:
                self.append(item)
if __name__ == '__main__':
pass
| {
"content_hash": "9e2bdb36175fc112ad35e47a4c89b634",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 76,
"avg_line_length": 32.627906976744185,
"alnum_prop": 0.5407460204324067,
"repo_name": "gr33ndata/irlib",
"id": "b7446bed4975536104f463c10eb63a87e470a643",
"size": "4209",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "irlib/superlist.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "29"
},
{
"name": "Python",
"bytes": "111610"
}
],
"symlink_target": ""
} |
from settings import *
DEBUG = False
# Accept any Host header; routing/host validation is handled by Heroku.
ALLOWED_HOSTS = [
    '*',
]
# Store uploaded media and collected static files on S3.
DEFAULT_FILE_STORAGE = 'balin.s3utils.MediaRootS3BotoStorage'
STATICFILES_STORAGE = 'balin.s3utils.StaticRootS3BotoStorage'
# AWS credentials and bucket name come from the environment, never the repo.
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID', '')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY', '')
AWS_STORAGE_BUCKET_NAME = os.environ.get('BUCKET_NAME', '')
# Serve public, unsigned URLs with long-lived cache headers.
AWS_QUERYSTRING_AUTH = False
AWS_HEADERS = {
    'Expires': 'Thu, 15 Apr 2010 20:00:00 GMT',
    'Cache-Control': 'max-age=86400',
}
# NOTE(review): MEDIA_URL points at the same static/ prefix as STATIC_URL —
# looks like a copy/paste; confirm media uploads really belong under static/.
MEDIA_URL = 'https://%s.s3.amazonaws.com/static/' % AWS_STORAGE_BUCKET_NAME
STATIC_URL = 'https://%s.s3.amazonaws.com/static/' % AWS_STORAGE_BUCKET_NAME
| {
"content_hash": "836dd15196a18849525bc6d06b2be249",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 76,
"avg_line_length": 30.40909090909091,
"alnum_prop": 0.6995515695067265,
"repo_name": "gilsondev/balin",
"id": "eb62a0311f75b97a495259f08c264af122627f34",
"size": "693",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "balin/settings_heroku.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "140353"
},
{
"name": "JavaScript",
"bytes": "1359394"
},
{
"name": "Python",
"bytes": "15330"
},
{
"name": "Shell",
"bytes": "453"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import argparse
import time
from arrow.apollo import get_apollo_instance
if __name__ == '__main__':
    # CLI: the e-mail is mandatory; first/last names have friendly defaults.
    parser = argparse.ArgumentParser(description='Sample script to add an account via web services')
    parser.add_argument('email', help='User Email')
    parser.add_argument('--first', help='First Name', default='Jane')
    parser.add_argument('--last', help='Last Name', default='Aggie')
    args = parser.parse_args()

    wa = get_apollo_instance()
    password = wa.users._password_generator(12)
    time.sleep(1)

    # Look for an existing account with this username.
    matches = [existing for existing in wa.users.get_users()
               if existing['username'] == args.email]
    if len(matches) == 1:
        # Re-running the script updates the name and regenerates the password.
        returnData = wa.users.update_user(args.email, args.first, args.last, password)
        print('Updated User\nUsername: %s\nPassword: %s' % (args.email, password))
    else:
        returnData = wa.users.create_user(args.email, args.first, args.last, password, role='user')
        print('Created User\nUsername: %s\nPassword: %s' % (args.email, password))
    print("Return data: " + str(returnData))
| {
"content_hash": "91aa51de73fe6ae04d2826a66c4f3d43",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 100,
"avg_line_length": 36.4375,
"alnum_prop": 0.6552315608919382,
"repo_name": "galaxy-genome-annotation/galaxy-tools",
"id": "bb30afdd90ce00370da87e6491b35b71a20d067a",
"size": "1188",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tools/apollo/create_account.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "78373"
},
{
"name": "Shell",
"bytes": "3979"
}
],
"symlink_target": ""
} |
import StringIO
from twisted.python import reflect, failure
from twisted.python.util import dsu
from twisted.internet import defer
from twisted.trial import unittest, runner, reporter
class MockEquality(object):
    """Test double whose equality compares only the first character of
    ``name``, and which refuses (ValueError) to compare against objects
    lacking a ``name`` attribute.  Deliberately defines no __ne__.
    """

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return "MockEquality(%s)" % (self.name,)

    def __eq__(self, other):
        if hasattr(other, 'name'):
            return self.name[0] == other.name[0]
        raise ValueError("%r not comparable to %r" % (other, self))
class TestAssertions(unittest.TestCase):
    """Tests for TestCase's assertion methods.  That is, failUnless*,
    failIf*, assert*.
    This is pretty paranoid.  Still, a certain paranoia is healthy if you
    are testing a unit testing framework.
    """
    # Minimal test case that always fails; used to exercise the runner.
    class FailingTest(unittest.TestCase):
        def test_fails(self):
            raise self.failureException()
    def testFail(self):
        # self.fail must raise failureException carrying exactly its message.
        try:
            self.fail("failed")
        except self.failureException, e:
            if not str(e) == 'failed':
                # NOTE(review): due to %-operator precedence this formats
                # "...%s instead of %s" % str(e) (one value for two
                # placeholders) and passes 'failed' as a second argument to
                # failureException — it would raise TypeError if reached.
                raise self.failureException("Exception had msg %s instead of %s"
                                            % str(e), 'failed')
        else:
            raise self.failureException("Call to self.fail() didn't fail test")
    def test_failingException_fails(self):
        # Running FailingTest must record one failure, zero errors.
        test = runner.TestLoader().loadClass(TestAssertions.FailingTest)
        # NOTE(review): 'io' appears unused below — left as-is.
        io = StringIO.StringIO()
        result = reporter.TestResult()
        test.run(result)
        self.failIf(result.wasSuccessful())
        self.failUnlessEqual(result.errors, [])
        self.failUnlessEqual(len(result.failures), 1)
    def test_failIf(self):
        # failIf passes on falsy values and fails (with the message) on truthy.
        for notTrue in [0, 0.0, False, None, (), []]:
            self.failIf(notTrue, "failed on %r" % (notTrue,))
        for true in [1, True, 'cat', [1,2], (3,4)]:
            try:
                self.failIf(true, "failed on %r" % (true,))
            except self.failureException, e:
                self.failUnlessEqual(str(e), "failed on %r" % (true,))
            else:
                self.fail("Call to failIf(%r) didn't fail" % (true,))
    def test_failUnless(self):
        # Mirror image of test_failIf.
        for notTrue in [0, 0.0, False, None, (), []]:
            try:
                self.failUnless(notTrue, "failed on %r" % (notTrue,))
            except self.failureException, e:
                self.failUnlessEqual(str(e), "failed on %r" % (notTrue,))
            else:
                self.fail("Call to failUnless(%r) didn't fail" % (notTrue,))
        for true in [1, True, 'cat', [1,2], (3,4)]:
            self.failUnless(true, "failed on %r" % (true,))
    # Helpers shared by the failUnlessEqual tests below.
    def _testEqualPair(self, first, second):
        x = self.failUnlessEqual(first, second)
        if x != first:
            self.fail("failUnlessEqual should return first parameter")
    def _testUnequalPair(self, first, second):
        try:
            self.failUnlessEqual(first, second)
        except self.failureException, e:
            expected = '%r != %r' % (first, second)
            if str(e) != expected:
                self.fail("Expected: %r; Got: %s" % (expected, str(e)))
        else:
            self.fail("Call to failUnlessEqual(%r, %r) didn't fail"
                      % (first, second))
    def test_failUnlessEqual_basic(self):
        self._testEqualPair('cat', 'cat')
        self._testUnequalPair('cat', 'dog')
        self._testEqualPair([1], [1])
        self._testUnequalPair([1], 'orange')
    def test_failUnlessEqual_custom(self):
        # MockEquality compares by first letter of name only.
        x = MockEquality('first')
        y = MockEquality('second')
        z = MockEquality('fecund')
        self._testEqualPair(x, x)
        self._testEqualPair(x, z)
        self._testUnequalPair(x, y)
        self._testUnequalPair(y, z)
    def test_failUnlessEqual_incomparable(self):
        # A ValueError raised inside __eq__ must propagate, not be swallowed.
        apple = MockEquality('apple')
        orange = ['orange']
        try:
            self.failUnlessEqual(apple, orange)
        except self.failureException:
            self.fail("Fail raised when ValueError ought to have been raised.")
        except ValueError:
            # good. error not swallowed
            pass
        else:
            self.fail("Comparing %r and %r should have raised an exception"
                      % (apple, orange))
    def _raiseError(self, error):
        raise error
    def test_failUnlessRaises_expected(self):
        x = self.failUnlessRaises(ValueError, self._raiseError, ValueError)
        self.failUnless(isinstance(x, ValueError),
                        "Expect failUnlessRaises to return instance of raised "
                        "exception.")
    def test_failUnlessRaises_unexpected(self):
        try:
            self.failUnlessRaises(ValueError, self._raiseError, TypeError)
        except TypeError:
            self.fail("failUnlessRaises shouldn't re-raise unexpected "
                      "exceptions")
        except self.failureException, e:
            # what we expect
            pass
        else:
            self.fail("Expected exception wasn't raised. Should have failed")
    def test_failUnlessRaises_noException(self):
        try:
            self.failUnlessRaises(ValueError, lambda : None)
        except self.failureException, e:
            self.failUnlessEqual(str(e),
                                 'ValueError not raised (None returned)')
        else:
            self.fail("Exception not raised. Should have failed")
    def test_failUnlessRaises_failureException(self):
        # failureException itself must be catchable like any other exception.
        x = self.failUnlessRaises(self.failureException, self._raiseError,
                                  self.failureException)
        self.failUnless(isinstance(x, self.failureException),
                        "Expected %r instance to be returned"
                        % (self.failureException,))
        try:
            x = self.failUnlessRaises(self.failureException, self._raiseError,
                                      ValueError)
        except self.failureException, e:
            # what we expect
            pass
        else:
            self.fail("Should have raised exception")
    def test_failIfEqual_basic(self):
        x, y, z = [1], [2], [1]
        ret = self.failIfEqual(x, y)
        self.failUnlessEqual(ret, x,
                             "failIfEqual should return first parameter")
        self.failUnlessRaises(self.failureException,
                              self.failIfEqual, x, x)
        self.failUnlessRaises(self.failureException,
                              self.failIfEqual, x, z)
    def test_failIfEqual_customEq(self):
        x = MockEquality('first')
        y = MockEquality('second')
        z = MockEquality('fecund')
        ret = self.failIfEqual(x, y)
        self.failUnlessEqual(ret, x,
                             "failIfEqual should return first parameter")
        self.failUnlessRaises(self.failureException,
                              self.failIfEqual, x, x)
        # test when __ne__ is not defined
        self.failIfEqual(x, z, "__ne__ not defined, so not equal")
    def test_failUnlessIdentical(self):
        # Identity (is), not equality: y equals x but is a distinct object.
        x, y, z = [1], [1], [2]
        ret = self.failUnlessIdentical(x, x)
        self.failUnlessEqual(ret, x,
                             'failUnlessIdentical should return first '
                             'parameter')
        self.failUnlessRaises(self.failureException,
                              self.failUnlessIdentical, x, y)
        self.failUnlessRaises(self.failureException,
                              self.failUnlessIdentical, x, z)
    def test_failUnlessApproximates(self):
        x, y, z = 1.0, 1.1, 1.2
        self.failUnlessApproximates(x, x, 0.2)
        ret = self.failUnlessApproximates(x, y, 0.2)
        self.failUnlessEqual(ret, x, "failUnlessApproximates should return "
                             "first parameter")
        self.failUnlessRaises(self.failureException,
                              self.failUnlessApproximates, x, z, 0.1)
        self.failUnlessRaises(self.failureException,
                              self.failUnlessApproximates, x, y, 0.1)
    def test_failUnlessAlmostEqual(self):
        precision = 5
        x = 8.000001
        y = 8.00001
        z = 8.000002
        self.failUnlessAlmostEqual(x, x, precision)
        ret = self.failUnlessAlmostEqual(x, z, precision)
        self.failUnlessEqual(ret, x, "failUnlessAlmostEqual should return "
                             "first parameter (%r, %r)" % (ret, x))
        self.failUnlessRaises(self.failureException,
                              self.failUnlessAlmostEqual, x, y, precision)
    def test_failIfAlmostEqual(self):
        precision = 5
        x = 8.000001
        y = 8.00001
        z = 8.000002
        ret = self.failIfAlmostEqual(x, y, precision)
        self.failUnlessEqual(ret, x, "failIfAlmostEqual should return "
                             "first parameter (%r, %r)" % (ret, x))
        self.failUnlessRaises(self.failureException,
                              self.failIfAlmostEqual, x, x, precision)
        self.failUnlessRaises(self.failureException,
                              self.failIfAlmostEqual, x, z, precision)
    def test_failUnlessSubstring(self):
        x = "cat"
        y = "the dog sat"
        z = "the cat sat"
        self.failUnlessSubstring(x, x)
        ret = self.failUnlessSubstring(x, z)
        self.failUnlessEqual(ret, x, 'should return first parameter')
        self.failUnlessRaises(self.failureException,
                              self.failUnlessSubstring, x, y)
        self.failUnlessRaises(self.failureException,
                              self.failUnlessSubstring, z, x)
    def test_failIfSubstring(self):
        x = "cat"
        y = "the dog sat"
        z = "the cat sat"
        self.failIfSubstring(z, x)
        ret = self.failIfSubstring(x, y)
        self.failUnlessEqual(ret, x, 'should return first parameter')
        self.failUnlessRaises(self.failureException,
                              self.failIfSubstring, x, x)
        self.failUnlessRaises(self.failureException,
                              self.failIfSubstring, x, z)
    def test_assertFailure(self):
        d = defer.maybeDeferred(lambda: 1/0)
        return self.assertFailure(d, ZeroDivisionError)
    def test_assertFailure_wrongException(self):
        d = defer.maybeDeferred(lambda: 1/0)
        self.assertFailure(d, OverflowError)
        d.addCallbacks(lambda x: self.fail('Should have failed'),
                       lambda x: x.trap(self.failureException))
        return d
    def test_assertFailure_noException(self):
        d = defer.succeed(None)
        self.assertFailure(d, ZeroDivisionError)
        d.addCallbacks(lambda x: self.fail('Should have failed'),
                       lambda x: x.trap(self.failureException))
        return d
    def test_assertFailure_moreInfo(self):
        """In the case of assertFailure failing, check that we get lots of
        information about the exception that was raised.
        """
        try:
            1/0
        except ZeroDivisionError:
            f = failure.Failure()
            d = defer.fail(f)
        d = self.assertFailure(d, RuntimeError)
        d.addErrback(self._checkInfo, f)
        return d
    def _checkInfo(self, assertionFailure, f):
        # The assertion failure's message must embed the original failure's
        # message and brief traceback.
        assert assertionFailure.check(self.failureException)
        output = assertionFailure.getErrorMessage()
        self.assertIn(f.getErrorMessage(), output)
        self.assertIn(f.getBriefTraceback(), output)
    def test_assertFailure_masked(self):
        """A single wrong assertFailure should fail the whole test.
        """
        class ExampleFailure(Exception):
            pass
        class TC(unittest.TestCase):
            failureException = ExampleFailure
            def test_assertFailure(self):
                d = defer.maybeDeferred(lambda: 1/0)
                self.assertFailure(d, OverflowError)
                self.assertFailure(d, ZeroDivisionError)
                return d
        test = TC('test_assertFailure')
        result = reporter.TestResult()
        test.run(result)
        self.assertEqual(1, len(result.failures))
class TestAssertionNames(unittest.TestCase):
    """Tests for consistency of naming within TestCase assertion methods
    (assert*/failUnless* and assertNot*/failIf* must mirror each other,
    and *Equal/*Equals spellings must be aliases).
    """
    def _getAsserts(self):
        # All assert* methods except the assertNot* family and '_'.
        dct = {}
        reflect.accumulateMethods(self, dct, 'assert')
        return [ dct[k] for k in dct if not k.startswith('Not') and k != '_' ]
    def _name(self, x):
        return x.__name__
    def test_failUnless_matches_assert(self):
        asserts = self._getAsserts()
        failUnlesses = reflect.prefixedMethods(self, 'failUnless')
        self.failUnlessEqual(dsu(asserts, self._name),
                             dsu(failUnlesses, self._name))
    def test_failIf_matches_assertNot(self):
        asserts = reflect.prefixedMethods(unittest.TestCase, 'assertNot')
        failIfs = reflect.prefixedMethods(unittest.TestCase, 'failIf')
        self.failUnlessEqual(dsu(asserts, self._name),
                             dsu(failIfs, self._name))
    def test_equalSpelling(self):
        # Every ...Equal method must have a ...Equals alias and vice versa.
        for name, value in vars(self).items():
            if not callable(value):
                continue
            if name.endswith('Equal'):
                self.failUnless(hasattr(self, name+'s'),
                                "%s but no %ss" % (name, name))
                self.failUnlessEqual(value, getattr(self, name+'s'))
            if name.endswith('Equals'):
                self.failUnless(hasattr(self, name[:-1]),
                                "%s but no %s" % (name, name[:-1]))
                self.failUnlessEqual(value, getattr(self, name[:-1]))
| {
"content_hash": "900fe65f6547e4cc84dc5232920a32b6",
"timestamp": "",
"source": "github",
"line_count": 351,
"max_line_length": 80,
"avg_line_length": 39.01424501424501,
"alnum_prop": 0.5709799912370381,
"repo_name": "santisiri/popego",
"id": "0d74e32b69a962bdb7de4edccef204f282f24a5a",
"size": "13694",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "envs/ALPHA-POPEGO/lib/python2.5/site-packages/twisted/trial/test/test_assertions.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1246"
},
{
"name": "C",
"bytes": "504141"
},
{
"name": "C++",
"bytes": "26125"
},
{
"name": "CSS",
"bytes": "342653"
},
{
"name": "FORTRAN",
"bytes": "4872"
},
{
"name": "GAP",
"bytes": "13267"
},
{
"name": "Genshi",
"bytes": "407"
},
{
"name": "Groff",
"bytes": "17116"
},
{
"name": "HTML",
"bytes": "383181"
},
{
"name": "JavaScript",
"bytes": "1090769"
},
{
"name": "Makefile",
"bytes": "2441"
},
{
"name": "Mako",
"bytes": "376944"
},
{
"name": "Python",
"bytes": "20895618"
},
{
"name": "Ruby",
"bytes": "3380"
},
{
"name": "Shell",
"bytes": "23581"
},
{
"name": "Smarty",
"bytes": "522"
},
{
"name": "TeX",
"bytes": "35712"
}
],
"symlink_target": ""
} |
from skrutil import string_utils
from skrutil.string_utils import indent
_JNI_BR = '\n\n'
_JNI_SPACE = ' '
class JniClass:
"""JNI part generator, responsible for generating JNI source code for Object and ObjectManager.
"""
    def __init__(self, group_name, class_name, jni_variable_list, jni_manager_or_none):
        """Init JniClass with necessary parameters.

        Args:
            group_name: A string which is the C++ folder name.
            class_name: A string which is the C++ class name.
            jni_variable_list: List of <JniVariable>.
            jni_manager_or_none: <JniManager>, or None when the object has
                no manager counterpart.
        """
        self.__group_name = group_name
        self.__class_name = class_name
        self.__jni_var_list = jni_variable_list
        self.__jni_manager_or_none = jni_manager_or_none
        # Propagate naming/context into the manager so its generated code
        # matches this object (plural name is naive: class name + 's').
        if self.__jni_manager_or_none is not None:
            self.__jni_manager_or_none.set_object_name(class_name, class_name + 's')
            self.__jni_manager_or_none.set_jni_variable_list(jni_variable_list)
            self.__jni_manager_or_none.set_group_name(group_name)
        # extern "C" guards emitted around the generated JNI code.
        self.__def_cpp = '#ifdef __cplusplus\nextern "C" {\n#endif'
        self.__end_def_cpp = '#ifdef __cplusplus\n}\n#endif'
def generate_header(self):
"""Gets JNI object header. It is not required, so since 5.0, we don't use this method anymore.
Returns:
A string which is the declaration of JNI object header.
"""
file_name = 'com_lesschat_core_{0}_{1}.h'.format(self.__group_name, self.__class_name)
file_path = 'build/jni/' + file_name
output_header = open(file_path, 'w')
def_guard = '#ifndef _Included_com_lesschat_core_{0}_{1}\n#define _Included_com_lesschat_core_{0}_{1}'.format(
self.__group_name, self.__class_name)
end_def_guard = '#endif'
output_header.write('#include <jni.h>')
output_header.write(_JNI_BR)
output_header.write(def_guard + '\n')
output_header.write(self.__def_cpp + _JNI_BR)
# release method
output_header.write(self.__release())
output_header.write(_JNI_BR)
for jni_var in self.__jni_var_list:
output_header.write(jni_var.getter())
output_header.write(_JNI_BR)
output_header.write(_JNI_BR)
output_header.write(self.__end_def_cpp + '\n')
output_header.write(end_def_guard + '\n')
def generate_implementation(self):
"""Gets JNI implementation which is used before 4.0.
Returns:
A string which is JNI object implementation.
"""
file_name = 'com_lesschat_core_{0}_{1}.cc'.format(self.__group_name, self.__class_name)
file_path = 'build/jni/' + file_name
output_header = open(file_path, 'w')
style_class_name = string_utils.cpp_class_name_to_cpp_file_name(self.__class_name)
header_name = 'com_lesschat_core_{0}_{1}.h'.format(self.__group_name, self.__class_name)
cpp_name = '{0}/{1}.h'.format(self.__group_name, style_class_name)
output_header.write('#include "{0}"'.format(header_name) + '\n')
output_header.write('#include "{0}"'.format(cpp_name) + '\n')
output_header.write('#include "utils/android/jni_helper.h"')
output_header.write(_JNI_BR)
output_header.write(self.__def_cpp + _JNI_BR)
# release method
output_header.write(self.__release_impl())
output_header.write(_JNI_BR)
for jni_var in self.__jni_var_list:
output_header.write(jni_var.getter_impl())
output_header.write(_JNI_BR)
output_header.write(self.__end_def_cpp)
def generate_jni_helper_implementation(self, config):
"""Gets JNI helper object converting method implementation & declaration.
Returns:
A string which is JNI helper object converting method implementation & declaration.
"""
file_name = 'jni_helper_{0}.cc'.format(self.__class_name)
file_path = 'build/jni/' + file_name
output_cc = open(file_path, 'w')
impl = '// Copy belows to core/utils/android/jni_helper.h\n\n\n'
impl += '{0}\n\n'.format(self.__jni_get_jobject_by_core_object_declaration())
impl += self.__jni_get_jobjects_array_by_core_objects_declaration() + '\n'
impl += '\n\n\n'
impl += '// Copy belows to core/utils/android/jni_helper.cc\n\n\n'
impl += self.__jni_get_jobject_by_core_object_implementation(config)
impl += '\n\n'
impl += self.__jni_get_jobjects_array_by_core_objects_implementation()
impl += '\n'
output_cc.write(impl)
def generate_manager_header(self):
"""Gets JNI object manager header. It is not required, so since 5.0, we don't use this method anymore.
Returns:
A string which is the declaration of JNI object manager header.
"""
if self.__jni_manager_or_none is None:
return
jni_manager = self.__jni_manager_or_none
file_name = 'com_lesschat_core_{0}_{1}Manager.h'.format(self.__group_name, self.__class_name)
file_path = 'build/jni/' + file_name
output_header = open(file_path, 'w')
def_header = '#ifndef _Included_com_lesschat_core_{0}_{1}Manager\n' \
'#define _Included_com_lesschat_core_{0}_{1}Manager'
def_guard = def_header.format(self.__group_name, self.__class_name)
end_def_guard = '#endif'
output_header.write('#include <jni.h>' + _JNI_BR)
output_header.write(def_guard + '\n')
output_header.write(self.__def_cpp + _JNI_BR)
output_header.write(jni_manager.generate_fetch_declarations())
output_header.write(jni_manager.generate_http_function_declarations())
output_header.write(self.__end_def_cpp + '\n')
output_header.write(end_def_guard + '\n')
def generate_manager_implementation(self, version, config):
"""Gets JNI object manager implementation.
Args:
version: A float version number of <JniModelXmlParser>.
config: A <Config> object describes user-defined names.
Returns:
A string which is JNI object manager implementation.
"""
if self.__jni_manager_or_none is None:
return
jni_manager = self.__jni_manager_or_none
file_name = '{2}_{0}_{1}Manager.cc'.format(self.__group_name, self.__class_name, config.jni_package_path)
file_path = 'build/jni/' + file_name
output_header = open(file_path, 'w')
cpp_name = '#include "{0}/{1}_manager.h"\n'\
.format(self.__group_name, string_utils.cpp_class_name_to_cpp_file_name(self.__class_name))
output_header.write(cpp_name)
output_header.write('#include "utils/android/jni_helper.h"')
output_header.write(_JNI_BR)
output_header.write('#include "utils/android/jni_ref_cache.h"' + '\n\n')
output_header.write('#include "utils/memory_cache.h"' + '\n\n')
output_header.write(self.__def_cpp)
output_header.write(_JNI_BR)
manager_jclass = '{0}_manager_jclass'.format(self.__class_name)
define_manager_jclass = 'static std::string {0} = "{1}/{2}/{3}Manager";'\
.format(manager_jclass, config.java_package_path, self.__group_name, self.__class_name)
output_header.write(define_manager_jclass)
if version >= 7:
output_header.write(jni_manager.generate_http_java_interface(manager_jclass))
output_header.write("\n\n")
output_header.write(jni_manager.generate_fetch_implementations(version, config))
if version < 7:
output_header.write(jni_manager.generate_http_function_implementations(config))
else:
output_header.write(jni_manager.generate_http_function_implementations_v2(config))
output_header.write(self.__end_def_cpp + '\n')
def __release(self):
return self.__release_method_name() + '\n' + ' (JNIEnv *, jobject, jlong);'
def __release_method_name(self):
return 'JNIEXPORT void JNICALL Java_com_lesschat_core_{0}_{1}_nativeRelease{1}'.\
format(self.__group_name, self.__class_name)
def __release_impl(self):
method_name = self.__release_method_name()
para_name = ' (JNIEnv *env, jobject thiz, jlong handler)'
step_1 = 'lesschat::{0}* {1} = reinterpret_cast<lesschat::{0}*>(handler);'\
.format(self.__class_name, string_utils.first_char_to_lower(self.__class_name))
step_2 = 'LCC_SAFE_DELETE({0});'.format(string_utils.first_char_to_lower(self.__class_name))
return method_name + '\n' + para_name + '{{\n {0}\n {1}\n}}'.format(step_1, step_2)
def __jni_get_jobject_by_core_object_declaration(self):
return 'static jobject GetJ{0}ByCore{0}(const {0}& {1});'.format(
self.__class_name, string_utils.cpp_class_name_to_cpp_file_name(self.__class_name))
def __jni_get_jobject_by_core_object_implementation(self, config):
impl = 'jobject JniHelper::GetJ{0}ByCore{0}(const {0}& {1}) {{\n'.format(
self.__class_name, string_utils.cpp_class_name_to_cpp_file_name(self.__class_name))
impl += indent(2) + 'JNIEnv* env = GetJniEnv();\n'
impl += indent(2) + 'if (!env) {\n'
impl += indent(4) + 'sakura::log_error("Failed to get JNIEnv");\n'
impl += indent(4) + 'return nullptr;\n'
impl += indent(2) + '}\n\n'
impl += indent(2) + 'jclass {0}Jclass = JniReferenceCache::SharedCache()->{1}_jclass();\n'.format(
string_utils.first_char_to_lower(self.__class_name),
string_utils.cpp_class_name_to_cpp_file_name(self.__class_name))
impl += indent(2) + 'jmethodID {0}ConstructorMethodID = env->GetMethodID({0}Jclass, "<init>", "('.format(
string_utils.first_char_to_lower(self.__class_name))
for jni_var in self.__jni_var_list:
impl += jni_var.var_type.to_jni_signature()
impl += ')V");\n\n'
for jni_var in self.__jni_var_list:
impl += indent(2) + jni_var.jni_var_assignment_by_cpp_variable(config) + '\n'
impl += '\n'
constructor_fst_line = indent(2) + 'jobject j{0}Object = env->NewObject('.format(self.__class_name)
num_constructor_indent = len(constructor_fst_line)
impl += constructor_fst_line
parameters = []
jclass_instance_name = '{0}Jclass'.format(string_utils.first_char_to_lower(self.__class_name))
constructor_method_id = '{0}ConstructorMethodID'.format(string_utils.first_char_to_lower(self.__class_name))
parameters.append(constructor_method_id)
for jni_var in self.__jni_var_list:
parameters.append('j{0}'.format(string_utils.to_title_style_name(jni_var.name)))
impl += jclass_instance_name + ',\n'
for parameter in parameters:
impl += indent(num_constructor_indent) + parameter + ',\n'
impl = impl[:-2]
impl += ');'
impl += '\n'
for jni_var in self.__jni_var_list:
delete_method = jni_var.jni_delete_local_ref()
if delete_method != '':
impl += indent(2) + delete_method + '\n'
impl += '\n'
impl += indent(2) + 'return j{0}Object;'.format(self.__class_name)
impl += '\n'
impl += '}\n'
impl += '\n'
return impl
def __jni_get_jobjects_array_by_core_objects_declaration(self):
return 'static jobjectArray GetJ{0}sArrayByCore{0}s(const std::vector<std::unique_ptr<{0}>>& {1}s);'.format(
self.__class_name, string_utils.cpp_class_name_to_cpp_file_name(self.__class_name))
def __jni_get_jobjects_array_by_core_objects_implementation(self):
object_name = string_utils.cpp_class_name_to_cpp_file_name(self.__class_name)
impl = 'jobjectArray JniHelper::GetJ{0}sArrayByCore{0}s(const std::vector<std::unique_ptr<{0}>>& {1}s) {{'.format(
self.__class_name, object_name)
impl += '\n'
impl += indent(2) + 'jclass {0}Jclass = JniReferenceCache::SharedCache()->{1}_jclass();\n'.format(
string_utils.first_char_to_lower(self.__class_name),
object_name)
impl += indent(2) + 'JNIEnv* env = GetJniEnv();\n'
impl += indent(2) + 'if (!env) {\n'
impl += indent(4) + 'return env->NewObjectArray(0, {0}Jclass, NULL);\n'.format(
string_utils.first_char_to_lower(self.__class_name))
impl += indent(2) + '}\n\n'
impl += indent(2) + 'jobjectArray jobjs = env->NewObjectArray({0}s.size(), {1}Jclass, NULL);\n\n'.format(
object_name,
string_utils.first_char_to_lower(self.__class_name))
impl += indent(2) + 'jsize i = 0;\n'
impl += indent(2) + 'for (auto it = {0}s.begin(); it != {0}s.end(); ++it) {{\n'.format(object_name)
impl += indent(4) + 'jobject j{0} = GetJ{0}ByCore{0}(**it);\n'.format(self.__class_name)
impl += indent(4) + 'env->SetObjectArrayElement(jobjs, i, j{0});\n'.format(self.__class_name)
impl += indent(4) + 'env->DeleteLocalRef(j{0});\n'.format(self.__class_name)
impl += indent(4) + '++i;\n'
impl += indent(2) + '}\n'
impl += indent(2) + 'return jobjs;\n'
impl += '}'
return impl
| {
"content_hash": "4d81d30a68173b3ab4b986c1924cd384",
"timestamp": "",
"source": "github",
"line_count": 306,
"max_line_length": 122,
"avg_line_length": 43.75816993464052,
"alnum_prop": 0.5944734876773712,
"repo_name": "DaYeSquad/cpp-core-model-builder",
"id": "46a0521000f9f969487580a9ea72f3d8c53c7959",
"size": "13472",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "skr_jni_builder/jni_class.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "298777"
}
],
"symlink_target": ""
} |
"""
Server API
Reference for Server API (REST/Json)
OpenAPI spec version: 2.0.21
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kinow_client
from kinow_client.rest import ApiException
from kinow_client.models.prepayment_balance import PrepaymentBalance
class TestPrepaymentBalance(unittest.TestCase):
""" PrepaymentBalance unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testPrepaymentBalance(self):
"""
Test PrepaymentBalance
"""
model = kinow_client.models.prepayment_balance.PrepaymentBalance()
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "17bddfc61d51600f11097bd2ebf02e26",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 74,
"avg_line_length": 19.275,
"alnum_prop": 0.6770428015564203,
"repo_name": "kinow-io/kinow-python-sdk",
"id": "65be9623d7bef264fc3eba3564b9f1feccef6185",
"size": "788",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_prepayment_balance.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4659182"
},
{
"name": "Shell",
"bytes": "1666"
}
],
"symlink_target": ""
} |
import logging
import threading
# Module-level logger for this utility module.
LOG = logging.getLogger(__name__)
def version_is_compatible(imp_version, version):
    """Determine whether versions are compatible.

    :param imp_version: The version implemented
    :param version: The version requested by an incoming message.
    """
    wanted = version.split('.')
    provided = imp_version.split('.')

    # A missing third field means revision 0.  Keep the raw field so the
    # int() conversion only happens when the revision is actually compared.
    wanted_rev = wanted[2] if len(wanted) > 2 else 0
    provided_rev = provided[2] if len(provided) > 2 else 0

    if int(wanted[0]) != int(provided[0]):  # Major versions must match.
        return False
    if int(wanted[1]) > int(provided[1]):  # Requested minor is too new.
        return False
    # Same minor: the requested revision may not exceed the implemented one.
    return not (int(wanted[1]) == int(provided[1]) and
                int(wanted_rev) > int(provided_rev))
class DispatcherExecutorContext(object):
    """Dispatcher executor context helper.

    A dispatcher can have work to do before and after the dispatch of the
    request in the main server thread, while the dispatch itself may run in
    its own thread.

    The executor uses the helper like this:

        callback = dispatcher(incoming)
        callback.prepare()
        thread = MyWhateverThread()
        thread.on_done(callback.done)
        thread.run(callback.run)
    """

    def __init__(self, incoming, dispatch, executor_callback=None,
                 post=None):
        self._incoming = incoming
        self._dispatch = dispatch
        self._executor_callback = executor_callback
        self._post = post
        self._result = None

    def run(self):
        """Dispatch the incoming message.

        May be executed in another thread/greenlet/coroutine when the
        executor supports it.
        """
        try:
            outcome = self._dispatch(self._incoming,
                                     self._executor_callback)
        except Exception:
            msg = 'The dispatcher method must catches all exceptions'
            LOG.exception(msg)
            raise RuntimeError(msg)
        self._result = outcome

    def done(self):
        """Callback run after the incoming message has been dispatched.

        Must be executed in the main executor thread/greenlet/coroutine.
        """
        # FIXME(sileht): this is not currently true, this works only because
        # the driver connection used for polling write on the wire only to
        # ack/requeue message, but what if one day, the driver do something
        # else
        if self._post is None:
            return
        self._post(self._incoming, self._result)
def fetch_current_thread_functor():
    """Return a callable yielding a stable object for the current thread.

    Until https://github.com/eventlet/eventlet/issues/172 is resolved or
    addressed we need this workaround: threading.current_thread() is not
    monkey patched by eventlet, so under green threads every greenlet would
    share the same current_thread() object.  In that case hand back
    eventlet.getcurrent() instead.
    """
    try:
        import eventlet
        from eventlet import patcher
        is_green = patcher.is_monkey_patched('thread')
    except ImportError:
        # eventlet is not installed at all; plain threads are in use.
        is_green = False

    if is_green:
        return lambda: eventlet.getcurrent()
    return lambda: threading.current_thread()
class DummyLock(object):
    """A no-op lock usable both explicitly and as a context manager."""

    def acquire(self):
        # Intentionally does nothing.
        pass

    def release(self):
        # Intentionally does nothing.
        pass

    def __enter__(self):
        self.acquire()

    def __exit__(self, type, value, traceback):
        self.release()
| {
"content_hash": "b28eec222211ca90c98cef81928faf55",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 76,
"avg_line_length": 31.252173913043478,
"alnum_prop": 0.6288258208124652,
"repo_name": "apporc/oslo.messaging",
"id": "cec94bb48ae4bbf8b4e8fa87ec7dcb829006c002",
"size": "4201",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "oslo_messaging/_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "767728"
},
{
"name": "Shell",
"bytes": "7250"
}
],
"symlink_target": ""
} |
import datetime
import random
from salmon.metrics import models
def generate_sample_data(point_numbers, interval):
"""
This function generates sample data and populates the databases
:point_numbers: is an int defining the number of values for each metric
:interval: is an int defining the interval between each results
This method returns a list of metrics
"""
src_names = ['test.example.com', 'salmon.example.com', 'www.example.com']
sources = []
for name in src_names:
sources.append(models.Source.objects.get_or_create(name=name)[0])
sources.append(None)
metric_names = ['ps.virtual_memory_usage',
'ps.disk_io_counters.write_bytes',
'ps.disk_io_counters.read_bytes',
'ps.disk_usage.percent',
'ps.physical_memory_usage.percent']
for source in sources:
for name in metric_names:
metric = models.Metric.objects.get_or_create(source=source,
name=name)[0]
start = datetime.datetime.now() - datetime.timedelta(
minutes=interval * point_numbers)
for i in range(point_numbers):
metric.latest_value = random.randint(1, 100)
metric.last_updated = (start +
datetime.timedelta(minutes=interval * i))
metric.save()
| {
"content_hash": "6df19aa6a758b87221537243cdbfe981",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 80,
"avg_line_length": 39.432432432432435,
"alnum_prop": 0.5812200137080192,
"repo_name": "lincolnloop/salmon",
"id": "778650437f43b701972ba8376403067f73f3cfe3",
"size": "1459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salmon/metrics/tests/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "786"
},
{
"name": "Python",
"bytes": "53812"
}
],
"symlink_target": ""
} |
import copy
import tempfile
import uuid
import jsonschema
import mock
from oslo_serialization import jsonutils as json
from oslo_utils import uuidutils
from sahara import context
from sahara.db.templates import api as template_api
from sahara.db.templates import utils as u
from sahara.tests.unit.conductor import base
from sahara.tests.unit.db.templates import common as c
# Sample on-disk template JSON used by the tests below.  The "{...}"
# placeholder values are substituted by the template machinery before use.
cluster_json = {
    "plugin_name": "vanilla",
    "hadoop_version": "2.6.0",
    "node_groups": [
        {
            "name": "worker",
            "count": 3,
            "node_group_template_id": "{vanilla-260-default-worker}"
        },
        {
            "name": "master",
            "count": 1,
            "node_group_template_id": "{vanilla-260-default-master}"
        }
    ],
    "name": "vanilla-260-default-cluster",
    "neutron_management_network": "{neutron_management_network}",
    "cluster_configs": {}
}

# Node group template for the master node referenced by cluster_json.
master_json = {
    "plugin_name": "vanilla",
    "hadoop_version": "2.6.0",
    "node_processes": [
        "namenode",
        "resourcemanager",
        "hiveserver"
    ],
    "name": "vanilla-260-default-master",
    "floating_ip_pool": "{floating_ip_pool}",
    "flavor_id": "{flavor_id}",
    "auto_security_group": "{auto_security_group}",
    'security_groups': "{security_groups}"
}

# Node group template for the worker nodes referenced by cluster_json.
worker_json = {
    "plugin_name": "vanilla",
    "hadoop_version": "2.6.0",
    "node_processes": [
        "nodemanager",
        "datanode"
    ],
    "name": "vanilla-260-default-worker",
    "floating_ip_pool": "{floating_ip_pool}",
    "flavor_id": "{flavor_id}",
    "auto_security_group": "{auto_security_group}",
    'security_groups': "{security_groups}"
}
class Config(c.Config):
    """Test configuration that always carries a "name" option.

    When the caller does not supply a "name" entry it defaults to
    "update", the CLI command these tests exercise.
    """

    def __init__(self, option_values=None):
        # The original signature used a mutable default (option_values={})
        # and mutated it, sharing state across calls that omit the argument.
        # Copy the caller's dict instead of mutating it, and default "name".
        option_values = dict(option_values) if option_values else {}
        option_values.setdefault("name", "update")
        super(Config, self).__init__(option_values)
class TemplateUpdateTestCase(base.ConductorManagerTestCase):
    def setUp(self):
        """Install a log-capturing Logger into the template api for each test."""
        super(TemplateUpdateTestCase, self).setUp()
        self.logger = c.Logger()
        template_api.set_logger(self.logger)
    @mock.patch("sahara.utils.api_validator.ApiValidator.validate")
    def test_check_cluster_templates_valid(self, validate):
        """Schema failures are reported; on success, node group template
        names in a cluster template are replaced by real ids."""
        self.logger.clear_log()
        ng_templates = [{"template": c.SAMPLE_NGT,
                         "path": "/foo"}]

        # Reference the node group template by name
        clt = copy.copy(c.SAMPLE_CLT)
        clt["node_groups"] = [
            {"name": "test",
             "count": 1,
             "node_group_template_id": "{%s}" % c.SAMPLE_NGT["name"]}
        ]
        cl_templates = [{"template": clt,
                         "path": "/bar"}]

        # Test failed validation
        validate.side_effect = jsonschema.ValidationError("mistake")
        res = template_api.check_cluster_templates_valid(ng_templates,
                                                         cl_templates)
        # A truthy result signals that an error was found.
        self.assertTrue(res)
        msg = "Validation for /bar failed, mistake"
        self.assertIn(msg, self.logger.warnings)

        # Validation passes, name replaced
        validate.side_effect = None
        self.logger.clear_log()
        res = template_api.check_cluster_templates_valid(ng_templates,
                                                         cl_templates)
        self.assertFalse(res)
        # The "{name}" placeholder must have been resolved to a uuid before
        # the template was handed to the validator.
        node_groups = validate.call_args[0][0]["node_groups"]
        self.assertTrue(uuidutils.is_uuid_like(
            node_groups[0]["node_group_template_id"]))
    def test_add_config_section(self):
        """Options are registered under a named group, or under the default
        section when no section name is given."""
        # conf here can't be a mock.Mock() because hasattr will
        # return true
        conf = Config()
        conf.register_group = mock.Mock()
        conf.register_opts = mock.Mock()
        template_api.set_conf(conf)

        opts = ["option"]

        # Named config section
        template_api.add_config_section("section", opts)
        self.assertEqual(1, conf.register_group.call_count)
        config_group = conf.register_group.call_args[0][0]
        self.assertEqual("section", config_group.name)
        self.assertEqual([
            mock.call(opts, config_group)], conf.register_opts.call_args_list)

        conf.register_group.reset_mock()
        conf.register_opts.reset_mock()

        # No config section, opts should be registered against
        # the default section
        template_api.add_config_section(None, opts)
        conf.register_group.assert_not_called()
        conf.register_opts.assert_called_with(opts)
    @mock.patch("sahara.db.templates.api.add_config_section")
    def test_add_config_section_for_template(self, add_config_section):
        """The most specific matching section wins, from the default section
        through plugin, plugin_version, plugin_version_name and bare name."""
        conf = mock.Mock()
        conf.list_all_sections = mock.Mock()
        template_api.set_conf(conf)

        # No config sections
        conf.list_all_sections.return_value = []
        ngt = c.SAMPLE_NGT
        template_api.add_config_section_for_template(ngt)
        add_config_section.assert_called_with(None,
                                              template_api.all_template_opts)

        add_config_section.reset_mock()

        # Add config section matching plugin
        conf.list_all_sections.return_value += [ngt["plugin_name"]]
        template_api.add_config_section_for_template(ngt)
        add_config_section.assert_called_with(ngt["plugin_name"],
                                              template_api.all_template_opts)

        add_config_section.reset_mock()

        # Add config section matching plugin and version
        section = "{plugin_name}_{hadoop_version}".format(**ngt)
        conf.list_all_sections.return_value += [section]
        template_api.add_config_section_for_template(ngt)
        add_config_section.assert_called_with(section,
                                              template_api.all_template_opts)

        add_config_section.reset_mock()

        # Add config section matching plugin, version and name
        # (narrower option set: node group template opts only)
        section = "{plugin_name}_{hadoop_version}_{name}".format(**ngt)
        conf.list_all_sections.return_value += [section]
        template_api.add_config_section_for_template(ngt)
        add_config_section.assert_called_with(
            section,
            template_api.node_group_template_opts)

        add_config_section.reset_mock()

        # Add config section matching name
        section = "{name}".format(**ngt)
        conf.list_all_sections.return_value += [section]
        template_api.add_config_section_for_template(ngt)
        add_config_section.assert_called_with(
            section,
            template_api.node_group_template_opts)

        add_config_section.reset_mock()
def test_substitute_config_values_ngt(self):
ngt = copy.copy(c.SAMPLE_NGT)
ngt["flavor_id"] = "{flavor_id}"
ngt["floating_ip_pool"] = "{floating_ip_pool}"
configs = {"flavor_id": "2",
"floating_ip_pool": None}
template_api.substitute_config_values(configs, ngt, "/path")
self.assertEqual("2", ngt["flavor_id"])
self.assertIsNone(ngt["floating_ip_pool"])
def test_substitute_config_values_clt(self):
clt = copy.copy(c.SAMPLE_CLT)
clt["neutron_management_network"] = "{neutron_management_network}"
clt["default_image_id"] = "{default_image_id}"
netid = str(uuid.uuid4())
configs = {"neutron_management_network": netid,
"default_image_id": None}
template_api.substitute_config_values(configs, clt, "/path")
self.assertEqual(netid, clt["neutron_management_network"])
self.assertIsNone(clt["default_image_id"])
def _write_files(self, tempdir, templates):
files = []
for template in templates:
fp = tempfile.NamedTemporaryFile(suffix=".json", mode="w",
dir=tempdir, delete=False)
json.dump(template, fp)
files.append(fp.name)
fp.close()
return files
    @mock.patch("sahara.db.templates.api.get_configs")
    @mock.patch("sahara.db.templates.api.add_config_section_for_template")
    def test_process_files(self, add_config_section, get_configs):
        """Template JSON files are read from a directory, non-template JSON
        is ignored, and plugin name/version filtering is honored."""
        self.logger.clear_log()
        tempdir = tempfile.mkdtemp()

        # This should be ignored by process files
        some_other_json = {"name": "fred",
                           "description": "not a template"}

        files = self._write_files(
            tempdir, [cluster_json, master_json, worker_json, some_other_json])

        get_configs.return_value = {"flavor_id": '2', 'security_groups': [],
                                    'auto_security_group': False}
        option_values = {"plugin_name": None,
                         "plugin_version": None}
        template_api.set_conf(Config(option_values))

        # Check that cluster and ng templates are read and returned
        ng_templates, cl_templates = template_api.process_files(tempdir, files)
        cl_temp_names = [f["template"]["name"] for f in cl_templates]
        ng_temp_names = [f["template"]["name"] for f in ng_templates]
        self.assertEqual([cluster_json["name"]], cl_temp_names)
        self.assertEqual([master_json["name"],
                          worker_json["name"]], ng_temp_names)

        # Plugin name/version filtering applied: exact match keeps all.
        option_values = {"plugin_name": "vanilla",
                         "plugin_version": "2.6.0"}
        template_api.set_conf(Config(option_values))
        ng_templates, cl_templates = template_api.process_files(tempdir, files)
        self.assertEqual(1, len(cl_templates))
        self.assertEqual(2, len(ng_templates))

        # Wrong version filters everything out.
        option_values = {"plugin_name": "vanilla",
                         "plugin_version": "1.2.1"}
        template_api.set_conf(Config(option_values))
        ng_templates, cl_templates = template_api.process_files(tempdir, files)
        self.assertEqual(0, len(cl_templates))
        self.assertEqual(0, len(ng_templates))

        # Wrong plugin name filters everything out.
        option_values = {"plugin_name": "hdp",
                         "plugin_version": "2.6.0"}
        template_api.set_conf(Config(option_values))
        ng_templates, cl_templates = template_api.process_files(tempdir, files)
        self.assertEqual(0, len(cl_templates))
        self.assertEqual(0, len(ng_templates))
    @mock.patch("sahara.db.templates.api.get_configs")
    @mock.patch("sahara.db.templates.api.add_config_section_for_template")
    def test_process_files_validation_error(self, add_config_section,
                                            get_configs):
        """One template failing schema validation causes the entire
        directory to be skipped with a warning."""
        self.logger.clear_log()
        tempdir = tempfile.mkdtemp()
        files = self._write_files(
            tempdir, [cluster_json, master_json, worker_json])

        get_configs.return_value = {
            "flavor_id": '2',
            'security_groups': [],
            'auto_security_group': False
        }
        option_values = {"plugin_name": None,
                         "plugin_version": None}
        template_api.set_conf(Config(option_values))

        # Bad JSON validation for ng should cause all files to be skipped
        bad_worker = copy.copy(worker_json)
        bad_worker["my_dog"] = ["fido"]
        new_file = self._write_files(tempdir, [bad_worker])[0]
        ng_templates, cl_templates = template_api.process_files(
            tempdir, files + [new_file])
        self.assertEqual(0, len(ng_templates))
        self.assertEqual(0, len(cl_templates))
        msg = ("Validation for {path} failed, "
               "Additional properties are not allowed".format(path=new_file))
        self.assertTrue(self.logger.warnings[0].startswith(msg))
    @mock.patch("sahara.db.templates.api.get_configs")
    @mock.patch("sahara.db.templates.api.add_config_section_for_template")
    def test_process_files_bad_json(self, add_config_section, get_configs):
        """An unparseable JSON file causes the entire directory to be
        skipped, with warnings naming the file and the directory."""
        self.logger.clear_log()
        tempdir = tempfile.mkdtemp()
        files = self._write_files(
            tempdir, [cluster_json, master_json, worker_json])

        get_configs.return_value = {"flavor_id": '2', 'security_groups': [],
                                    'auto_security_group': False}
        option_values = {"plugin_name": None,
                         "plugin_version": None}
        template_api.set_conf(Config(option_values))

        # Invalid JSON should cause all files to be skipped
        fp = tempfile.NamedTemporaryFile(suffix=".json",
                                         dir=tempdir, delete=False)
        fp.write(b"not json")
        files += [fp.name]
        fp.close()
        ng_templates, cl_templates = template_api.process_files(tempdir, files)
        self.assertEqual(0, len(ng_templates))
        self.assertEqual(0, len(cl_templates))
        msg = ("Error processing {name}".format(name=files[-1]))
        self.assertTrue(self.logger.warnings[0].startswith(msg))
        msg = ("Skipping processing for {dir}, "
               "error processing files".format(dir=tempdir))
        self.assertEqual(msg, self.logger.warnings[1])
    def test_add_node_group_templates(self):
        """add_node_group_templates creates new records, updates existing
        ones and reports created/updated lists plus a name->id map."""
        self.logger.clear_log()
        ctx = context.ctx()

        # Create a record that will be updated in the db
        existing = copy.copy(c.SAMPLE_NGT)
        existing = self.api.node_group_template_create(ctx, existing)

        # Create the update
        update = copy.copy(c.SAMPLE_NGT)
        update["flavor_id"] = "6"

        # Create a record that will be new in the db
        new = copy.copy(c.SAMPLE_NGT)
        new["name"] = "new_name"

        ngts = [{"template": update, "path": "foo"},
                {"template": new, "path": "bar"}]
        ng_info, error = template_api.add_node_group_templates(ctx, ngts)
        self.assertFalse(error)

        new = self.api.node_group_template_get_all(ctx, name=new["name"])[0]
        self.assertIsNotNone(new)

        # ng_info["created"] is a list of templates that were created
        self.assertEqual(1, len(ng_info["created"]))
        self.assertEqual(new["id"], ng_info["created"][0]["id"])

        # ng_info["updated"] is a list of tuples for templates that
        # were updated. First element in the tuple is the template,
        # second is a dictionary of fields that were updated.
        self.assertEqual(1, len(ng_info["updated"]))
        self.assertEqual(existing["id"], ng_info["updated"][0][0]["id"])
        self.assertEqual({"flavor_id": "42"}, ng_info["updated"][0][1])

        # ng_info["dict"] is a dictionary of name/id pairs
        self.assertEqual({new["name"]: new["id"],
                          existing["name"]: existing["id"]}, ng_info["ids"])

        # Both actions must have been logged with their source paths.
        msg = ("Created node group template {info} from bar".format(
            info=u.name_and_id(new)))
        self.assertIn(msg, self.logger.infos)

        msg = ("Updated node group template {info} from foo".format(
            info=u.name_and_id(existing)))
        self.assertIn(msg, self.logger.infos)

        self.api.node_group_template_destroy(ctx, new["id"],
                                             ignore_default=True)
        self.api.node_group_template_destroy(ctx, existing["id"],
                                             ignore_default=True)
    @mock.patch("sahara.conductor.API.node_group_template_update")
    @mock.patch("sahara.db.templates.api.reverse_node_group_template_creates")
    @mock.patch("sahara.db.templates.api.reverse_node_group_template_updates")
    def test_add_node_group_templates_update_failed(self,
                                                    reverse_updates,
                                                    reverse_creates,
                                                    ng_update):
        """A failed update triggers rollback of both earlier creates and
        earlier updates, and the failure is logged."""
        self.logger.clear_log()
        ctx = context.ctx()

        ng_update.side_effect = Exception("mistake")

        # Create a record that will be updated in the db
        existing = copy.copy(c.SAMPLE_NGT)
        existing = self.api.node_group_template_create(ctx, existing)

        # Create the update
        update = copy.copy(c.SAMPLE_NGT)
        update["flavor_id"] = "6"

        # Create a record that will be new in the db
        new = copy.copy(c.SAMPLE_NGT)
        new["name"] = "new_name"

        # "new" is processed first so a create precedes the failing update.
        ngts = [{"template": new, "path": "bar"},
                {"template": update, "path": "foo"}]
        ng_info, error = template_api.add_node_group_templates(ctx, ngts)
        new = self.api.node_group_template_get_all(ctx, name=new["name"])[0]
        self.assertTrue(error)

        self.assertEqual(1, reverse_creates.call_count)

        # call should have been (ctx, [new])
        self.assertEqual(new["id"], reverse_creates.call_args[0][1][0]["id"])

        self.assertEqual(1, reverse_updates.call_count)
        msg = ("Update of node group template {info} failed, mistake".format(
            info=u.name_and_id(existing)))
        self.assertIn(msg, self.logger.warnings)

        self.api.node_group_template_destroy(ctx, new["id"],
                                             ignore_default=True)
        self.api.node_group_template_destroy(ctx, existing["id"],
                                             ignore_default=True)
    @mock.patch("sahara.conductor.API.node_group_template_create")
    @mock.patch("sahara.db.templates.api.reverse_node_group_template_creates")
    @mock.patch("sahara.db.templates.api.reverse_node_group_template_updates")
    def test_add_node_group_templates_create_failed(self,
                                                    reverse_updates,
                                                    reverse_creates,
                                                    ng_create):
        """A failed create triggers rollback of earlier updates and creates,
        and the failure is logged with the source path."""
        self.logger.clear_log()
        ctx = context.ctx()

        ng_create.side_effect = Exception("mistake")

        # Create a record that will be updated in the db
        # (self.api is the db layer, distinct from the mocked conductor API)
        existing = copy.copy(c.SAMPLE_NGT)
        existing = self.api.node_group_template_create(ctx, existing)

        # Create the update
        update = copy.copy(c.SAMPLE_NGT)
        update["flavor_id"] = "6"

        # Create a record that will be new in the db
        new = copy.copy(c.SAMPLE_NGT)
        new["name"] = "new_name"

        # The update runs first so an update precedes the failing create.
        ngts = [{"template": update, "path": "foo"},
                {"template": new, "path": "bar"}]
        ng_info, error = template_api.add_node_group_templates(ctx, ngts)
        self.assertTrue(error)

        self.assertEqual(1, reverse_creates.call_count)
        self.assertEqual(1, reverse_updates.call_count)

        # call should have been (ctx, [(existing, updated_fields)])
        self.assertEqual({"flavor_id": existing["flavor_id"]},
                         reverse_updates.call_args[0][1][0][1])
        msg = "Creation of node group template from bar failed, mistake"
        self.assertIn(msg, self.logger.warnings)

        self.api.node_group_template_destroy(ctx, existing["id"],
                                             ignore_default=True)
    def test_add_cluster_templates(self):
        """Happy path: one cluster template is updated, one is created.

        Both operations succeed and each is reported in the info log.
        """
        self.logger.clear_log()
        ctx = context.ctx()
        # Create a record that will be updated in the db
        existing = copy.copy(c.SAMPLE_CLT)
        existing = self.api.cluster_template_create(ctx, existing)
        # Create the update
        update = copy.copy(c.SAMPLE_CLT)
        update["hadoop_version"] = "1"
        # Create a record that will be new in the db
        new = copy.copy(c.SAMPLE_CLT)
        new["name"] = "new_name"
        clts = [{"template": update, "path": "foo"},
                {"template": new, "path": "bar"}]
        error = template_api.add_cluster_templates(ctx, clts, {})
        self.assertFalse(error)
        new = self.api.cluster_template_get_all(ctx, name=new["name"])[0]
        self.assertIsNotNone(new)
        msg = ("Created cluster template {info} from bar".format(
            info=u.name_and_id(new)))
        self.assertIn(msg, self.logger.infos)
        msg = ("Updated cluster template {info} from foo".format(
            info=u.name_and_id(existing)))
        self.assertIn(msg, self.logger.infos)
        self.api.cluster_template_destroy(ctx, new["id"],
                                          ignore_default=True)
        self.api.cluster_template_destroy(ctx, existing["id"],
                                          ignore_default=True)
    @mock.patch("sahara.conductor.API.cluster_template_update")
    @mock.patch("sahara.db.templates.api.reverse_cluster_template_creates")
    @mock.patch("sahara.db.templates.api.reverse_cluster_template_updates")
    def test_add_cluster_templates_update_failed(self,
                                                 reverse_updates,
                                                 reverse_creates,
                                                 cl_update):
        """A failed update reverses the successful create and warns."""
        self.logger.clear_log()
        ctx = context.ctx()
        # Make the (mocked) conductor update call blow up
        cl_update.side_effect = Exception("mistake")
        # Create a record that will be updated in the db
        existing = copy.copy(c.SAMPLE_CLT)
        existing = self.api.cluster_template_create(ctx, existing)
        # Create the update
        update = copy.copy(c.SAMPLE_CLT)
        update["hadoop_version"] = "1"
        # Create a record that will be new in the db
        new = copy.copy(c.SAMPLE_CLT)
        new["name"] = "new_name"
        # "new" is listed first so the create succeeds before the update fails
        clts = [{"template": new, "path": "bar"},
                {"template": update, "path": "foo"}]
        error = template_api.add_cluster_templates(ctx, clts, {})
        new = self.api.cluster_template_get_all(ctx, name=new["name"])[0]
        self.assertTrue(error)
        self.assertEqual(1, reverse_creates.call_count)
        # call should have been (ctx, [new])
        self.assertEqual(new["id"], reverse_creates.call_args[0][1][0]["id"])
        self.assertEqual(1, reverse_updates.call_count)
        msg = ("Update of cluster template {info} failed, mistake".format(
            info=u.name_and_id(existing)))
        self.assertIn(msg, self.logger.warnings)
        self.api.cluster_template_destroy(ctx, new["id"],
                                          ignore_default=True)
        self.api.cluster_template_destroy(ctx, existing["id"],
                                          ignore_default=True)
    @mock.patch("sahara.conductor.API.cluster_template_create")
    @mock.patch("sahara.db.templates.api.reverse_cluster_template_creates")
    @mock.patch("sahara.db.templates.api.reverse_cluster_template_updates")
    def test_add_cluster_templates_create_failed(self,
                                                 reverse_updates,
                                                 reverse_creates,
                                                 cl_create):
        """A failed create reverses the earlier update and warns."""
        self.logger.clear_log()
        ctx = context.ctx()
        # Make the (mocked) conductor create call blow up
        cl_create.side_effect = Exception("mistake")
        # Create a record that will be updated in the db
        existing = copy.copy(c.SAMPLE_CLT)
        existing = self.api.cluster_template_create(ctx, existing)
        # Create the update
        update = copy.copy(c.SAMPLE_CLT)
        update["hadoop_version"] = "1"
        # Create a record that will be new in the db
        new = copy.copy(c.SAMPLE_CLT)
        new["name"] = "new_name"
        clts = [{"template": update, "path": "foo"},
                {"template": new, "path": "bar"}]
        error = template_api.add_cluster_templates(ctx, clts, {})
        self.assertTrue(error)
        self.assertEqual(1, reverse_creates.call_count)
        self.assertEqual(1, reverse_updates.call_count)
        # call should have been (ctx, [(existing, updated_fields)])
        # updated fields will contain hadoop_version and node_groups,
        # since node_groups is modified by the conductor
        updated_fields = reverse_updates.call_args[0][1][0][1]
        self.assertEqual(updated_fields["hadoop_version"],
                         existing["hadoop_version"])
        self.assertIn("node_groups", updated_fields)
        msg = "Creation of cluster template from bar failed, mistake"
        self.assertIn(msg, self.logger.warnings)
        self.api.cluster_template_destroy(ctx, existing["id"],
                                          ignore_default=True)
@mock.patch("sahara.db.templates.api.get_configs")
@mock.patch("sahara.db.templates.api.add_config_section_for_template")
def test_do_update_trash(self, add_config, get_configs):
self.logger.clear_log()
ctx = context.ctx()
tempdir = tempfile.mkdtemp()
self._write_files(tempdir, [cluster_json, master_json, worker_json])
get_configs.return_value = {
"flavor_id": '2',
"neutron_management_network": str(uuid.uuid4()),
'auto_security_group': True,
'security_groups': [],
}
option_values = {"tenant_id": ctx.tenant_id,
"directory": tempdir,
"norecurse": None,
"plugin_name": None,
"plugin_version": None}
template_api.set_conf(Config(option_values))
template_api.do_update()
ngs = self.api.node_group_template_get_all(ctx)
ng_names = sorted([ng["name"] for ng in ngs])
self.assertEqual(sorted([master_json["name"], worker_json["name"]]),
ng_names)
clts = self.api.cluster_template_get_all(ctx)
clt_names = sorted([clt["name"] for clt in clts])
clts = self.api.cluster_template_get_all(ctx)
self.assertEqual([cluster_json["name"]], clt_names)
    @mock.patch("sahara.db.templates.api.check_cluster_templates_valid")
    @mock.patch("sahara.db.templates.api.get_configs")
    @mock.patch("sahara.db.templates.api.add_config_section_for_template")
    def test_do_update_cluster_invalid(self, add_config,
                                       get_configs, clt_valid):
        """Invalid cluster templates abort processing of the directory."""
        self.logger.clear_log()
        ctx = context.ctx()
        tempdir = tempfile.mkdtemp()
        self._write_files(tempdir, [cluster_json, master_json, worker_json])
        get_configs.return_value = {
            "flavor_id": '2',
            "neutron_management_network": str(uuid.uuid4())
        }
        option_values = {"tenant_id": ctx.tenant_id,
                         "directory": tempdir,
                         "norecurse": None,
                         "plugin_name": None,
                         "plugin_version": None}
        template_api.set_conf(Config(option_values))
        # A True return from the validity check signals an error here
        clt_valid.return_value = True
        template_api.do_update()
        # Nothing should have been written to the db
        ngs = self.api.node_group_template_get_all(ctx)
        self.assertEqual([], ngs)
        clts = self.api.cluster_template_get_all(ctx)
        self.assertEqual([], clts)
        msg = ("Skipping processing for {dir}, "
               "error processing cluster templates".format(dir=tempdir))
        self.assertIn(msg, self.logger.warnings)
    @mock.patch("sahara.db.templates.api.check_usage_of_existing")
    @mock.patch("sahara.db.templates.api.get_configs")
    @mock.patch("sahara.db.templates.api.add_config_section_for_template")
    def test_do_update_existing_fails(self, add_config,
                                      get_configs, check_existing):
        """Templates in use by existing clusters abort the directory."""
        self.logger.clear_log()
        ctx = context.ctx()
        tempdir = tempfile.mkdtemp()
        self._write_files(tempdir, [cluster_json, master_json, worker_json])
        get_configs.return_value = {
            "flavor_id": '2',
            "neutron_management_network": str(uuid.uuid4())
        }
        option_values = {"tenant_id": ctx.tenant_id,
                         "directory": tempdir,
                         "norecurse": None,
                         "plugin_name": None,
                         "plugin_version": None}
        template_api.set_conf(Config(option_values))
        # A True return from the usage check means templates are in use
        check_existing.return_value = True
        template_api.do_update()
        # Nothing should have been written to the db
        ngs = self.api.node_group_template_get_all(ctx)
        self.assertEqual([], ngs)
        clts = self.api.cluster_template_get_all(ctx)
        self.assertEqual([], clts)
        msg = ("Skipping processing for {dir}, "
               "templates in use".format(dir=tempdir))
        self.assertIn(msg, self.logger.warnings)
    @mock.patch("sahara.db.templates.api.add_node_group_templates")
    @mock.patch("sahara.db.templates.api.get_configs")
    @mock.patch("sahara.db.templates.api.add_config_section_for_template")
    def test_do_update_add_ngts_fails(self, add_config,
                                      get_configs, add_ngts):
        """A node group template failure aborts the whole directory."""
        self.logger.clear_log()
        ctx = context.ctx()
        tempdir = tempfile.mkdtemp()
        self._write_files(tempdir, [cluster_json, master_json, worker_json])
        get_configs.return_value = {
            "flavor_id": '2',
            "neutron_management_network": str(uuid.uuid4())
        }
        option_values = {"tenant_id": ctx.tenant_id,
                         "directory": tempdir,
                         "norecurse": None,
                         "plugin_name": None,
                         "plugin_version": None}
        template_api.set_conf(Config(option_values))
        # add_node_group_templates returns (ng_info, error); error=True
        add_ngts.return_value = ({}, True)
        template_api.do_update()
        # Nothing should have been written to the db
        ngs = self.api.node_group_template_get_all(ctx)
        self.assertEqual([], ngs)
        clts = self.api.cluster_template_get_all(ctx)
        self.assertEqual([], clts)
        msg = ("Skipping processing for {dir}, "
               "error processing node group templates".format(dir=tempdir))
        self.assertIn(msg, self.logger.warnings)
    @mock.patch("sahara.db.templates.api.reverse_node_group_template_creates")
    @mock.patch("sahara.db.templates.api.reverse_node_group_template_updates")
    @mock.patch("sahara.db.templates.api.add_cluster_templates")
    @mock.patch("sahara.db.templates.api.get_configs")
    @mock.patch("sahara.db.templates.api.add_config_section_for_template")
    def test_do_update_add_clts_fails(self,
                                      add_config,
                                      get_configs,
                                      add_clts,
                                      reverse_ng_updates,
                                      reverse_ng_creates):
        """A cluster template failure rolls back node group work."""
        self.logger.clear_log()
        ctx = context.ctx()
        tempdir = tempfile.mkdtemp()
        self._write_files(tempdir, [cluster_json, master_json, worker_json])
        get_configs.return_value = {
            "flavor_id": '2',
            "neutron_management_network": str(uuid.uuid4())
        }
        option_values = {"tenant_id": ctx.tenant_id,
                         "directory": tempdir,
                         "norecurse": None,
                         "plugin_name": None,
                         "plugin_version": None}
        template_api.set_conf(Config(option_values))
        # add_cluster_templates returning True signals an error
        add_clts.return_value = True
        template_api.do_update()
        # Node group template work must have been reversed
        self.assertEqual(1, reverse_ng_creates.call_count)
        self.assertEqual(1, reverse_ng_updates.call_count)
        clts = self.api.cluster_template_get_all(ctx)
        self.assertEqual([], clts)
        msg = ("Skipping processing for {dir}, "
               "error processing cluster templates".format(dir=tempdir))
        self.assertIn(msg, self.logger.warnings)
| {
"content_hash": "0949ac07c347c7dfe6739548507f17c7",
"timestamp": "",
"source": "github",
"line_count": 791,
"max_line_length": 79,
"avg_line_length": 39.582806573957015,
"alnum_prop": 0.5724049824337273,
"repo_name": "ekasitk/sahara",
"id": "bb225ea95ab3a0ea50c0ef095e2e7956de30a205",
"size": "31887",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sahara/tests/unit/db/templates/test_update.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "3609"
},
{
"name": "Mako",
"bytes": "19620"
},
{
"name": "PigLatin",
"bytes": "792"
},
{
"name": "Python",
"bytes": "3141724"
},
{
"name": "Shell",
"bytes": "52399"
}
],
"symlink_target": ""
} |
"""
sphinx.util.pycompat
~~~~~~~~~~~~~~~~~~~~
Stuff for Python version compatibility.
:copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import io
import sys
import codecs
import warnings
from six import class_types
from six.moves import zip_longest
from itertools import product
from six import PY3, text_type, exec_
NoneType = type(None)
# ------------------------------------------------------------------------------
# Python 2/3 compatibility
if PY3:
    # Python 3
    # prefix for Unicode strings
    u = ''
    from io import TextIOWrapper

    # safely encode a string for printing to the terminal
    def terminal_safe(s):
        return s.encode('ascii', 'backslashreplace').decode('ascii')

    # some kind of default system encoding; should be used with a lenient
    # error handler
    sys_encoding = sys.getdefaultencoding()

    # support for running 2to3 over config files
    def convert_with_2to3(filepath):
        """Run 2to3 over *filepath* and return the refactored source text.

        lib2to3's ParseError is translated into a SyntaxError carrying
        file/line/offset details so callers can handle both parse paths
        uniformly.
        """
        from lib2to3.refactor import RefactoringTool, get_fixers_from_package
        from lib2to3.pgen2.parse import ParseError
        fixers = get_fixers_from_package('lib2to3.fixes')
        refactoring_tool = RefactoringTool(fixers)
        source = refactoring_tool._read_python_source(filepath)[0]
        try:
            tree = refactoring_tool.refactor_string(source, 'conf.py')
        except ParseError as err:
            # do not propagate lib2to3 exceptions
            lineno, offset = err.context[1]
            # try to match ParseError details with SyntaxError details
            raise SyntaxError(err.msg, (filepath, lineno, offset, err.value))
        return text_type(tree)

    from html import escape as htmlescape  # noqa: >= Python 3.2

    class UnicodeMixin:
        """Mixin class to handle defining the proper __str__/__unicode__
        methods in Python 2 or 3."""

        def __str__(self):
            return self.__unicode__()

    from textwrap import indent
else:
    # Python 2
    u = 'u'
    # no need to refactor on 2.x versions
    convert_with_2to3 = None

    # minimal stand-in for io.TextIOWrapper: wrap a byte stream with the
    # stream writer class from the codecs registry for the given encoding
    def TextIOWrapper(stream, encoding):
        return codecs.lookup(encoding or 'ascii')[2](stream)

    # safely encode a string for printing to the terminal
    def terminal_safe(s):
        return s.encode('ascii', 'backslashreplace')

    # some kind of default system encoding; should be used with a lenient
    # error handler
    sys_encoding = __import__('locale').getpreferredencoding()

    # use Python 3 name
    from cgi import escape as htmlescape  # noqa: 2.6, 2.7

    class UnicodeMixin(object):
        """Mixin class to handle defining the proper __str__/__unicode__
        methods in Python 2 or 3."""

        def __str__(self):
            return self.__unicode__().encode('utf8')

    # backport from python3
    def indent(text, prefix, predicate=None):
        """Add *prefix* to the lines of *text* selected by *predicate*
        (by default, all non-blank lines)."""
        if predicate is None:
            def predicate(line):
                return line.strip()

        def prefixed_lines():
            for line in text.splitlines(True):
                yield (prefix + line if predicate(line) else line)
        return ''.join(prefixed_lines())
def execfile_(filepath, _globals, open=open):
    """Execute the Python file at *filepath* in the namespace *_globals*.

    Replacement for the Python 2 ``execfile`` builtin that works on both
    Python 2 and 3.  If the source does not parse under the running
    interpreter and 2to3 support is available, the file is refactored
    with 2to3 and compiled again; otherwise the SyntaxError propagates.
    The *open* parameter exists so tests can inject a fake file opener.
    """
    from sphinx.util.osutil import fs_encoding
    # get config source -- 'b' is a no-op under 2.x, while 'U' is
    # ignored under 3.x (but 3.x compile() accepts \r\n newlines)
    f = open(filepath, 'rbU')
    try:
        source = f.read()
    finally:
        f.close()

    # py26 accept only LF eol instead of CRLF
    if sys.version_info[:2] == (2, 6):
        source = source.replace(b'\r\n', b'\n')

    # compile to a code object, handle syntax errors
    filepath_enc = filepath.encode(fs_encoding)
    try:
        code = compile(source, filepath_enc, 'exec')
    except SyntaxError:
        if convert_with_2to3:
            # maybe the file uses 2.x syntax; try to refactor to
            # 3.x syntax using 2to3
            source = convert_with_2to3(filepath)
            code = compile(source, filepath_enc, 'exec')
        else:
            raise
    exec_(code, _globals)
# ------------------------------------------------------------------------------
# Internal module backwards-compatibility
class _DeprecationWrapper(object):
def __init__(self, mod, deprecated):
self._mod = mod
self._deprecated = deprecated
def __getattr__(self, attr):
if attr in self._deprecated:
warnings.warn("sphinx.util.pycompat.%s is deprecated and will be "
"removed in Sphinx 1.4, please use the standard "
"library version instead." % attr,
DeprecationWarning, stacklevel=2)
return self._deprecated[attr]
return getattr(self._mod, attr)
# Replace this module in sys.modules with a wrapper object that forwards
# normal attribute access but warns when one of the names below is used;
# they all have direct standard-library equivalents now.
sys.modules[__name__] = _DeprecationWrapper(sys.modules[__name__], dict(
    zip_longest = zip_longest,
    product = product,
    all = all,
    any = any,
    next = next,
    open = open,
    class_types = class_types,
    base_exception = BaseException,
    relpath = __import__('os').path.relpath,
    StringIO = io.StringIO,
    BytesIO = io.BytesIO,
))
| {
"content_hash": "7439e98bccf0bdfe1501f17d241d8465",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 80,
"avg_line_length": 32.130434782608695,
"alnum_prop": 0.6019717765319931,
"repo_name": "fzheng/codejam",
"id": "0daa87981efbda9b3c396f7f7015dbed1b41ed74",
"size": "5197",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lib/python2.7/site-packages/sphinx/util/pycompat.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "26929"
},
{
"name": "CSS",
"bytes": "70961"
},
{
"name": "HTML",
"bytes": "80615"
},
{
"name": "Java",
"bytes": "376384"
},
{
"name": "JavaScript",
"bytes": "5201764"
},
{
"name": "Jupyter Notebook",
"bytes": "13408"
},
{
"name": "Makefile",
"bytes": "2379"
},
{
"name": "Python",
"bytes": "16542061"
},
{
"name": "Smarty",
"bytes": "22430"
},
{
"name": "TeX",
"bytes": "85477"
}
],
"symlink_target": ""
} |
import markdown
class LinkifyPattern(markdown.inlinepatterns.Pattern):
    """
    Pattern to replace bare links with <a> elements.
    """

    def __init__(self):
        # NOTE(review): inside a character class ``\b`` is a literal
        # backspace (0x08), not a word boundary -- presumably the intent
        # was just "no whitespace or ')'"; confirm before changing.
        super(LinkifyPattern, self).__init__(r'(https?:\/\/[^\s\b)]+)')

    def handleMatch(self, m):
        # markdown's Pattern wraps the user regex, so group(2) is the
        # first group of our pattern (the full URL).
        e = markdown.util.etree.Element('a')
        e.set('href', m.group(2))
        e.text = m.group(2)
        return e
| {
"content_hash": "278be1bfd1343264fc5a04d60208b3ed",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 71,
"avg_line_length": 24.625,
"alnum_prop": 0.5710659898477157,
"repo_name": "2buntu/2buntu-blog",
"id": "fb5d15032b0d0c2098b38a65a2bfeeeb60122bb7",
"size": "394",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "twobuntu/cmarkdown/linkify.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2977"
},
{
"name": "HTML",
"bytes": "76074"
},
{
"name": "JavaScript",
"bytes": "6310"
},
{
"name": "Python",
"bytes": "93053"
}
],
"symlink_target": ""
} |
import copy
try:
    from guardian.shortcuts import assign_perm
except ImportError:
    # django-guardian is optional: fall back to a no-op so the rest of
    # this module keeps working without object-level permissions.
    def assign_perm(perm_name, group_or_user, obj):
        pass
from groups_manager.utils import get_permission_name
def assign_related(related_groups, perms, obj):
    """Grant permissions on *obj* to the django groups behind *related_groups*.

    *perms* is either a flat iterable of permission codenames applied to
    every group, or a dict keyed by group-type codename with an optional
    'default' entry applied to all groups.  Note: the 'default' key is
    popped, mutating the caller's dict (preserved from the original).
    """
    def _grant(codenames, django_group):
        # Resolve each codename against obj's model and assign it.
        for codename in codenames:
            assign_perm(get_permission_name(codename, obj), django_group, obj)

    if isinstance(perms, dict):
        default_perms = set(perms.pop('default', []))
        for related in related_groups:
            _grant(default_perms, related.django_group)
            group_type = related.group_type
            if group_type is not None:
                _grant(set(perms.get(group_type.codename, [])),
                       related.django_group)
    else:
        unique_perms = list(set(perms))
        for related in related_groups:
            _grant(unique_perms, related.django_group)
def assign_object_to_member(group_member, obj, **kwargs):
    """Assign an object to a GroupMember instance object.

    :Parameters:
      - `group_member`: groups_manager.model.GroupMember instance
      - `obj`: object to set permissions

    :Kwargs:
      - `custom_permissions`: updates settings.GROUPS_MANAGER['PERMISSIONS']
    """
    from groups_manager.settings import GROUPS_MANAGER
    # Only act when auth-model syncing is enabled and both the group and
    # the member have their django counterparts.
    if GROUPS_MANAGER['AUTH_MODELS_SYNC'] and \
            group_member.group.django_group and group_member.member.django_user:
        roles_attr = kwargs.get('roles_attr', 'roles')
        # Deep-copy so per-call custom_permissions never leak into settings.
        permissions = copy.deepcopy(GROUPS_MANAGER['PERMISSIONS'])
        permissions.update(kwargs.get('custom_permissions', {}))
        # owner
        if isinstance(permissions['owner'], dict):
            # Role-aware owner perms: merge the entries for the member's
            # roles with the 'default' entry.
            roles = getattr(group_member, roles_attr).values_list('codename', flat=True)
            owner_perms = []
            for role in list(set(roles).intersection(set(permissions['owner'].keys()))) + ['default']:
                owner_perms += permissions['owner'].get(role, [])
        else:
            owner_perms = permissions['owner']
        for permission in list(set(owner_perms)):
            perm_name = get_permission_name(permission, obj)
            assign_perm(perm_name, group_member.member.django_user, obj)
        # group
        group_perms = permissions.get('group', [])
        for permission in list(set(group_perms)):
            perm_name = get_permission_name(permission, obj)
            assign_perm(perm_name, group_member.group.django_group, obj)
        # groups_upstream
        upstream_groups = group_member.group.get_ancestors()
        upstream_perms = permissions.get('groups_upstream', [])
        assign_related(upstream_groups, upstream_perms, obj)
        # groups_downstream
        downstream_groups = group_member.group.get_descendants()
        downstream_perms = permissions.get('groups_downstream', [])
        assign_related(downstream_groups, downstream_perms, obj)
        # groups_siblings
        siblings_groups = group_member.group.get_siblings()
        siblings_perms = permissions.get('groups_siblings', [])
        assign_related(siblings_groups, siblings_perms, obj)
def assign_object_to_group(group, obj, **kwargs):
    """Assign an object to a Group instance object.

    :Parameters:
      - `group`: groups_manager.model.Group instance
      - `obj`: object to set permissions

    :Kwargs:
      - `custom_permissions`: updates settings.GROUPS_MANAGER['PERMISSIONS']
    """
    from groups_manager.settings import GROUPS_MANAGER
    # Guard clause: nothing to do without auth-model sync or a django group.
    if not (GROUPS_MANAGER['AUTH_MODELS_SYNC'] and group.django_group):
        return
    # Deep-copy so per-call custom_permissions never leak into settings.
    perms_map = copy.deepcopy(GROUPS_MANAGER['PERMISSIONS'])
    perms_map.update(kwargs.get('custom_permissions', {}))
    # owner is ignored from permissions
    # group itself
    for codename in set(perms_map.get('group', [])):
        assign_perm(get_permission_name(codename, obj), group.django_group, obj)
    # related groups: ancestors, descendants and siblings
    assign_related(group.get_ancestors(),
                   perms_map.get('groups_upstream', []), obj)
    assign_related(group.get_descendants(),
                   perms_map.get('groups_downstream', []), obj)
    assign_related(group.get_siblings(),
                   perms_map.get('groups_siblings', []), obj)
| {
"content_hash": "41be803894ffb643034c538102dc6b88",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 102,
"avg_line_length": 40.18487394957983,
"alnum_prop": 0.6369719782517775,
"repo_name": "vittoriozamboni/django-groups-manager",
"id": "604f65be427afc2a7bb7f7183e2d2eae22e3ebe6",
"size": "4782",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "groups_manager/perms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3376"
},
{
"name": "HTML",
"bytes": "65993"
},
{
"name": "Python",
"bytes": "151823"
},
{
"name": "SCSS",
"bytes": "126"
}
],
"symlink_target": ""
} |
import urllib2
from xml.dom import minidom, Node

# Tropo voice-application script.  The names answer(), say(), ask(),
# log() and hangup() are presumably injected by the Tropo scripting
# runtime -- they are not defined in this file; verify against the
# hosting environment.  Python 2 only (urllib2).
answer()
say( "Welcome to the python Yahoo weather reader" )
# Prompt the caller for a five-digit ZIP code
result = ask( "Enter the ZIP code for a weather check", { 'choices' : "[5 DIGITS]" })
if result.name == 'choice' :
    log( "zipCode <" + result.value + ">" )
    # NOTE(review): urlopen raises on network/HTTP errors rather than
    # returning a falsy value, so the error branch below is unlikely to
    # run; an explicit try/except would be more robust.
    urlRead = urllib2.urlopen('http://weather.yahooapis.com/forecastrss?p=' + result.value + '&u=f')
    if urlRead :
        xml = minidom.parse( urlRead )
        if xml :
            # Walk the RSS document: channel > description / item
            for node in xml.documentElement.childNodes :
                if node.nodeName == "channel" :
                    for item_node in node.childNodes :
                        if item_node.nodeName == "description" :
                            # Concatenate the text children and speak them
                            description = ""
                            for text_node in item_node.childNodes:
                                if text_node.nodeType == node.TEXT_NODE :
                                    description += text_node.nodeValue
                            if len( description ) > 0:
                                say( description )
                        if item_node.nodeName == "item" :
                            item = ""
                            for weatherItem_node in item_node.childNodes:
                                if weatherItem_node.nodeName == "title" :
                                    weatherTitle = ""
                                    for weatherText_node in weatherItem_node.childNodes :
                                        weatherTitle += weatherText_node.nodeValue
                                    if len( weatherTitle ) > 0 :
                                        say( weatherTitle )
                                if weatherItem_node.nodeName == "yweather:condition" :
                                    # Speak temperature, then conditions text
                                    weatherCondition = weatherItem_node.getAttribute( 'temp' )
                                    if len( weatherCondition ) > 0 :
                                        say( "Temperature: " + weatherCondition + " degrees Fahrenheit" )
                                    weatherCondition = weatherItem_node.getAttribute( 'text' )
                                    if len( weatherCondition ) > 0 :
                                        say( weatherCondition )
        else :
            log( "Error getting XML " )
            say( "I am sorry, Error occured while fetching weather." )
say( "Thats all. Goodbye!" )
hangup()
"content_hash": "ece27476887a869d55eb1b90bf060509",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 105,
"avg_line_length": 49.541666666666664,
"alnum_prop": 0.4554247266610597,
"repo_name": "tropo/tropo-samples",
"id": "fd74a508d8166c99e2857ef47d470acf42cc5f2e",
"size": "2514",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/yahooweather.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Groovy",
"bytes": "44389"
},
{
"name": "JavaScript",
"bytes": "31282"
},
{
"name": "PHP",
"bytes": "53979"
},
{
"name": "Python",
"bytes": "33482"
},
{
"name": "Ruby",
"bytes": "98603"
}
],
"symlink_target": ""
} |
"""HiGHS Linear Optimization Methods
Interface to HiGHS linear optimization software.
https://www.maths.ed.ac.uk/hall/HiGHS/
.. versionadded:: 1.5.0
References
----------
.. [1] Q. Huangfu and J.A.J. Hall. "Parallelizing the dual revised simplex
method." Mathematical Programming Computation, 10 (1), 119-142,
2018. DOI: 10.1007/s12532-017-0130-5
"""
import inspect
import numpy as np
from ._optimize import _check_unknown_options, OptimizeWarning, OptimizeResult
from warnings import warn
from ._highs._highs_wrapper import _highs_wrapper
from ._highs._highs_constants import (
CONST_I_INF,
CONST_INF,
MESSAGE_LEVEL_NONE,
HIGHS_OBJECTIVE_SENSE_MINIMIZE,
MODEL_STATUS_NOTSET,
MODEL_STATUS_LOAD_ERROR,
MODEL_STATUS_MODEL_ERROR,
MODEL_STATUS_PRESOLVE_ERROR,
MODEL_STATUS_SOLVE_ERROR,
MODEL_STATUS_POSTSOLVE_ERROR,
MODEL_STATUS_MODEL_EMPTY,
MODEL_STATUS_OPTIMAL,
MODEL_STATUS_INFEASIBLE,
MODEL_STATUS_UNBOUNDED_OR_INFEASIBLE,
MODEL_STATUS_UNBOUNDED,
MODEL_STATUS_REACHED_DUAL_OBJECTIVE_VALUE_UPPER_BOUND
as MODEL_STATUS_RDOVUB,
MODEL_STATUS_REACHED_OBJECTIVE_TARGET,
MODEL_STATUS_REACHED_TIME_LIMIT,
MODEL_STATUS_REACHED_ITERATION_LIMIT,
HIGHS_SIMPLEX_STRATEGY_CHOOSE,
HIGHS_SIMPLEX_STRATEGY_DUAL,
HIGHS_SIMPLEX_CRASH_STRATEGY_OFF,
HIGHS_SIMPLEX_DUAL_EDGE_WEIGHT_STRATEGY_CHOOSE,
HIGHS_SIMPLEX_DUAL_EDGE_WEIGHT_STRATEGY_DANTZIG,
HIGHS_SIMPLEX_DUAL_EDGE_WEIGHT_STRATEGY_DEVEX,
HIGHS_SIMPLEX_DUAL_EDGE_WEIGHT_STRATEGY_STEEPEST_EDGE,
HIGHS_VAR_TYPE_CONTINUOUS,
)
from scipy.sparse import csc_matrix, vstack, issparse
def _highs_to_scipy_status_message(highs_status, highs_message):
    """Converts HiGHS status number/message to SciPy status number/message"""
    # SciPy convention: 0 success, 1 limit reached, 2 infeasible,
    # 3 unbounded, 4 solver error / unrecognized.
    table = {
        None: (4, "HiGHS did not provide a status code. "),
        MODEL_STATUS_OPTIMAL: (0, "Optimization terminated successfully. "),
        MODEL_STATUS_REACHED_TIME_LIMIT: (1, "Time limit reached. "),
        MODEL_STATUS_REACHED_ITERATION_LIMIT: (1, "Iteration limit reached. "),
        MODEL_STATUS_INFEASIBLE: (2, "The problem is infeasible. "),
        MODEL_STATUS_MODEL_ERROR: (2, ""),
        MODEL_STATUS_UNBOUNDED: (3, "The problem is unbounded. "),
        MODEL_STATUS_UNBOUNDED_OR_INFEASIBLE: (4, "The problem is unbounded "
                                                  "or infeasible. "),
    }
    # Remaining statuses all map to a bare "solver problem" with no text.
    for status in (MODEL_STATUS_NOTSET, MODEL_STATUS_LOAD_ERROR,
                   MODEL_STATUS_PRESOLVE_ERROR, MODEL_STATUS_SOLVE_ERROR,
                   MODEL_STATUS_POSTSOLVE_ERROR, MODEL_STATUS_MODEL_EMPTY,
                   MODEL_STATUS_RDOVUB,
                   MODEL_STATUS_REACHED_OBJECTIVE_TARGET):
        table[status] = (4, "")
    scipy_status, prefix = table.get(
        highs_status, (4, "The HiGHS status code was not recognized. "))
    scipy_message = (f"{prefix}"
                     f"(HiGHS Status {highs_status}: {highs_message})")
    return scipy_status, scipy_message
def _replace_inf(x):
    """Clamp infinities in *x* (in place) to HiGHS's finite sentinel."""
    mask = np.isinf(x)
    # copysign keeps each infinity's direction while replacing its
    # magnitude with CONST_INF, i.e. sign(x)*CONST_INF as before.
    x[mask] = np.copysign(CONST_INF, x[mask])
    return x
def _convert_to_highs_enum(option, option_str, choices):
    # If option is in the choices we can look it up, if not use
    # the default value taken from function signature and warn:
    try:
        # options are case-insensitive strings; normalize before lookup
        return choices[option.lower()]
    except AttributeError:
        # `option` is not a string (e.g. ``None``); try it as a raw key
        return choices[option]
    except KeyError:
        # unknown value: fall back to the default declared in the
        # signature of ``_linprog_highs`` and warn the caller
        sig = inspect.signature(_linprog_highs)
        default_str = sig.parameters[option_str].default
        warn(f"Option {option_str} is {option}, but only values in "
             f"{set(choices.keys())} are allowed. Using default: "
             f"{default_str}.",
             OptimizeWarning, stacklevel=3)
        return choices[default_str]
def _linprog_highs(lp, solver, time_limit=None, presolve=True,
disp=False, maxiter=None,
dual_feasibility_tolerance=None,
primal_feasibility_tolerance=None,
ipm_optimality_tolerance=None,
simplex_dual_edge_weight_strategy=None,
**unknown_options):
r"""
Solve the following linear programming problem using one of the HiGHS
solvers:
User-facing documentation is in _linprog_doc.py.
Parameters
----------
lp : _LPProblem
A ``scipy.optimize._linprog_util._LPProblem`` ``namedtuple``.
solver : "ipm" or "simplex" or None
Which HiGHS solver to use. If ``None``, "simplex" will be used.
Options
-------
maxiter : int
The maximum number of iterations to perform in either phase. For
``solver='ipm'``, this does not include the number of crossover
iterations. Default is the largest possible value for an ``int``
on the platform.
disp : bool
Set to ``True`` if indicators of optimization status are to be printed
to the console each iteration; default ``False``.
time_limit : float
The maximum time in seconds allotted to solve the problem; default is
the largest possible value for a ``double`` on the platform.
presolve : bool
Presolve attempts to identify trivial infeasibilities,
identify trivial unboundedness, and simplify the problem before
sending it to the main solver. It is generally recommended
to keep the default setting ``True``; set to ``False`` if presolve is
to be disabled.
dual_feasibility_tolerance : double
Dual feasibility tolerance. Default is 1e-07.
The minimum of this and ``primal_feasibility_tolerance``
is used for the feasibility tolerance when ``solver='ipm'``.
primal_feasibility_tolerance : double
Primal feasibility tolerance. Default is 1e-07.
The minimum of this and ``dual_feasibility_tolerance``
is used for the feasibility tolerance when ``solver='ipm'``.
ipm_optimality_tolerance : double
Optimality tolerance for ``solver='ipm'``. Default is 1e-08.
Minimum possible value is 1e-12 and must be smaller than the largest
possible value for a ``double`` on the platform.
simplex_dual_edge_weight_strategy : str (default: None)
Strategy for simplex dual edge weights. The default, ``None``,
automatically selects one of the following.
``'dantzig'`` uses Dantzig's original strategy of choosing the most
negative reduced cost.
``'devex'`` uses the strategy described in [15]_.
``steepest`` uses the exact steepest edge strategy as described in
[16]_.
``'steepest-devex'`` begins with the exact steepest edge strategy
until the computation is too costly or inexact and then switches to
the devex method.
Curently, using ``None`` always selects ``'steepest-devex'``, but this
may change as new options become available.
unknown_options : dict
Optional arguments not used by this particular solver. If
``unknown_options`` is non-empty, a warning is issued listing all
unused options.
Returns
-------
sol : dict
A dictionary consisting of the fields:
x : 1D array
The values of the decision variables that minimizes the
objective function while satisfying the constraints.
fun : float
The optimal value of the objective function ``c @ x``.
slack : 1D array
The (nominally positive) values of the slack,
``b_ub - A_ub @ x``.
con : 1D array
The (nominally zero) residuals of the equality constraints,
``b_eq - A_eq @ x``.
success : bool
``True`` when the algorithm succeeds in finding an optimal
solution.
status : int
An integer representing the exit status of the algorithm.
``0`` : Optimization terminated successfully.
``1`` : Iteration or time limit reached.
``2`` : Problem appears to be infeasible.
``3`` : Problem appears to be unbounded.
``4`` : The HiGHS solver ran into a problem.
message : str
A string descriptor of the exit status of the algorithm.
nit : int
The total number of iterations performed.
For ``solver='simplex'``, this includes iterations in all
phases. For ``solver='ipm'``, this does not include
crossover iterations.
crossover_nit : int
The number of primal/dual pushes performed during the
crossover routine for ``solver='ipm'``. This is ``0``
for ``solver='simplex'``.
ineqlin : OptimizeResult
Solution and sensitivity information corresponding to the
inequality constraints, `b_ub`. A dictionary consisting of the
fields:
residual : np.ndnarray
The (nominally positive) values of the slack variables,
``b_ub - A_ub @ x``. This quantity is also commonly
referred to as "slack".
marginals : np.ndarray
The sensitivity (partial derivative) of the objective
function with respect to the right-hand side of the
inequality constraints, `b_ub`.
eqlin : OptimizeResult
Solution and sensitivity information corresponding to the
equality constraints, `b_eq`. A dictionary consisting of the
fields:
residual : np.ndarray
The (nominally zero) residuals of the equality constraints,
``b_eq - A_eq @ x``.
marginals : np.ndarray
The sensitivity (partial derivative) of the objective
function with respect to the right-hand side of the
equality constraints, `b_eq`.
lower, upper : OptimizeResult
Solution and sensitivity information corresponding to the
lower and upper bounds on decision variables, `bounds`.
residual : np.ndarray
The (nominally positive) values of the quantity
``x - lb`` (lower) or ``ub - x`` (upper).
marginals : np.ndarray
The sensitivity (partial derivative) of the objective
function with respect to the lower and upper
`bounds`.
mip_node_count : int
The number of subproblems or "nodes" solved by the MILP
solver. Only present when `integrality` is not `None`.
mip_dual_bound : float
The MILP solver's final estimate of the lower bound on the
optimal solution. Only present when `integrality` is not
`None`.
mip_gap : float
The difference between the final objective function value
and the final dual bound. Only present when `integrality`
is not `None`.
Notes
-----
The result fields `ineqlin`, `eqlin`, `lower`, and `upper` all contain
`marginals`, or partial derivatives of the objective function with respect
to the right-hand side of each constraint. These partial derivatives are
also referred to as "Lagrange multipliers", "dual values", and
"shadow prices". The sign convention of `marginals` is opposite that
of Lagrange multipliers produced by many nonlinear solvers.
References
----------
.. [15] Harris, Paula MJ. "Pivot selection methods of the Devex LP code."
Mathematical programming 5.1 (1973): 1-28.
.. [16] Goldfarb, Donald, and John Ker Reid. "A practicable steepest-edge
simplex algorithm." Mathematical Programming 12.1 (1977): 361-371.
"""
_check_unknown_options(unknown_options)
# Map options to HiGHS enum values
simplex_dual_edge_weight_strategy_enum = _convert_to_highs_enum(
simplex_dual_edge_weight_strategy,
'simplex_dual_edge_weight_strategy',
choices={'dantzig': HIGHS_SIMPLEX_DUAL_EDGE_WEIGHT_STRATEGY_DANTZIG,
'devex': HIGHS_SIMPLEX_DUAL_EDGE_WEIGHT_STRATEGY_DEVEX,
'steepest-devex': HIGHS_SIMPLEX_DUAL_EDGE_WEIGHT_STRATEGY_CHOOSE,
'steepest':
HIGHS_SIMPLEX_DUAL_EDGE_WEIGHT_STRATEGY_STEEPEST_EDGE,
None: None})
c, A_ub, b_ub, A_eq, b_eq, bounds, x0, integrality = lp
lb, ub = bounds.T.copy() # separate bounds, copy->C-cntgs
# highs_wrapper solves LHS <= A*x <= RHS, not equality constraints
lhs_ub = -np.ones_like(b_ub)*np.inf # LHS of UB constraints is -inf
rhs_ub = b_ub # RHS of UB constraints is b_ub
lhs_eq = b_eq # Equality constaint is inequality
rhs_eq = b_eq # constraint with LHS=RHS
lhs = np.concatenate((lhs_ub, lhs_eq))
rhs = np.concatenate((rhs_ub, rhs_eq))
if issparse(A_ub) or issparse(A_eq):
A = vstack((A_ub, A_eq))
else:
A = np.vstack((A_ub, A_eq))
A = csc_matrix(A)
options = {
'presolve': presolve,
'sense': HIGHS_OBJECTIVE_SENSE_MINIMIZE,
'solver': solver,
'time_limit': time_limit,
'highs_debug_level': MESSAGE_LEVEL_NONE,
'dual_feasibility_tolerance': dual_feasibility_tolerance,
'ipm_optimality_tolerance': ipm_optimality_tolerance,
'log_to_console': disp,
'output_flag': disp,
'primal_feasibility_tolerance': primal_feasibility_tolerance,
'simplex_dual_edge_weight_strategy':
simplex_dual_edge_weight_strategy_enum,
'simplex_strategy': HIGHS_SIMPLEX_STRATEGY_DUAL,
'simplex_crash_strategy': HIGHS_SIMPLEX_CRASH_STRATEGY_OFF,
'ipm_iteration_limit': maxiter,
'simplex_iteration_limit': maxiter,
}
# np.inf doesn't work; use very large constant
rhs = _replace_inf(rhs)
lhs = _replace_inf(lhs)
lb = _replace_inf(lb)
ub = _replace_inf(ub)
if integrality is None or np.sum(integrality) == 0:
integrality = np.empty(0)
else:
integrality = np.array(integrality)
res = _highs_wrapper(c, A.indptr, A.indices, A.data, lhs, rhs,
lb, ub, integrality.astype(np.uint8), options)
# HiGHS represents constraints as lhs/rhs, so
# Ax + s = b => Ax = b - s
# and we need to split up s by A_ub and A_eq
if 'slack' in res:
slack = res['slack']
con = np.array(slack[len(b_ub):])
slack = np.array(slack[:len(b_ub)])
else:
slack, con = None, None
# lagrange multipliers for equalities/inequalities and upper/lower bounds
if 'lambda' in res:
lamda = res['lambda']
marg_ineqlin = np.array(lamda[:len(b_ub)])
marg_eqlin = np.array(lamda[len(b_ub):])
marg_upper = np.array(res['marg_bnds'][1, :])
marg_lower = np.array(res['marg_bnds'][0, :])
else:
marg_ineqlin, marg_eqlin = None, None
marg_upper, marg_lower = None, None
# this needs to be updated if we start choosing the solver intelligently
solvers = {"ipm": "highs-ipm", "simplex": "highs-ds", None: "highs-ds"}
# Convert to scipy-style status and message
highs_status = res.get('status', None)
highs_message = res.get('message', None)
status, message = _highs_to_scipy_status_message(highs_status,
highs_message)
x = np.array(res['x']) if 'x' in res else None
sol = {'x': x,
'slack': slack,
'con': con,
'ineqlin': OptimizeResult({
'residual': slack,
'marginals': marg_ineqlin,
}),
'eqlin': OptimizeResult({
'residual': con,
'marginals': marg_eqlin,
}),
'lower': OptimizeResult({
'residual': None if x is None else x - lb,
'marginals': marg_lower,
}),
'upper': OptimizeResult({
'residual': None if x is None else ub - x,
'marginals': marg_upper
}),
'fun': res.get('fun'),
'status': status,
'success': res['status'] == MODEL_STATUS_OPTIMAL,
'message': message,
'nit': res.get('simplex_nit', 0) or res.get('ipm_nit', 0),
'crossover_nit': res.get('crossover_nit'),
}
if np.any(x) and integrality is not None:
res.update({
'mip_node_count': res.get('mip_node_count', 0),
'mip_dual_bound': res.get('mip_dual_bound', 0.0),
'mip_gap': res.get('mip_gap', 0.0),
})
return sol
| {
"content_hash": "7fbb115470736ec4d0b86d3e367e7bf4",
"timestamp": "",
"source": "github",
"line_count": 431,
"max_line_length": 82,
"avg_line_length": 39.656612529002324,
"alnum_prop": 0.5994032295810906,
"repo_name": "vigna/scipy",
"id": "2f0b53c64e61a61fb683f963cb9d025fd6506749",
"size": "17092",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scipy/optimize/_linprog_highs.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4491892"
},
{
"name": "C++",
"bytes": "960140"
},
{
"name": "Cython",
"bytes": "1050681"
},
{
"name": "Dockerfile",
"bytes": "9839"
},
{
"name": "Fortran",
"bytes": "5299482"
},
{
"name": "MATLAB",
"bytes": "4346"
},
{
"name": "Makefile",
"bytes": "778"
},
{
"name": "Meson",
"bytes": "141627"
},
{
"name": "Python",
"bytes": "14969167"
},
{
"name": "Shell",
"bytes": "3533"
},
{
"name": "TeX",
"bytes": "52106"
}
],
"symlink_target": ""
} |
import random
__author__ = "Patrick Lampe"
__email__ = "uni at lampep.de"
class AbstractNode:
    """Base class for simulated network nodes driven by movement patterns.

    Attributes
    ----------
    crnt_movement_pattern : object or None
        Currently active movement pattern; must be assigned before any
        of the location/step methods are called.
    dict_of_movement_pattern : dict
        Registry of movement patterns available to this node.
    node_id : int
        Identifier of this node.
    color : str
        Random "#RRGGBB" color, e.g. for visualisation.
    """

    def __init__(self, node_id):
        self.crnt_movement_pattern = None
        self.dict_of_movement_pattern = {}
        self.node_id = node_id

        def rand_channel():
            # one random 8-bit color channel
            return random.randint(0, 255)

        self.color = '#%02X%02X%02X' % (rand_channel(), rand_channel(),
                                        rand_channel())

    def get_name_of_movement_patter(self, movement_pattern):
        """Return the name of the given movement pattern.

        Currently a stub: always returns "" (the real lookup is disabled).
        """
        return ""  # self.crnt_movement_pattern.get_name()

    def get_distance_in_m(self, snd_node):
        """Return the distance to *snd_node* in meters.

        Parameters
        ----------
        snd_node : Node

        Returns
        -------
        float
        """
        return self.get_distance_in_km(snd_node) * 1000

    def get_distance_in_km(self, snd_node):
        """Return the distance to *snd_node* in kilometers.

        Both nodes must already have a current movement pattern with a
        location.

        Parameters
        ----------
        snd_node : Node

        Returns
        -------
        float
        """
        return self.crnt_movement_pattern.location.get_distance_in_km(
            snd_node.crnt_movement_pattern.location)

    def get_lat(self):
        """Return the latitude of the node's current position.

        Returns
        -------
        LatLon.lat
        """
        return self.crnt_movement_pattern.get_lat()

    def get_lon(self):
        """Return the longitude of the node's current position.

        Returns
        -------
        str
            Second element of the pattern location's string form
            (presumably the longitude component).
        """
        return self.crnt_movement_pattern.location.get_lat_lon().to_string()[1]

    def step(self):
        """Advance the node one simulation step along its current pattern."""
        self.__check_conditions()
        self.crnt_movement_pattern.walk()

    def __check_conditions(self):
        # Hook for subclasses to validate or switch the movement pattern
        # before each step; intentionally a no-op here.
        pass
        # TODO:
        # raise NotImplementedError( "Should have implemented this" )
| {
"content_hash": "5179dab3b9544b99ae481f91704125d6",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 110,
"avg_line_length": 20.76829268292683,
"alnum_prop": 0.4926600117439812,
"repo_name": "miniworld-project/miniworld_core",
"id": "9edcbecc6948f1a0c3eb3db258104e956852ecb6",
"size": "1723",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "miniworld/model/spatial/Node/AbstractNode.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "696934"
},
{
"name": "Shell",
"bytes": "1770"
}
],
"symlink_target": ""
} |
"""Family module for MediaWiki wiki."""
#
# (C) Pywikibot team, 2006-2015
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
from pywikibot import family
# The MediaWiki family
class Family(family.WikimediaFamily, family.SingleSiteFamily):

    """Single-site family for mediawiki.org, MediaWiki's own wiki."""

    name = 'mediawiki'
    domain = 'www.mediawiki.org'
| {
"content_hash": "e3e141077edf06a3e1f231c5443826f9",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 62,
"avg_line_length": 22.5,
"alnum_prop": 0.7234567901234568,
"repo_name": "magul/pywikibot-core",
"id": "5d5a3675c5eaabaa75517f8804514cb74770ae32",
"size": "429",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pywikibot/families/mediawiki_family.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "97"
},
{
"name": "HTML",
"bytes": "1365"
},
{
"name": "Python",
"bytes": "4538707"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import argparse
import git
import logging
import logging.config
import os
import shutil
import sys
import tarfile
import six
from past.builtins import basestring
from six.moves.urllib.parse import urlparse, urlunparse, urlencode
from six.moves.urllib.request import urlopen
from six.moves.urllib.error import HTTPError
#{{{ logging config
# Root logger at DEBUG, everything to stdout as "LEVEL: message".
_LOG_CONFIG = {
    'version': 1,
    'root': {
        'handlers': ['console'],
        'level': 'DEBUG',
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'default',
            'level': 'DEBUG',
            'stream': 'ext://sys.stdout',
        },
    },
    'formatters': {
        'default': {
            'format': '%(levelname)s: %(message)s',
        },
    },
}
logging.config.dictConfig(_LOG_CONFIG)
#}}}
#{{{ subparser implementation
class subcommand(object):
    """Decorator turning a function into an argparse sub-command.

    Usage: ``@subcommand`` registers the function under its own name;
    ``@subcommand("name")`` registers it under an explicit name.  The
    decorated object exposes its sub-parser's methods (add_argument, ...)
    via attribute delegation.
    """

    _parser = argparse.ArgumentParser()
    _subparser = _parser.add_subparsers(dest='command')

    def __new__(cls, command_or_f=None, command=None):
        # @subcommand("name"): return a decorator closing over the name.
        if isinstance(command_or_f, basestring):
            return lambda f: subcommand(f, command_or_f)
        # @subcommand: build the instance normally.
        elif callable(command_or_f):
            return object.__new__(cls)
        # NOTE(review): any other argument falls through and returns None.

    def __init__(self, function, command=None):
        # Keep the wrapped function so __call__ works; previously it was
        # only stored in the parser defaults and self.function raised.
        self.function = function
        self.parser = self._subparser.add_parser(command or function.__name__)
        self.parser.set_defaults(function=function)

    def __call__(self, *args, **kwargs):
        """Invoke the wrapped function directly."""
        return self.function(*args, **kwargs)

    def __getattr__(self, key):
        # Delegate unknown attributes (add_argument, ...) to the sub-parser.
        return getattr(self.parser, key)

    @classmethod
    def parse_args(cls, *args, **kwargs):
        """Parse argv with the shared top-level parser."""
        return cls._parser.parse_args(*args, **kwargs)

    @classmethod
    def dispatch(cls, *args, **kwargs):
        """Parse argv and run the selected sub-command, if any."""
        ns = cls._parser.parse_args(*args, **kwargs)
        if hasattr(ns, "function"):
            return ns.function(ns)
        else:
            print("usage: rubyenv -h {install, list}")

    @classmethod
    def set_defaults(cls, *args, **kwargs):
        """Forward defaults to the shared top-level parser."""
        cls._parser.set_defaults(*args, **kwargs)
#}}}
def get_virtualenv_dir():
    """Return sys.prefix when running inside a virtualenv/venv, else raise."""
    # Old-style virtualenv marks itself with sys.real_prefix; PEP 405
    # venvs make sys.base_prefix differ from sys.prefix.
    old_style = hasattr(sys, 'real_prefix')
    pep405 = getattr(sys, 'base_prefix', sys.prefix) != sys.prefix
    if old_style or pep405:
        return sys.prefix
    raise Exception('no virtualenv found')
def ensure_ruby_build():
    """Clone (or reuse) ruby-build under <venv>/src and return its working dir."""
    src_dir = os.path.join(get_virtualenv_dir(), 'src')
    if not os.path.exists(src_dir):
        os.makedirs(src_dir)
    checkout = os.path.join(src_dir, 'ruby-build')
    try:
        repo = git.Repo(checkout)
    except (git.exc.NoSuchPathError, git.exc.InvalidGitRepositoryError):
        # First run (or broken checkout): clone fresh.
        repo = git.Repo.clone_from(
            'https://github.com/sstephenson/ruby-build.git', checkout)
    return repo.working_dir
def _get_prebuilt_list():
    """Yield (version, urlparse-result) pairs of RVM prebuilt rubies for this host.

    Only yields on Linux, filtered to the current distro/release/arch.
    """
    import platform
    system = platform.system()
    if system == 'Linux':
        machine = platform.machine()
        # NOTE(review): platform.linux_distribution() was removed in
        # Python 3.8 -- this raises AttributeError on modern interpreters
        # (the third-party 'distro' package is the usual replacement).
        distro, vers, _ = platform.linux_distribution()
        distro = distro.lower()
        # RVM's 'remote' index is one binary-tarball URL per line.
        for url in urlopen('https://raw.githubusercontent.com/rvm/rvm/master/config/remote').read().splitlines():
            if (isinstance(url, bytes)):
                url = url.decode()
            url = urlparse(url)
            path = url.path.split('/')
            # Keep only URLs whose path mentions this distro, release and arch.
            if distro in path and vers in path and machine in path:
                ver = path[-1]
                ver, _ = os.path.splitext(ver)  # strip '.bz2' / '.gz'
                ver, _ = os.path.splitext(ver)  # strip '.tar'
                ver = ver.split('-')[1:]        # drop the leading 'ruby' token
                ver = '-'.join(ver)
                yield ver, url
def _get_numerical_version(ver):
ver = ver.split('-')
ver, patch = ver[0], ver[1:]
ver = ver.split('.')
if patch:
patch = patch[0][1:]
ver.append(patch)
else:
ver.append(0)
return ver
def _copytree(src, dst):
if not os.path.exists(dst):
os.makedirs(dst)
for item in os.listdir(src):
s = os.path.join(src, item)
d = os.path.join(dst, item)
if os.path.islink(s):
if os.path.lexists(d) or os.path.isfile(d):
os.remove(d)
elif os.path.isdir(d):
shutil.rmtree(d)
os.symlink(os.readlink(s), d)
elif os.path.isdir(s):
_copytree(s, d)
else:
try:
shutil.copy(s, d)
except IOError as e:
os.remove(d)
shutil.copy(s, d)
@subcommand
def install(ns):
    """Install a ruby into the active virtualenv.

    With --prebuilt, download an RVM prebuilt binary matching this
    platform; otherwise build the requested version via ruby-build.
    """
    if ns.prebuilt:
        try:
            # Candidates sorted oldest -> newest: 'break' keeps an exact
            # match, and falling through leaves (ver, url) on the newest.
            for ver, url in sorted(_get_prebuilt_list(), key=lambda v_u: _get_numerical_version(v_u[0])):
                if ver == ns.version:
                    break
            else:
                if ns.version != 'latest':
                    print('could not find version', ns.version)
                    sys.exit(1)
            tarname = os.path.basename(url.path)
            base, _ = os.path.splitext(tarname)
            base, _ = os.path.splitext(base)  # strip both '.tar' and '.bz2'/'.gz'
            tarpath = os.path.join(get_virtualenv_dir(), 'src', tarname)
            extractdir = os.path.dirname(tarpath)
            extractpath = os.path.join(extractdir, base)
            resp = urlopen(urlunparse(url))
            content = resp.read()
            with open(tarpath, 'wb') as f:
                f.write(content)
            if os.path.exists(extractpath):
                shutil.rmtree(extractpath)
            t = tarfile.open(tarpath)
            t.extractall(extractdir)
            t.close()
            _copytree(extractpath, get_virtualenv_dir())
            # Replace the gem cache with an empty directory.
            # NOTE(review): os.unlink() fails on a real directory -- this
            # appears to assume the extracted cache is a symlink; confirm.
            cachedir = os.path.join(get_virtualenv_dir(), 'lib', 'ruby', 'gems', '2.2.0', 'cache')
            os.unlink(cachedir)
            os.mkdir(cachedir)
            return
        except Exception as e:
            print('Could not install prebuilt binary', e)
            import traceback
            traceback.print_exc()
            sys.exit(1)
    # Source build: delegate to the ruby-build checkout.
    ruby_build = os.path.join(ensure_ruby_build(), 'bin', 'ruby-build')
    cmd = '%s %s %s %s' %(ruby_build, ns.version, get_virtualenv_dir(), '-v')
    print("Calling: %s" % cmd)
    os.system(cmd)
install.add_argument('version', type=str, nargs='?', default='latest')
install.add_argument('--prebuilt', action='store_true')
@subcommand('list')
def _list(ns):
    """List installable rubies: RVM prebuilts with --prebuilt, else ruby-build definitions."""
    if ns.prebuilt:
        try:
            for ver, url in _get_prebuilt_list():
                print(ver)
            return
        except Exception as e:
            print('Could not load prebuilt list', e)
            sys.exit(1)
    ruby_build = os.path.join(ensure_ruby_build(), 'bin', 'ruby-build')
    os.system('%s --definitions' % ruby_build)
_list.add_argument('--prebuilt', action='store_true')
def main():
    """Console entry point: parse argv and run the selected sub-command."""
    subcommand.dispatch()
| {
"content_hash": "ac15ad802bbc0f571ec44e2d6c735d68",
"timestamp": "",
"source": "github",
"line_count": 209,
"max_line_length": 109,
"avg_line_length": 31.641148325358852,
"alnum_prop": 0.5646453954332376,
"repo_name": "twang817/rubyenv",
"id": "c8279f085768791cad32ff2a07ee3ba3404f7512",
"size": "6613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rubyenv/app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7563"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class XaxisValidator(_plotly_utils.basevalidators.SubplotidValidator):
    """Validator for the ``image.xaxis`` subplot reference."""

    def __init__(self, plotly_name="xaxis", parent_name="image", **kwargs):
        # Pull overridable defaults out of kwargs before forwarding.
        dflt = kwargs.pop("dflt", "x")
        edit_type = kwargs.pop("edit_type", "calc+clearAxisTypes")
        role = kwargs.pop("role", "info")
        super(XaxisValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            dflt=dflt,
            edit_type=edit_type,
            role=role,
            **kwargs
        )
| {
"content_hash": "94e1f6497503ffbe5e7f87bfd45dfde3",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 75,
"avg_line_length": 37.92307692307692,
"alnum_prop": 0.5983772819472617,
"repo_name": "plotly/python-api",
"id": "aefb3fb4a36aeeca62a6880db10418f1aeeda2e8",
"size": "493",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/image/_xaxis.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
} |
"""Test the Soundtouch component."""
from unittest.mock import call, patch
from libsoundtouch.device import (
Config,
Preset,
SoundTouchDevice as STD,
Status,
Volume,
ZoneSlave,
ZoneStatus,
)
import pytest
from homeassistant.components.media_player.const import (
ATTR_INPUT_SOURCE,
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
)
from homeassistant.components.soundtouch import media_player as soundtouch
from homeassistant.components.soundtouch.const import DOMAIN
from homeassistant.components.soundtouch.media_player import (
ATTR_SOUNDTOUCH_GROUP,
ATTR_SOUNDTOUCH_ZONE,
DATA_SOUNDTOUCH,
)
from homeassistant.const import STATE_OFF, STATE_PAUSED, STATE_PLAYING
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.setup import async_setup_component
# pylint: disable=super-init-not-called
# Host addresses and device ids shared by the fixtures and mocks below.
DEVICE_1_IP = "192.168.0.1"
DEVICE_2_IP = "192.168.0.2"
DEVICE_1_ID = 1
DEVICE_2_ID = 2
def get_config(host=DEVICE_1_IP, port=8090, name="soundtouch"):
    """Build a soundtouch media_player platform configuration dict."""
    return dict(platform=DOMAIN, host=host, port=port, name=name)
# Static platform configs for the two mocked speakers.
DEVICE_1_CONFIG = {**get_config(), "name": "soundtouch_1"}
DEVICE_2_CONFIG = {**get_config(), "host": DEVICE_2_IP, "name": "soundtouch_2"}
@pytest.fixture(name="one_device")
def one_device_fixture():
    """Patch soundtouch_device to always return one MockDevice; yield the patch mock."""
    device_1 = MockDevice()
    device_patch = patch(
        "homeassistant.components.soundtouch.media_player.soundtouch_device",
        return_value=device_1,
    )
    with device_patch as device:
        yield device
@pytest.fixture(name="two_zones")
def two_zones_fixture():
    """Patch soundtouch_device to return a master/slave pair keyed by host IP."""
    # Device 1 is the zone master, device 2 the slave of device 1.
    device_1 = MockDevice(
        DEVICE_1_ID,
        MockZoneStatus(
            is_master=True,
            master_id=DEVICE_1_ID,
            master_ip=DEVICE_1_IP,
            slaves=[MockZoneSlave(DEVICE_2_IP)],
        ),
    )
    device_2 = MockDevice(
        DEVICE_2_ID,
        MockZoneStatus(
            is_master=False,
            master_id=DEVICE_1_ID,
            master_ip=DEVICE_1_IP,
            slaves=[MockZoneSlave(DEVICE_2_IP)],
        ),
    )
    devices = {DEVICE_1_IP: device_1, DEVICE_2_IP: device_2}
    device_patch = patch(
        "homeassistant.components.soundtouch.media_player.soundtouch_device",
        side_effect=lambda host, _: devices[host],
    )
    with device_patch as device:
        yield device
@pytest.fixture(name="mocked_status")
def status_fixture():
    """Patch SoundTouchDevice.status to report a playing track; yield the patch mock."""
    status_patch = patch(
        "libsoundtouch.device.SoundTouchDevice.status", side_effect=MockStatusPlaying
    )
    with status_patch as status:
        yield status
@pytest.fixture(name="mocked_volume")
def volume_fixture():
    """Patch SoundTouchDevice.volume (plain MagicMock); yield the patch mock."""
    volume_patch = patch("libsoundtouch.device.SoundTouchDevice.volume")
    with volume_patch as volume:
        yield volume
async def setup_soundtouch(hass, config):
    """Set up the media_player platform with *config* and wait for HA to settle."""
    assert await async_setup_component(hass, "media_player", {"media_player": config})
    await hass.async_block_till_done()
    await hass.async_start()
class MockDevice(STD):
    """Mock device."""
    def __init__(self, id=None, zone_status=None):
        """Init the class."""
        # Intentionally does not call STD.__init__ (would do network I/O);
        # only the private attributes the tests rely on are populated.
        self._config = MockConfig(id)
        self._zone_status = zone_status or MockZoneStatus()
    def zone_status(self, refresh=True):
        """Zone status mock object."""
        return self._zone_status
class MockConfig(Config):
    """Mock config."""
    def __init__(self, id=None):
        """Init class."""
        # _name/_id back the libsoundtouch Config properties --
        # presumably .name and .device_id; not verified here.
        self._name = "name"
        self._id = id or DEVICE_1_ID
class MockZoneStatus(ZoneStatus):
    """Mock zone status."""
    def __init__(self, is_master=True, master_id=None, master_ip=None, slaves=None):
        """Init the class."""
        # Private attributes back the libsoundtouch ZoneStatus properties.
        self._is_master = is_master
        self._master_id = master_id
        self._master_ip = master_ip
        self._slaves = slaves or []
class MockZoneSlave(ZoneSlave):
    """Mock zone slave."""
    def __init__(self, device_ip=None, role=None):
        """Init the class."""
        # Backs the ZoneSlave ip/role properties of libsoundtouch.
        self._ip = device_ip
        self._role = role
def _mocked_presets(*args, **kwargs):
    """Return a list of mocked presets (a single preset with id "1")."""
    return [MockPreset("1")]
class MockPreset(Preset):
    """Mock preset."""
    def __init__(self, id_):
        """Init the class."""
        # Backs the libsoundtouch Preset id/name properties.
        self._id = id_
        self._name = "preset"
class MockVolume(Volume):
    """Mock volume with value."""
    def __init__(self):
        """Init class."""
        # Device volume is on a 0-100 scale; 12 maps to volume_level 0.12.
        self._actual = 12
        self._muted = False
class MockVolumeMuted(Volume):
    """Mock volume muted."""
    def __init__(self):
        """Init the class."""
        # Same level as MockVolume but with the mute flag set.
        self._actual = 12
        self._muted = True
class MockStatusStandby(Status):
    """Mock status standby."""
    def __init__(self):
        """Init the class."""
        # STANDBY source -> entity state 'off'.
        self._source = "STANDBY"
class MockStatusPlaying(Status):
    """Mock status playing media."""
    def __init__(self):
        """Init the class."""
        # PLAY_STATE with full track metadata.
        self._source = ""
        self._play_status = "PLAY_STATE"
        self._image = "image.url"
        self._artist = "artist"
        self._track = "track"
        self._album = "album"
        self._duration = 1
        self._station_name = None
class MockStatusPlayingRadio(Status):
    """Mock status radio."""
    def __init__(self):
        """Init the class."""
        # Radio: no track metadata, only a station name.
        self._source = ""
        self._play_status = "PLAY_STATE"
        self._image = "image.url"
        self._artist = None
        self._track = None
        self._album = None
        self._duration = None
        self._station_name = "station"
class MockStatusUnknown(Status):
    """Mock status unknown media."""
    def __init__(self):
        """Init the class."""
        # Playing, but with no metadata at all.
        self._source = ""
        self._play_status = "PLAY_STATE"
        self._image = "image.url"
        self._artist = None
        self._track = None
        self._album = None
        self._duration = None
        self._station_name = None
class MockStatusPause(Status):
    """Mock status pause."""
    def __init__(self):
        """Init the class."""
        # PAUSE_STATE -> entity state 'paused'.
        self._source = ""
        self._play_status = "PAUSE_STATE"
        self._image = "image.url"
        self._artist = None
        self._track = None
        self._album = None
        self._duration = None
        self._station_name = None
class MockStatusPlayingAux(Status):
    """Mock status AUX."""
    def __init__(self):
        """Init the class."""
        # AUX input: playing, no metadata.
        self._source = "AUX"
        self._play_status = "PLAY_STATE"
        self._image = "image.url"
        self._artist = None
        self._track = None
        self._album = None
        self._duration = None
        self._station_name = None
class MockStatusPlayingBluetooth(Status):
    """Mock status Bluetooth."""
    def __init__(self):
        """Init the class."""
        # Bluetooth source with track metadata but no duration.
        self._source = "BLUETOOTH"
        self._play_status = "PLAY_STATE"
        self._image = "image.url"
        self._artist = "artist"
        self._track = "track"
        self._album = "album"
        self._duration = None
        self._station_name = None
async def test_ensure_setup_config(mocked_status, mocked_volume, hass, one_device):
    """Set up from YAML config with custom host, port and entity name."""
    await setup_soundtouch(
        hass, get_config(host="192.168.1.44", port=8888, name="custom_sound")
    )
    assert one_device.call_count == 1
    assert one_device.call_args == call("192.168.1.44", 8888)
    assert len(hass.states.async_all()) == 1
    state = hass.states.get("media_player.custom_sound")
    assert state.name == "custom_sound"
async def test_ensure_setup_discovery(mocked_status, mocked_volume, hass, one_device):
    """Set up a device announced through a discovery payload."""
    new_device = {
        "port": "8090",
        "host": "192.168.1.1",
        "properties": {},
        "hostname": "hostname.local",
    }
    await async_load_platform(
        hass, "media_player", DOMAIN, new_device, {"media_player": {}}
    )
    await hass.async_block_till_done()
    assert one_device.call_count == 1
    assert one_device.call_args == call("192.168.1.1", 8090)
    assert len(hass.states.async_all()) == 1
async def test_ensure_setup_discovery_no_duplicate(
    mocked_status, mocked_volume, hass, one_device
):
    """Discovery adds a new host but ignores one that is already configured."""
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert len(hass.states.async_all()) == 1
    # A genuinely new host: a second entity must be added.
    new_device = {
        "port": "8090",
        "host": "192.168.1.1",
        "properties": {},
        "hostname": "hostname.local",
    }
    await async_load_platform(
        hass, "media_player", DOMAIN, new_device, {"media_player": DEVICE_1_CONFIG}
    )
    await hass.async_block_till_done()
    assert one_device.call_count == 2
    assert len(hass.states.async_all()) == 2
    # Same host as DEVICE_1_CONFIG: must NOT create a third entity.
    existing_device = {
        "port": "8090",
        "host": "192.168.0.1",
        "properties": {},
        "hostname": "hostname.local",
    }
    await async_load_platform(
        hass, "media_player", DOMAIN, existing_device, {"media_player": DEVICE_1_CONFIG}
    )
    await hass.async_block_till_done()
    assert one_device.call_count == 2
    assert len(hass.states.async_all()) == 2
async def test_playing_media(mocked_status, mocked_volume, hass, one_device):
    """Track metadata is exposed while playing regular media."""
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    entity_1_state = hass.states.get("media_player.soundtouch_1")
    assert entity_1_state.state == STATE_PLAYING
    # Title is synthesized from artist and track.
    assert entity_1_state.attributes["media_title"] == "artist - track"
    assert entity_1_state.attributes["media_track"] == "track"
    assert entity_1_state.attributes["media_artist"] == "artist"
    assert entity_1_state.attributes["media_album_name"] == "album"
    assert entity_1_state.attributes["media_duration"] == 1
async def test_playing_unknown_media(mocked_status, mocked_volume, hass, one_device):
    """Entity is still 'playing' when the source reports no metadata."""
    mocked_status.side_effect = MockStatusUnknown
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    entity_1_state = hass.states.get("media_player.soundtouch_1")
    assert entity_1_state.state == STATE_PLAYING
async def test_playing_radio(mocked_status, mocked_volume, hass, one_device):
    """Station name becomes the media title when playing radio."""
    mocked_status.side_effect = MockStatusPlayingRadio
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    entity_1_state = hass.states.get("media_player.soundtouch_1")
    assert entity_1_state.state == STATE_PLAYING
    assert entity_1_state.attributes["media_title"] == "station"
async def test_playing_aux(mocked_status, mocked_volume, hass, one_device):
    """Source attribute reports AUX when playing from the aux input."""
    mocked_status.side_effect = MockStatusPlayingAux
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    entity_1_state = hass.states.get("media_player.soundtouch_1")
    assert entity_1_state.state == STATE_PLAYING
    assert entity_1_state.attributes["source"] == "AUX"
async def test_playing_bluetooth(mocked_status, mocked_volume, hass, one_device):
    """Source and track metadata are reported for Bluetooth playback."""
    mocked_status.side_effect = MockStatusPlayingBluetooth
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    entity_1_state = hass.states.get("media_player.soundtouch_1")
    assert entity_1_state.state == STATE_PLAYING
    assert entity_1_state.attributes["source"] == "BLUETOOTH"
    assert entity_1_state.attributes["media_track"] == "track"
    assert entity_1_state.attributes["media_artist"] == "artist"
    assert entity_1_state.attributes["media_album_name"] == "album"
async def test_get_volume_level(mocked_status, mocked_volume, hass, one_device):
    """volume_level is the device's 0-100 volume scaled to 0..1."""
    mocked_volume.side_effect = MockVolume
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    entity_1_state = hass.states.get("media_player.soundtouch_1")
    assert entity_1_state.attributes["volume_level"] == 0.12
async def test_get_state_off(mocked_status, mocked_volume, hass, one_device):
    """Entity is 'off' while the device reports the STANDBY source."""
    mocked_status.side_effect = MockStatusStandby
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    entity_1_state = hass.states.get("media_player.soundtouch_1")
    assert entity_1_state.state == STATE_OFF
async def test_get_state_pause(mocked_status, mocked_volume, hass, one_device):
    """Entity is 'paused' while the device reports PAUSE_STATE."""
    mocked_status.side_effect = MockStatusPause
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    entity_1_state = hass.states.get("media_player.soundtouch_1")
    assert entity_1_state.state == STATE_PAUSED
async def test_is_muted(mocked_status, mocked_volume, hass, one_device):
    """is_volume_muted mirrors the device's mute flag."""
    mocked_volume.side_effect = MockVolumeMuted
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    entity_1_state = hass.states.get("media_player.soundtouch_1")
    assert entity_1_state.attributes["is_volume_muted"]
async def test_media_commands(mocked_status, mocked_volume, hass, one_device):
    """Supported-features bitmask matches the expected soundtouch command set."""
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    entity_1_state = hass.states.get("media_player.soundtouch_1")
    assert entity_1_state.attributes["supported_features"] == 20413
@patch("libsoundtouch.device.SoundTouchDevice.power_off")
async def test_should_turn_off(
    mocked_power_off, mocked_status, mocked_volume, hass, one_device
):
    """turn_off calls the device power_off API and refreshes status."""
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    await hass.services.async_call(
        "media_player",
        "turn_off",
        {"entity_id": "media_player.soundtouch_1"},
        True,
    )
    # One extra status poll after the service call.
    assert mocked_status.call_count == 3
    assert mocked_power_off.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.power_on")
async def test_should_turn_on(
    mocked_power_on, mocked_status, mocked_volume, hass, one_device
):
    """turn_on calls the device power_on API from the standby state."""
    mocked_status.side_effect = MockStatusStandby
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    await hass.services.async_call(
        "media_player",
        "turn_on",
        {"entity_id": "media_player.soundtouch_1"},
        True,
    )
    # One extra status poll after the service call.
    assert mocked_status.call_count == 3
    assert mocked_power_on.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.volume_up")
async def test_volume_up(
    mocked_volume_up, mocked_status, mocked_volume, hass, one_device
):
    """volume_up service calls the device volume_up API and refreshes volume."""
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    await hass.services.async_call(
        "media_player",
        "volume_up",
        {"entity_id": "media_player.soundtouch_1"},
        True,
    )
    # One extra volume poll after the service call.
    assert mocked_volume.call_count == 3
    assert mocked_volume_up.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.volume_down")
async def test_volume_down(
    mocked_volume_down, mocked_status, mocked_volume, hass, one_device
):
    """volume_down service calls the device volume_down API and refreshes volume."""
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    await hass.services.async_call(
        "media_player",
        "volume_down",
        {"entity_id": "media_player.soundtouch_1"},
        True,
    )
    # One extra volume poll after the service call.
    assert mocked_volume.call_count == 3
    assert mocked_volume_down.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.set_volume")
async def test_set_volume_level(
    mocked_set_volume, mocked_status, mocked_volume, hass, one_device
):
    """volume_set converts the 0..1 level to the device's 0-100 scale."""
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    await hass.services.async_call(
        "media_player",
        "volume_set",
        {"entity_id": "media_player.soundtouch_1", "volume_level": 0.17},
        True,
    )
    assert mocked_volume.call_count == 3
    # 0.17 -> 17 on the device's 0-100 scale.
    mocked_set_volume.assert_called_with(17)
@patch("libsoundtouch.device.SoundTouchDevice.mute")
async def test_mute(mocked_mute, mocked_status, mocked_volume, hass, one_device):
    """volume_mute calls the device mute API and refreshes volume."""
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    await hass.services.async_call(
        "media_player",
        "volume_mute",
        {"entity_id": "media_player.soundtouch_1", "is_volume_muted": True},
        True,
    )
    assert mocked_volume.call_count == 3
    assert mocked_mute.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.play")
async def test_play(mocked_play, mocked_status, mocked_volume, hass, one_device):
    """media_play calls the device play API and refreshes status."""
    await setup_soundtouch(hass, DEVICE_1_CONFIG)
    assert one_device.call_count == 1
    assert mocked_status.call_count == 2
    assert mocked_volume.call_count == 2
    await hass.services.async_call(
        "media_player",
        "media_play",
        {"entity_id": "media_player.soundtouch_1"},
        True,
    )
    assert mocked_status.call_count == 3
    assert mocked_play.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.pause")
async def test_pause(mocked_pause, mocked_status, mocked_volume, hass, one_device):
"""Test pause command."""
await setup_soundtouch(hass, DEVICE_1_CONFIG)
assert one_device.call_count == 1
assert mocked_status.call_count == 2
assert mocked_volume.call_count == 2
await hass.services.async_call(
"media_player",
"media_pause",
{"entity_id": "media_player.soundtouch_1"},
True,
)
assert mocked_status.call_count == 3
assert mocked_pause.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.play_pause")
async def test_play_pause(
mocked_play_pause, mocked_status, mocked_volume, hass, one_device
):
"""Test play/pause."""
await setup_soundtouch(hass, DEVICE_1_CONFIG)
assert one_device.call_count == 1
assert mocked_status.call_count == 2
assert mocked_volume.call_count == 2
await hass.services.async_call(
"media_player",
"media_play_pause",
{"entity_id": "media_player.soundtouch_1"},
True,
)
assert mocked_status.call_count == 3
assert mocked_play_pause.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.previous_track")
@patch("libsoundtouch.device.SoundTouchDevice.next_track")
async def test_next_previous_track(
mocked_next_track,
mocked_previous_track,
mocked_status,
mocked_volume,
hass,
one_device,
):
"""Test next/previous track."""
await setup_soundtouch(hass, DEVICE_1_CONFIG)
assert one_device.call_count == 1
assert mocked_status.call_count == 2
assert mocked_volume.call_count == 2
await hass.services.async_call(
"media_player",
"media_next_track",
{"entity_id": "media_player.soundtouch_1"},
True,
)
assert mocked_status.call_count == 3
assert mocked_next_track.call_count == 1
await hass.services.async_call(
"media_player",
"media_previous_track",
{"entity_id": "media_player.soundtouch_1"},
True,
)
assert mocked_status.call_count == 4
assert mocked_previous_track.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.select_preset")
@patch("libsoundtouch.device.SoundTouchDevice.presets", side_effect=_mocked_presets)
async def test_play_media(
mocked_presets, mocked_select_preset, mocked_status, mocked_volume, hass, one_device
):
"""Test play preset 1."""
await setup_soundtouch(hass, DEVICE_1_CONFIG)
assert one_device.call_count == 1
assert mocked_status.call_count == 2
assert mocked_volume.call_count == 2
await hass.services.async_call(
"media_player",
"play_media",
{
"entity_id": "media_player.soundtouch_1",
ATTR_MEDIA_CONTENT_TYPE: "PLAYLIST",
ATTR_MEDIA_CONTENT_ID: 1,
},
True,
)
assert mocked_presets.call_count == 1
assert mocked_select_preset.call_count == 1
await hass.services.async_call(
"media_player",
"play_media",
{
"entity_id": "media_player.soundtouch_1",
ATTR_MEDIA_CONTENT_TYPE: "PLAYLIST",
ATTR_MEDIA_CONTENT_ID: 2,
},
True,
)
assert mocked_presets.call_count == 2
assert mocked_select_preset.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.play_url")
async def test_play_media_url(
mocked_play_url, mocked_status, mocked_volume, hass, one_device
):
"""Test play preset 1."""
await setup_soundtouch(hass, DEVICE_1_CONFIG)
assert one_device.call_count == 1
assert mocked_status.call_count == 2
assert mocked_volume.call_count == 2
await hass.services.async_call(
"media_player",
"play_media",
{
"entity_id": "media_player.soundtouch_1",
ATTR_MEDIA_CONTENT_TYPE: "MUSIC",
ATTR_MEDIA_CONTENT_ID: "http://fqdn/file.mp3",
},
True,
)
mocked_play_url.assert_called_with("http://fqdn/file.mp3")
@patch("libsoundtouch.device.SoundTouchDevice.select_source_aux")
async def test_select_source_aux(
mocked_select_source_aux, mocked_status, mocked_volume, hass, one_device
):
"""Test select AUX."""
await setup_soundtouch(hass, DEVICE_1_CONFIG)
assert mocked_select_source_aux.call_count == 0
await hass.services.async_call(
"media_player",
"select_source",
{"entity_id": "media_player.soundtouch_1", ATTR_INPUT_SOURCE: "AUX"},
True,
)
assert mocked_select_source_aux.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.select_source_bluetooth")
async def test_select_source_bluetooth(
mocked_select_source_bluetooth, mocked_status, mocked_volume, hass, one_device
):
"""Test select Bluetooth."""
await setup_soundtouch(hass, DEVICE_1_CONFIG)
assert mocked_select_source_bluetooth.call_count == 0
await hass.services.async_call(
"media_player",
"select_source",
{"entity_id": "media_player.soundtouch_1", ATTR_INPUT_SOURCE: "BLUETOOTH"},
True,
)
assert mocked_select_source_bluetooth.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.select_source_bluetooth")
@patch("libsoundtouch.device.SoundTouchDevice.select_source_aux")
async def test_select_source_invalid_source(
mocked_select_source_aux,
mocked_select_source_bluetooth,
mocked_status,
mocked_volume,
hass,
one_device,
):
"""Test select unsupported source."""
await setup_soundtouch(hass, DEVICE_1_CONFIG)
assert mocked_select_source_aux.call_count == 0
assert mocked_select_source_bluetooth.call_count == 0
await hass.services.async_call(
"media_player",
"select_source",
{
"entity_id": "media_player.soundtouch_1",
ATTR_INPUT_SOURCE: "SOMETHING_UNSUPPORTED",
},
True,
)
assert mocked_select_source_aux.call_count == 0
assert mocked_select_source_bluetooth.call_count == 0
@patch("libsoundtouch.device.SoundTouchDevice.create_zone")
async def test_play_everywhere(
mocked_create_zone, mocked_status, mocked_volume, hass, two_zones
):
"""Test play everywhere."""
mocked_device = two_zones
await setup_soundtouch(hass, [DEVICE_1_CONFIG, DEVICE_2_CONFIG])
assert mocked_device.call_count == 2
assert mocked_status.call_count == 4
assert mocked_volume.call_count == 4
# one master, one slave => create zone
await hass.services.async_call(
soundtouch.DOMAIN,
soundtouch.SERVICE_PLAY_EVERYWHERE,
{"master": "media_player.soundtouch_1"},
True,
)
assert mocked_create_zone.call_count == 1
# unknown master, create zone must not be called
await hass.services.async_call(
soundtouch.DOMAIN,
soundtouch.SERVICE_PLAY_EVERYWHERE,
{"master": "media_player.entity_X"},
True,
)
assert mocked_create_zone.call_count == 1
# no slaves, create zone must not be called
for entity in list(hass.data[DATA_SOUNDTOUCH]):
if entity.entity_id == "media_player.soundtouch_1":
continue
hass.data[DATA_SOUNDTOUCH].remove(entity)
await entity.async_remove()
await hass.services.async_call(
soundtouch.DOMAIN,
soundtouch.SERVICE_PLAY_EVERYWHERE,
{"master": "media_player.soundtouch_1"},
True,
)
assert mocked_create_zone.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.create_zone")
async def test_create_zone(
mocked_create_zone, mocked_status, mocked_volume, hass, two_zones
):
"""Test creating a zone."""
mocked_device = two_zones
await setup_soundtouch(hass, [DEVICE_1_CONFIG, DEVICE_2_CONFIG])
assert mocked_device.call_count == 2
assert mocked_status.call_count == 4
assert mocked_volume.call_count == 4
# one master, one slave => create zone
await hass.services.async_call(
soundtouch.DOMAIN,
soundtouch.SERVICE_CREATE_ZONE,
{
"master": "media_player.soundtouch_1",
"slaves": ["media_player.soundtouch_2"],
},
True,
)
assert mocked_create_zone.call_count == 1
# unknown master, create zone must not be called
await hass.services.async_call(
soundtouch.DOMAIN,
soundtouch.SERVICE_CREATE_ZONE,
{"master": "media_player.entity_X", "slaves": ["media_player.soundtouch_2"]},
True,
)
assert mocked_create_zone.call_count == 1
# no slaves, create zone must not be called
await hass.services.async_call(
soundtouch.DOMAIN,
soundtouch.SERVICE_CREATE_ZONE,
{"master": "media_player.soundtouch_1", "slaves": []},
True,
)
assert mocked_create_zone.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.remove_zone_slave")
async def test_remove_zone_slave(
mocked_remove_zone_slave, mocked_status, mocked_volume, hass, two_zones
):
"""Test adding a slave to an existing zone."""
mocked_device = two_zones
await setup_soundtouch(hass, [DEVICE_1_CONFIG, DEVICE_2_CONFIG])
assert mocked_device.call_count == 2
assert mocked_status.call_count == 4
assert mocked_volume.call_count == 4
# remove one slave
await hass.services.async_call(
soundtouch.DOMAIN,
soundtouch.SERVICE_REMOVE_ZONE_SLAVE,
{
"master": "media_player.soundtouch_1",
"slaves": ["media_player.soundtouch_2"],
},
True,
)
assert mocked_remove_zone_slave.call_count == 1
# unknown master. add zone slave is not called
await hass.services.async_call(
soundtouch.DOMAIN,
soundtouch.SERVICE_REMOVE_ZONE_SLAVE,
{"master": "media_player.entity_X", "slaves": ["media_player.soundtouch_2"]},
True,
)
assert mocked_remove_zone_slave.call_count == 1
# no slave to add, add zone slave is not called
await hass.services.async_call(
soundtouch.DOMAIN,
soundtouch.SERVICE_REMOVE_ZONE_SLAVE,
{"master": "media_player.soundtouch_1", "slaves": []},
True,
)
assert mocked_remove_zone_slave.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.add_zone_slave")
async def test_add_zone_slave(
mocked_add_zone_slave,
mocked_status,
mocked_volume,
hass,
two_zones,
):
"""Test removing a slave from a zone."""
mocked_device = two_zones
await setup_soundtouch(hass, [DEVICE_1_CONFIG, DEVICE_2_CONFIG])
assert mocked_device.call_count == 2
assert mocked_status.call_count == 4
assert mocked_volume.call_count == 4
# add one slave
await hass.services.async_call(
soundtouch.DOMAIN,
soundtouch.SERVICE_ADD_ZONE_SLAVE,
{
"master": "media_player.soundtouch_1",
"slaves": ["media_player.soundtouch_2"],
},
True,
)
assert mocked_add_zone_slave.call_count == 1
# unknown master, add zone slave is not called
await hass.services.async_call(
soundtouch.DOMAIN,
soundtouch.SERVICE_ADD_ZONE_SLAVE,
{"master": "media_player.entity_X", "slaves": ["media_player.soundtouch_2"]},
True,
)
assert mocked_add_zone_slave.call_count == 1
# no slave to add, add zone slave is not called
await hass.services.async_call(
soundtouch.DOMAIN,
soundtouch.SERVICE_ADD_ZONE_SLAVE,
{"master": "media_player.soundtouch_1", "slaves": ["media_player.entity_X"]},
True,
)
assert mocked_add_zone_slave.call_count == 1
@patch("libsoundtouch.device.SoundTouchDevice.create_zone")
async def test_zone_attributes(
mocked_create_zone,
mocked_status,
mocked_volume,
hass,
two_zones,
):
"""Test play everywhere."""
mocked_device = two_zones
await setup_soundtouch(hass, [DEVICE_1_CONFIG, DEVICE_2_CONFIG])
assert mocked_device.call_count == 2
assert mocked_status.call_count == 4
assert mocked_volume.call_count == 4
entity_1_state = hass.states.get("media_player.soundtouch_1")
assert entity_1_state.attributes[ATTR_SOUNDTOUCH_ZONE]["is_master"]
assert (
entity_1_state.attributes[ATTR_SOUNDTOUCH_ZONE]["master"]
== "media_player.soundtouch_1"
)
assert entity_1_state.attributes[ATTR_SOUNDTOUCH_ZONE]["slaves"] == [
"media_player.soundtouch_2"
]
assert entity_1_state.attributes[ATTR_SOUNDTOUCH_GROUP] == [
"media_player.soundtouch_1",
"media_player.soundtouch_2",
]
entity_2_state = hass.states.get("media_player.soundtouch_2")
assert not entity_2_state.attributes[ATTR_SOUNDTOUCH_ZONE]["is_master"]
assert (
entity_2_state.attributes[ATTR_SOUNDTOUCH_ZONE]["master"]
== "media_player.soundtouch_1"
)
assert entity_2_state.attributes[ATTR_SOUNDTOUCH_ZONE]["slaves"] == [
"media_player.soundtouch_2"
]
assert entity_2_state.attributes[ATTR_SOUNDTOUCH_GROUP] == [
"media_player.soundtouch_1",
"media_player.soundtouch_2",
]
| {
"content_hash": "bd24bb19a48120627873e9da8ac77ffa",
"timestamp": "",
"source": "github",
"line_count": 1062,
"max_line_length": 88,
"avg_line_length": 30.129943502824858,
"alnum_prop": 0.6421651353209575,
"repo_name": "GenericStudent/home-assistant",
"id": "4ed8a648c770cbf2b062cf66447fc0f19d6ae45e",
"size": "31998",
"binary": false,
"copies": "14",
"ref": "refs/heads/dev",
"path": "tests/components/soundtouch/test_media_player.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3070"
},
{
"name": "Python",
"bytes": "44491729"
},
{
"name": "Shell",
"bytes": "5092"
}
],
"symlink_target": ""
} |
import unittest
from cStringIO import StringIO
import trac.tests.compat
from trac.mimeview.patch import PatchRenderer
from trac.test import EnvironmentStub, Mock
from trac.web.api import RequestDone
from trac.web.href import Href
from trac.wiki.web_api import WikiRenderer
class WikiRendererTestCase(unittest.TestCase):
    """Tests for the /wiki_render web API endpoint."""

    def setUp(self):
        # A stubbed environment is sufficient; no real Trac install needed.
        self.env = EnvironmentStub()
        self.mod = WikiRenderer(self.env)

    def test_load_stylesheet(self):
        """Rendering a #!text/x-diff block must load diff.css on the client."""
        captured = StringIO()

        def send(data):
            # Capture the response body; RequestDone marks request completion.
            captured.write(data)
            raise RequestDone

        wiki_text = """\
{{{#!text/x-diff
--- a/file.txt 2014-11-13 01:16:06 +0000
+++ b/file.txt 2014-11-13 01:16:06 +0000
@@ -1 +1 @@
-old line
+new line
}}}
"""
        req = Mock(method='POST', path_info='/wiki_render', session={},
                   args={'id': 'WikiStart', 'text': wiki_text},
                   abs_href=Href('http://example.com/'), href=Href('/'),
                   chrome={'links': {}, 'scripts': []}, perm=None,
                   authname=None, tz=None, send=send)
        self.assertTrue(self.mod.match_request(req))
        try:
            self.mod.process_request(req)
        except RequestDone:
            output = captured.getvalue()
            self.assertIn('<div class="code"><pre>', output)
            self.assertIn('jQuery.loadStyleSheet("'
                          '/chrome/common/css/diff.css"', output)
        else:
            self.fail('RequestDone not raised')
def suite():
    """Return the test suite for this module."""
    return unittest.TestLoader().loadTestsFromTestCase(WikiRendererTestCase)
if __name__ == '__main__':
    # Allow running this module directly; 'suite' is the default test target.
    unittest.main(defaultTest='suite')
| {
"content_hash": "121ba5a731e440a433f10ff5d9fc0bcd",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 72,
"avg_line_length": 29.425925925925927,
"alnum_prop": 0.5947136563876652,
"repo_name": "pkdevbox/trac",
"id": "adbf0e548543c0000d53bf7490806863c59f67eb",
"size": "2081",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "trac/wiki/tests/web_api.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3268"
},
{
"name": "CSS",
"bytes": "71129"
},
{
"name": "HTML",
"bytes": "356464"
},
{
"name": "JavaScript",
"bytes": "85641"
},
{
"name": "Makefile",
"bytes": "18957"
},
{
"name": "Python",
"bytes": "3830196"
},
{
"name": "Shell",
"bytes": "9573"
}
],
"symlink_target": ""
} |
from tempest_lib.common.utils import data_utils
from tempest_lib import decorators
from tempest import config
from tempest.openstack.common import log
from tempest.scenario import manager
from tempest import test
CONF = config.CONF  # global tempest configuration object
LOG = log.getLogger(__name__)  # module-level logger
class TestVolumeBootPattern(manager.ScenarioTest):
    """
    This test case attempts to reproduce the following steps:
    * Create in Cinder some bootable volume importing a Glance image
    * Boot an instance from the bootable volume
    * Write content to the volume
    * Delete an instance and Boot a new instance from the volume
    * Check written content in the instance
    * Create a volume snapshot while the instance is running
    * Boot an additional instance from the new snapshot based volume
    * Check written content in the instance booted from snapshot
    """
    @classmethod
    def skip_checks(cls):
        """Skip the whole test case when Cinder snapshot support is off."""
        super(TestVolumeBootPattern, cls).skip_checks()
        if not CONF.volume_feature_enabled.snapshot:
            raise cls.skipException("Cinder volume snapshots are disabled")
    def _create_volume_from_image(self):
        """Create a bootable volume from the configured Glance image."""
        img_uuid = CONF.compute.image_ref
        vol_name = data_utils.rand_name('volume-origin')
        return self.create_volume(name=vol_name, imageRef=img_uuid)
    def _boot_instance_from_volume(self, vol_id, keypair):
        """Boot a server from vol_id using the v1 block-device-mapping API."""
        # NOTE(gfidente): the syntax for block_device_mapping is
        # dev_name=id:type:size:delete_on_terminate
        # where type needs to be "snap" if the server is booted
        # from a snapshot, size instead can be safely left empty
        bd_map = [{
            'device_name': 'vda',
            'volume_id': vol_id,
            'delete_on_termination': '0'}]
        self.security_group = self._create_security_group()
        security_groups = [{'name': self.security_group['name']}]
        create_kwargs = {
            'block_device_mapping': bd_map,
            'key_name': keypair['name'],
            'security_groups': security_groups
        }
        # Empty image: the server boots from the mapped volume instead.
        return self.create_server(image='', create_kwargs=create_kwargs)
    def _create_snapshot_from_volume(self, vol_id):
        """Snapshot vol_id, register cleanup, and wait until available."""
        snap_name = data_utils.rand_name('snapshot')
        snap = self.snapshots_client.create_snapshot(
            volume_id=vol_id,
            force=True,
            display_name=snap_name)
        # Cleanup must wait for deletion so dependent resources can go too.
        self.addCleanup_with_wait(
            waiter_callable=self.snapshots_client.wait_for_resource_deletion,
            thing_id=snap['id'], thing_id_param='id',
            cleanup_callable=self.delete_wrapper,
            cleanup_args=[self.snapshots_client.delete_snapshot, snap['id']])
        self.snapshots_client.wait_for_snapshot_status(snap['id'], 'available')
        self.assertEqual(snap_name, snap['display_name'])
        return snap
    def _create_volume_from_snapshot(self, snap_id):
        """Create a new volume backed by the given snapshot."""
        vol_name = data_utils.rand_name('volume')
        return self.create_volume(name=vol_name, snapshot_id=snap_id)
    def _stop_instances(self, instances):
        """Stop all instances, then wait for each to reach SHUTOFF."""
        # NOTE(gfidente): two loops so we do not wait for the status twice
        for i in instances:
            self.servers_client.stop(i['id'])
        for i in instances:
            self.servers_client.wait_for_server_status(i['id'], 'SHUTOFF')
    def _detach_volumes(self, volumes):
        """Detach all volumes, then wait for each to become available."""
        # NOTE(gfidente): two loops so we do not wait for the status twice
        for v in volumes:
            self.volumes_client.detach_volume(v['id'])
        for v in volumes:
            self.volumes_client.wait_for_volume_status(v['id'], 'available')
    def _ssh_to_server(self, server, keypair):
        """Return an SSH client to server, via a floating IP if configured."""
        if CONF.compute.use_floatingip_for_ssh:
            floating_ip = self.floating_ips_client.create_floating_ip()
            self.addCleanup(self.delete_wrapper,
                            self.floating_ips_client.delete_floating_ip,
                            floating_ip['id'])
            self.floating_ips_client.associate_floating_ip_to_server(
                floating_ip['ip'], server['id'])
            ip = floating_ip['ip']
        else:
            network_name_for_ssh = CONF.compute.network_for_ssh
            ip = server.networks[network_name_for_ssh][0]
        return self.get_remote_client(ip, private_key=keypair['private_key'],
                                      log_console_of_servers=[server])
    def _get_content(self, ssh_client):
        """Read back the marker file previously written on the guest."""
        return ssh_client.exec_command('cat /tmp/text')
    def _write_text(self, ssh_client):
        """Write a random marker to /tmp/text on the guest and return it."""
        text = data_utils.rand_name('text-')
        ssh_client.exec_command('echo "%s" > /tmp/text; sync' % (text))
        return self._get_content(ssh_client)
    def _delete_server(self, server):
        """Delete server and wait until it is fully terminated."""
        self.servers_client.delete_server(server['id'])
        self.servers_client.wait_for_server_termination(server['id'])
    def _check_content_of_written_file(self, ssh_client, expected):
        """Assert the guest's marker file content matches expected."""
        actual = self._get_content(ssh_client)
        self.assertEqual(expected, actual)
    @decorators.skip_because(bug='1373513')
    @test.idempotent_id('557cd2c2-4eb8-4dce-98be-f86765ff311b')
    @test.services('compute', 'volume', 'image')
    def test_volume_boot_pattern(self):
        """Run the full boot-from-volume scenario (see class docstring)."""
        keypair = self.create_keypair()
        self.security_group = self._create_security_group()
        # create an instance from volume
        volume_origin = self._create_volume_from_image()
        instance_1st = self._boot_instance_from_volume(volume_origin['id'],
                                                       keypair)
        # write content to volume on instance
        ssh_client_for_instance_1st = self._ssh_to_server(instance_1st,
                                                          keypair)
        text = self._write_text(ssh_client_for_instance_1st)
        # delete instance
        self._delete_server(instance_1st)
        # create a 2nd instance from volume
        instance_2nd = self._boot_instance_from_volume(volume_origin['id'],
                                                       keypair)
        # check the content of written file
        ssh_client_for_instance_2nd = self._ssh_to_server(instance_2nd,
                                                          keypair)
        self._check_content_of_written_file(ssh_client_for_instance_2nd, text)
        # snapshot a volume
        snapshot = self._create_snapshot_from_volume(volume_origin['id'])
        # create a 3rd instance from snapshot
        volume = self._create_volume_from_snapshot(snapshot['id'])
        instance_from_snapshot = self._boot_instance_from_volume(volume['id'],
                                                                 keypair)
        # check the content of written file
        ssh_client = self._ssh_to_server(instance_from_snapshot, keypair)
        self._check_content_of_written_file(ssh_client, text)
        # NOTE(gfidente): ensure resources are in clean state for
        # deletion operations to succeed
        self._stop_instances([instance_2nd, instance_from_snapshot])
        self._detach_volumes([volume_origin, volume])
class TestVolumeBootPatternV2(TestVolumeBootPattern):
    """Same scenario as TestVolumeBootPattern, using block_device_mapping_v2."""

    def _boot_instance_from_volume(self, vol_id, keypair):
        """Boot a server from vol_id using the v2 block-device-mapping API."""
        block_device_mapping_v2 = [{
            'uuid': vol_id,
            'source_type': 'volume',
            'destination_type': 'volume',
            'boot_index': 0,
            'delete_on_termination': False,
        }]
        self.security_group = self._create_security_group()
        create_kwargs = {
            'block_device_mapping_v2': block_device_mapping_v2,
            'key_name': keypair['name'],
            'security_groups': [{'name': self.security_group['name']}],
        }
        # Empty image: the server boots from the mapped volume instead.
        return self.create_server(image='', create_kwargs=create_kwargs)
| {
"content_hash": "e73e8a75f8a4382ea1752bd03d45e56b",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 79,
"avg_line_length": 42.37362637362637,
"alnum_prop": 0.6130705394190872,
"repo_name": "rzarzynski/tempest",
"id": "3c5e88cda626b4255a3c207a6a70eeef6cbffb07",
"size": "8285",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tempest/scenario/test_volume_boot_pattern.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "695"
},
{
"name": "Python",
"bytes": "2888467"
},
{
"name": "Shell",
"bytes": "8560"
}
],
"symlink_target": ""
} |
from PyQt4 import QtCore, QtGui
# QString.fromUtf8 only exists with the PyQt4 v1 string API; with the v2 API
# (or Python 3) plain str is already unicode, so fall back to identity.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    _fromUtf8 = lambda s: s
class Ui_Form(object):
    """UI definition for the physiology analysis control panel.

    NOTE(review): the setupUi/retranslateUi/_fromUtf8 structure matches
    pyuic4-generated code; if this class is regenerated from a .ui file,
    manual edits would be lost -- confirm provenance before editing.
    """
    def setupUi(self, Form):
        """Create all widgets, layouts and default values on *Form*."""
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(315, 410)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())
        Form.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Arial"))
        font.setPointSize(12)
        Form.setFont(font)
        # Outer grid layout holding the single top-level group box.
        self.gridLayout = QtGui.QGridLayout(Form)
        self.gridLayout.setMargin(0)
        self.gridLayout.setHorizontalSpacing(10)
        self.gridLayout.setVerticalSpacing(0)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        self.groupBox = QtGui.QGroupBox(Form)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Ignored)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.groupBox.sizePolicy().hasHeightForWidth())
        self.groupBox.setSizePolicy(sizePolicy)
        self.groupBox.setAlignment(QtCore.Qt.AlignCenter)
        self.groupBox.setObjectName(_fromUtf8("groupBox"))
        self.gridLayout_2 = QtGui.QGridLayout(self.groupBox)
        self.gridLayout_2.setSizeConstraint(QtGui.QLayout.SetNoConstraint)
        self.gridLayout_2.setMargin(0)
        self.gridLayout_2.setHorizontalSpacing(5)
        self.gridLayout_2.setVerticalSpacing(0)
        self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
        # Decorative separator lines inside the group box.
        self.line = QtGui.QFrame(self.groupBox)
        self.line.setFrameShape(QtGui.QFrame.VLine)
        self.line.setFrameShadow(QtGui.QFrame.Sunken)
        self.line.setObjectName(_fromUtf8("line"))
        self.gridLayout_2.addWidget(self.line, 1, 1, 2, 1)
        self.line_2 = QtGui.QFrame(self.groupBox)
        self.line_2.setFrameShape(QtGui.QFrame.HLine)
        self.line_2.setFrameShadow(QtGui.QFrame.Sunken)
        self.line_2.setObjectName(_fromUtf8("line_2"))
        self.gridLayout_2.addWidget(self.line_2, 0, 0, 1, 1)
        self.widget_2 = QtGui.QWidget(self.groupBox)
        self.widget_2.setObjectName(_fromUtf8("widget_2"))
        # "Physiology" sub-group: LPF / event-threshold spin boxes and the
        # spike-detection button.
        self.groupBox_2 = QtGui.QGroupBox(self.widget_2)
        self.groupBox_2.setGeometry(QtCore.QRect(10, 5, 286, 156))
        self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
        self.layoutWidget = QtGui.QWidget(self.groupBox_2)
        self.layoutWidget.setGeometry(QtCore.QRect(10, 25, 266, 66))
        self.layoutWidget.setObjectName(_fromUtf8("layoutWidget"))
        self.gridLayout_4 = QtGui.QGridLayout(self.layoutWidget)
        self.gridLayout_4.setMargin(0)
        self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
        self.label_7 = QtGui.QLabel(self.layoutWidget)
        self.label_7.setObjectName(_fromUtf8("label_7"))
        self.gridLayout_4.addWidget(self.label_7, 0, 0, 1, 1)
        self.label_4 = QtGui.QLabel(self.layoutWidget)
        self.label_4.setObjectName(_fromUtf8("label_4"))
        self.gridLayout_4.addWidget(self.label_4, 1, 0, 1, 1)
        self.ImagePhys_PhysThresh = QtGui.QDoubleSpinBox(self.layoutWidget)
        self.ImagePhys_PhysThresh.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.ImagePhys_PhysThresh.setDecimals(1)
        self.ImagePhys_PhysThresh.setMinimum(-1998.0)
        self.ImagePhys_PhysThresh.setMaximum(2000.0)
        self.ImagePhys_PhysThresh.setSingleStep(5.0)
        self.ImagePhys_PhysThresh.setProperty("value", -50.0)
        self.ImagePhys_PhysThresh.setObjectName(_fromUtf8("ImagePhys_PhysThresh"))
        self.gridLayout_4.addWidget(self.ImagePhys_PhysThresh, 1, 1, 1, 1)
        self.ImagePhys_PhysLPF = QtGui.QDoubleSpinBox(self.layoutWidget)
        self.ImagePhys_PhysLPF.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.ImagePhys_PhysLPF.setMinimum(-5000.0)
        self.ImagePhys_PhysLPF.setMaximum(50000.0)
        self.ImagePhys_PhysLPF.setProperty("value", 2500.0)
        self.ImagePhys_PhysLPF.setObjectName(_fromUtf8("ImagePhys_PhysLPF"))
        self.gridLayout_4.addWidget(self.ImagePhys_PhysLPF, 0, 1, 1, 1)
        self.ImagePhys_DetectSpikes = QtGui.QPushButton(self.groupBox_2)
        self.ImagePhys_DetectSpikes.setGeometry(QtCore.QRect(75, 100, 137, 32))
        self.ImagePhys_DetectSpikes.setMinimumSize(QtCore.QSize(5, 0))
        self.ImagePhys_DetectSpikes.setObjectName(_fromUtf8("ImagePhys_DetectSpikes"))
        # "Spike Triggered Averages" sub-group: burst parameters and the
        # STA / BTA / reverse-STA buttons.
        self.groupBox_3 = QtGui.QGroupBox(self.widget_2)
        self.groupBox_3.setGeometry(QtCore.QRect(15, 160, 281, 221))
        self.groupBox_3.setObjectName(_fromUtf8("groupBox_3"))
        self.layoutWidget1 = QtGui.QWidget(self.groupBox_3)
        self.layoutWidget1.setGeometry(QtCore.QRect(15, 25, 236, 97))
        self.layoutWidget1.setObjectName(_fromUtf8("layoutWidget1"))
        self.gridLayout_3 = QtGui.QGridLayout(self.layoutWidget1)
        self.gridLayout_3.setMargin(0)
        self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
        self.label_5 = QtGui.QLabel(self.layoutWidget1)
        self.label_5.setObjectName(_fromUtf8("label_5"))
        self.gridLayout_3.addWidget(self.label_5, 2, 0, 1, 2)
        self.ImagePhys_burstISI = QtGui.QDoubleSpinBox(self.layoutWidget1)
        font = QtGui.QFont()
        font.setPointSize(12)
        self.ImagePhys_burstISI.setFont(font)
        self.ImagePhys_burstISI.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.ImagePhys_burstISI.setDecimals(1)
        self.ImagePhys_burstISI.setMinimum(1.0)
        self.ImagePhys_burstISI.setMaximum(1000.0)
        self.ImagePhys_burstISI.setSingleStep(10.0)
        self.ImagePhys_burstISI.setProperty("value", 100.0)
        self.ImagePhys_burstISI.setObjectName(_fromUtf8("ImagePhys_burstISI"))
        self.gridLayout_3.addWidget(self.ImagePhys_burstISI, 0, 2, 1, 1)
        self.ImagePhys_minBurstSpikes = QtGui.QSpinBox(self.layoutWidget1)
        font = QtGui.QFont()
        font.setPointSize(12)
        self.ImagePhys_minBurstSpikes.setFont(font)
        self.ImagePhys_minBurstSpikes.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.ImagePhys_minBurstSpikes.setMinimum(2)
        self.ImagePhys_minBurstSpikes.setMaximum(20)
        self.ImagePhys_minBurstSpikes.setProperty("value", 3)
        self.ImagePhys_minBurstSpikes.setObjectName(_fromUtf8("ImagePhys_minBurstSpikes"))
        self.gridLayout_3.addWidget(self.ImagePhys_minBurstSpikes, 2, 2, 1, 1)
        self.label_9 = QtGui.QLabel(self.layoutWidget1)
        self.label_9.setObjectName(_fromUtf8("label_9"))
        self.gridLayout_3.addWidget(self.label_9, 0, 0, 1, 2)
        self.ImagePhys_withinBurstISI = QtGui.QDoubleSpinBox(self.layoutWidget1)
        font = QtGui.QFont()
        font.setPointSize(12)
        self.ImagePhys_withinBurstISI.setFont(font)
        self.ImagePhys_withinBurstISI.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.ImagePhys_withinBurstISI.setDecimals(1)
        self.ImagePhys_withinBurstISI.setMinimum(1.0)
        self.ImagePhys_withinBurstISI.setMaximum(1000.0)
        self.ImagePhys_withinBurstISI.setSingleStep(2.0)
        self.ImagePhys_withinBurstISI.setProperty("value", 40.0)
        self.ImagePhys_withinBurstISI.setObjectName(_fromUtf8("ImagePhys_withinBurstISI"))
        self.gridLayout_3.addWidget(self.ImagePhys_withinBurstISI, 1, 2, 1, 1)
        self.label_8 = QtGui.QLabel(self.layoutWidget1)
        self.label_8.setObjectName(_fromUtf8("label_8"))
        self.gridLayout_3.addWidget(self.label_8, 1, 0, 1, 1)
        self.ImagePhys_RevSTA = QtGui.QPushButton(self.groupBox_3)
        self.ImagePhys_RevSTA.setEnabled(False)
        self.ImagePhys_RevSTA.setGeometry(QtCore.QRect(35, 185, 93, 32))
        self.ImagePhys_RevSTA.setObjectName(_fromUtf8("ImagePhys_RevSTA"))
        self.ImagePhys_BTA = QtGui.QPushButton(self.groupBox_3)
        self.ImagePhys_BTA.setGeometry(QtCore.QRect(35, 155, 195, 32))
        self.ImagePhys_BTA.setObjectName(_fromUtf8("ImagePhys_BTA"))
        self.ImagePhys_STA = QtGui.QPushButton(self.groupBox_3)
        self.ImagePhys_STA.setGeometry(QtCore.QRect(35, 125, 197, 32))
        self.ImagePhys_STA.setObjectName(_fromUtf8("ImagePhys_STA"))
        self.gridLayout_2.addWidget(self.widget_2, 1, 0, 2, 1)
        self.gridLayout.addWidget(self.groupBox, 0, 0, 1, 1)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
    def retranslateUi(self, Form):
        """Assign all user-visible (translatable) strings to the widgets."""
        Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox.setTitle(QtGui.QApplication.translate("Form", "Physiology Analysis Functions", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_2.setTitle(QtGui.QApplication.translate("Form", "Physiology", None, QtGui.QApplication.UnicodeUTF8))
        self.label_7.setText(QtGui.QApplication.translate("Form", "LPF", None, QtGui.QApplication.UnicodeUTF8))
        self.label_4.setText(QtGui.QApplication.translate("Form", "Event Thresh", None, QtGui.QApplication.UnicodeUTF8))
        self.ImagePhys_PhysThresh.setSuffix(QtGui.QApplication.translate("Form", " pA", None, QtGui.QApplication.UnicodeUTF8))
        self.ImagePhys_DetectSpikes.setText(QtGui.QApplication.translate("Form", "Detect Spikes", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_3.setTitle(QtGui.QApplication.translate("Form", "Spike Triggered Averages", None, QtGui.QApplication.UnicodeUTF8))
        self.label_5.setText(QtGui.QApplication.translate("Form", "Minimum # spikes/burst", None, QtGui.QApplication.UnicodeUTF8))
        self.label_9.setText(QtGui.QApplication.translate("Form", "Min Interburst Interval", None, QtGui.QApplication.UnicodeUTF8))
        self.label_8.setText(QtGui.QApplication.translate("Form", "Max burst ISI (msec)", None, QtGui.QApplication.UnicodeUTF8))
        self.ImagePhys_RevSTA.setText(QtGui.QApplication.translate("Form", "Rev STA", None, QtGui.QApplication.UnicodeUTF8))
        self.ImagePhys_BTA.setText(QtGui.QApplication.translate("Form", "Burst-triggered Average", None, QtGui.QApplication.UnicodeUTF8))
        self.ImagePhys_STA.setText(QtGui.QApplication.translate("Form", "Spike-triggered Average", None, QtGui.QApplication.UnicodeUTF8))
| {
"content_hash": "34af9940db4fa55ff23fe1c99d05b836",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 139,
"avg_line_length": 62.61764705882353,
"alnum_prop": 0.7120713950211367,
"repo_name": "mgraupe/acq4",
"id": "d72f2ed79594fe528e0e9ae794bce95cc40cb283",
"size": "10930",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "acq4/analysis/modules/pbm_ImageAnalysis/ctrlTemplatePhysiology.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AMPL",
"bytes": "3037"
},
{
"name": "Batchfile",
"bytes": "247"
},
{
"name": "C",
"bytes": "757367"
},
{
"name": "C++",
"bytes": "1222891"
},
{
"name": "CSS",
"bytes": "716"
},
{
"name": "Inno Setup",
"bytes": "1606"
},
{
"name": "MATLAB",
"bytes": "1752"
},
{
"name": "Makefile",
"bytes": "30"
},
{
"name": "Processing",
"bytes": "13403"
},
{
"name": "Python",
"bytes": "6110588"
},
{
"name": "Shell",
"bytes": "70"
}
],
"symlink_target": ""
} |
"""
Script to upload images to wikipedia.
Arguments:
-keep Keep the filename as is
-filename Target filename without the namespace prefix
-noverify Do not ask for verification of the upload description if one
is given
-abortonwarn: Abort upload on the specified warning type. If no warning type
is specified, aborts on any warning.
-ignorewarn: Ignores specified upload warnings. If no warning type is
specified, ignores all warnings. Use with caution
-chunked: Upload the file in chunks (more overhead, but restartable). If
no value is specified the chunk size is 1 MiB. The value must
be a number which can be preceded by a suffix. The units are:
No suffix: Bytes
'k': Kilobytes (1000 B)
'M': Megabytes (1000000 B)
'Ki': Kibibytes (1024 B)
'Mi': Mebibytes (1024x1024 B)
The suffixes are case insensitive.
-always Don't ask the user anything. This will imply -keep and
-noverify and require that either -abortonwarn or -ignorewarn
is defined for all. It will also require a valid file name and
description. It'll only overwrite files if -ignorewarn includes
the 'exists' warning.
-recursive When the filename is a directory it also uploads the files from
the subdirectories.
It is possible to combine -abortonwarn and -ignorewarn so that if the specific
warning is given it won't apply the general one but more specific one. So if it
should ignore specific warnings and abort on the rest it's possible by defining
no warning for -abortonwarn and the specific warnings for -ignorewarn. The order
does not matter. If both are unspecific or a warning is specified by both, it'll
prefer aborting.
If any other arguments are given, the first is either URL, filename or directory
to upload, and the rest is a proposed description to go with the upload. If none
of these are given, the user is asked for the directory, file or URL to upload.
The bot will then upload the image to the wiki.
The script will ask for the location of an image(s), if not given as a parameter,
and for a description.
"""
#
# (C) Rob W.W. Hooft, Andre Engels 2003-2004
# (C) Pywikibot team, 2003-2015
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
#
import os
import time
import tempfile
import re
import math
import sys
import pywikibot
import pywikibot.data.api
from pywikibot import config
from pywikibot.bot import suggest_help, BaseBot
from pywikibot.tools import (
deprecated
)
if sys.version_info[0] > 2:
from urllib.parse import urlparse
from urllib.request import URLopener
basestring = (str,)
else:
from urlparse import urlparse
from urllib import URLopener
class UploadRobot(BaseBot):

    """Upload bot: uploads one or more local files or URLs to a wiki."""

    def __init__(self, url, urlEncoding=None, description=u'',
                 useFilename=None, keepFilename=False,
                 verifyDescription=True, ignoreWarning=False,
                 targetSite=None, uploadByUrl=False, aborts=None, chunk_size=0,
                 **kwargs):
        """
        Constructor.

        @param url: path to url or local file (deprecated), or list of urls or
            paths to local files.
        @type url: string (deprecated) or list
        @param description: Description of file for its page. If multiple files
            are uploading the same description is used for every file.
        @type description: string
        @param useFilename: Specify title of the file's page. If multiple
            files are uploading it asks to change the name for second, third,
            etc. files, otherwise the last file will overwrite the other.
        @type useFilename: string
        @param keepFilename: Set to True to keep original names of urls and
            files, otherwise it will ask to enter a name for each file.
        @type keepFilename: bool
        @param verifyDescription: Set to False to not proofread the
            description.
        @type verifyDescription: bool
        @param ignoreWarning: Set this to True to upload even if another file
            would be overwritten or another mistake would be risked. Set it to
            an array of warning codes to selectively ignore specific warnings.
        @type ignoreWarning: bool or list
        @param targetSite: Set the site to upload to. If target site is not
            given it's taken from user-config.py.
        @type targetSite: object
        @param aborts: List of the warning types to abort upload on. Set to
            True to abort on any warning. Defaults to an empty list.
        @type aborts: bool or list or None
        @param chunk_size: Upload the file in chunks (more overhead, but
            restartable) specified in bytes. If no value is specified the file
            will be uploaded as whole.
        @type chunk_size: integer
        @param always: Disables any input, requires that either ignoreWarning
            or aborts are set to True and that the description is also set. It
            will overwrite verifyDescription to False and keepFilename to
            True.
        @type always: bool
        @deprecated: Using upload_image() is deprecated, use upload_file()
            with file_url param instead
        """
        super(UploadRobot, self).__init__(**kwargs)
        # Normalize the default here instead of using a mutable default
        # argument ("aborts=[]"), which would share one list object across
        # every UploadRobot instance.
        if aborts is None:
            aborts = []
        always = self.getOption('always')
        if (always and ignoreWarning is not True and aborts is not True):
            raise ValueError('When always is set to True, either ignoreWarning '
                             'or aborts must be set to True.')
        if always and not description:
            raise ValueError('When always is set to True the description must '
                             'be set.')
        self.url = url
        if isinstance(self.url, basestring):
            pywikibot.warning("url as string is deprecated. "
                              "Use an iterable instead.")
        self.urlEncoding = urlEncoding
        self.description = description
        self.useFilename = useFilename
        # -always implies keeping the filename and skipping proofreading.
        self.keepFilename = keepFilename or always
        self.verifyDescription = verifyDescription and not always
        self.ignoreWarning = ignoreWarning
        self.aborts = aborts
        self.chunk_size = chunk_size
        if config.upload_to_commons:
            self.targetSite = targetSite or pywikibot.Site('commons',
                                                           'commons')
        else:
            self.targetSite = targetSite or pywikibot.Site()
        self.targetSite.login()
        self.uploadByUrl = uploadByUrl

    @deprecated()
    def urlOK(self):
        """Return True if self.url is a URL or an existing local file."""
        return "://" in self.url or os.path.exists(self.url)

    def read_file_content(self, file_url=None):
        """Return name of temp file in which remote file is saved.

        Downloads file_url, resuming with HTTP Range requests (when the
        server advertises Accept-Ranges) and an increasing sleep between
        retries if the connection is closed early.
        """
        if not file_url:
            file_url = self.url
            pywikibot.warning("file_url is not given. "
                              "Set to self.url by default.")
        pywikibot.output(u'Reading file %s' % file_url)
        resume = False
        rlen = 0
        _contents = None
        dt = 15  # retry back-off in seconds; grows up to 360
        uo = URLopener()
        retrieved = False
        while not retrieved:
            if resume:
                pywikibot.output(u"Resume download...")
                uo.addheader('Range', 'bytes=%s-' % rlen)
            infile = uo.open(file_url)
            # NOTE(review): Message.getheader() only exists on the Python 2
            # httplib response objects -- confirm the Python 3 code path.
            if 'text/html' in infile.info().getheader('Content-Type'):
                pywikibot.output(u"Couldn't download the image: "
                                 "the requested URL was not found on server.")
                return
            content_len = infile.info().getheader('Content-Length')
            accept_ranges = infile.info().getheader('Accept-Ranges') == 'bytes'
            if resume:
                _contents += infile.read()
            else:
                _contents = infile.read()
            infile.close()
            retrieved = True
            if content_len:
                rlen = len(_contents)
                content_len = int(content_len)
                if rlen < content_len:
                    # Short read: retry, resuming from rlen when possible.
                    retrieved = False
                    pywikibot.output(
                        u"Connection closed at byte %s (%s left)"
                        % (rlen, content_len))
                    if accept_ranges and rlen > 0:
                        resume = True
                    pywikibot.output(u"Sleeping for %d seconds..." % dt)
                    time.sleep(dt)
                    if dt <= 60:
                        dt += 15
                    elif dt < 360:
                        dt += 60
            else:
                pywikibot.log(
                    u"WARNING: length check of retrieved data not possible.")
        handle, tempname = tempfile.mkstemp()
        with os.fdopen(handle, "wb") as t:
            t.write(_contents)
        return tempname

    def _handle_warning(self, warning):
        """
        Return whether the warning cause an abort or be ignored.

        @param warning: The warning name
        @type warning: str
        @return: False if this warning should cause an abort, True if it
            should be ignored or None if this warning has no default handler.
        @rtype: bool or None
        """
        if self.aborts is not True:
            if warning in self.aborts:
                return False
        if self.ignoreWarning is True or (self.ignoreWarning is not False and
                                          warning in self.ignoreWarning):
            return True
        return None if self.aborts is not True else False

    def _handle_warnings(self, warnings):
        """Return whether the given API warnings may all be ignored.

        Applies _handle_warning() to each warning code; a single abort
        decision wins, and the user is asked interactively when no handler
        produced a decision.
        """
        messages = '\n'.join('{0.code}: {0.info}'.format(warning)
                             for warning in sorted(warnings,
                                                   key=lambda w: w.code))
        if len(warnings) > 1:
            messages = '\n' + messages
        pywikibot.output('We got the following warning(s): ' + messages)
        answer = True
        for warning in warnings:
            this_answer = self._handle_warning(warning.code)
            if this_answer is False:
                answer = False
                break
            elif this_answer is None:
                answer = None
        if answer is None:
            answer = pywikibot.input_yn(u"Do you want to ignore?",
                                        default=False, automatic_quit=False)
        return answer

    def process_filename(self, file_url=None):
        """Return base filename portion of file_url.

        Interactively validates the target filename (forbidden characters,
        allowed extensions, existing pages) and proofreads the description;
        returns None when the file should be skipped.
        """
        if not file_url:
            file_url = self.url
            pywikibot.warning("file_url is not given. "
                              "Set to self.url by default.")

        always = self.getOption('always')
        # Isolate the pure name
        filename = file_url
        # Filename may be either a URL or a local file path
        if "://" in filename:
            # extract the path portion of the URL
            filename = urlparse(filename).path
        filename = os.path.basename(filename)
        if self.useFilename:
            filename = self.useFilename
        if not self.keepFilename:
            pywikibot.output(
                u"The filename on the target wiki will default to: %s"
                % filename)
            # -always implies keepFilename, so this prompt is never reached
            # in non-interactive mode.
            assert not always
            newfn = pywikibot.input(
                u'Enter a better name, or press enter to accept:')
            if newfn != "":
                filename = newfn
        # FIXME: these 2 belong somewhere else, presumably in family
        # forbidden characters are handled by pywikibot/page.py
        forbidden = ':*?/\\'  # to be extended
        try:
            allowed_formats = self.targetSite.siteinfo.get(
                'fileextensions', get_default=False)
        except KeyError:
            allowed_formats = []
        else:
            allowed_formats = [item['ext'] for item in allowed_formats]

        # ask until it's valid
        first_check = True
        while True:
            if not first_check:
                if always:
                    filename = None
                else:
                    filename = pywikibot.input('Enter a better name, or press '
                                               'enter to skip the file:')
                if not filename:
                    return None
            first_check = False
            ext = os.path.splitext(filename)[1].lower().strip('.')
            # are any chars in forbidden also in filename?
            invalid = set(forbidden) & set(filename)
            if invalid:
                c = "".join(invalid)
                pywikibot.output(
                    'Invalid character(s): %s. Please try again' % c)
                continue
            if allowed_formats and ext not in allowed_formats:
                if always:
                    pywikibot.output('File format is not one of '
                                     '[{0}]'.format(' '.join(allowed_formats)))
                    continue
                elif not pywikibot.input_yn(
                        u"File format is not one of [%s], but %s. Continue?"
                        % (u' '.join(allowed_formats), ext),
                        default=False, automatic_quit=False):
                    continue
            potential_file_page = pywikibot.FilePage(self.targetSite, filename)
            if potential_file_page.exists():
                overwrite = self._handle_warning('exists')
                if overwrite is False:
                    pywikibot.output(
                        "File exists and you asked to abort. Skipping.")
                    return None
                if potential_file_page.canBeEdited():
                    if overwrite is None:
                        overwrite = not pywikibot.input_yn(
                            "File with name %s already exists. "
                            "Would you like to change the name? "
                            "(Otherwise file will be overwritten.)"
                            % filename, default=True,
                            automatic_quit=False)
                    if not overwrite:
                        continue
                    else:
                        break
                else:
                    pywikibot.output(u"File with name %s already exists and "
                                     "cannot be overwritten." % filename)
                    continue
            else:
                try:
                    if potential_file_page.fileIsShared():
                        pywikibot.output(
                            u"File with name %s already exists in shared "
                            "repository and cannot be overwritten."
                            % filename)
                        continue
                    else:
                        break
                except pywikibot.NoPage:
                    break

        # A proper description for the submission.
        # Empty descriptions are not accepted.
        pywikibot.output(u'The suggested description is:\n%s'
                         % self.description)

        # Description must be set and verified
        if not self.description:
            self.verifyDescription = True

        while not self.description or self.verifyDescription:
            if not self.description:
                pywikibot.output(
                    u'\03{lightred}It is not possible to upload a file '
                    'without a summary/description.\03{default}')
            # -always requires a description and disables proofreading, so
            # this loop is never entered in non-interactive mode.
            assert not always
            # if no description, default is 'yes'
            if pywikibot.input_yn(
                    u'Do you want to change this description?',
                    default=not self.description):
                from pywikibot import editor as editarticle
                editor = editarticle.TextEditor()
                try:
                    newDescription = editor.edit(self.description)
                except Exception as e:
                    pywikibot.error(e)
                    continue
                # if user saved / didn't press Cancel
                if newDescription:
                    self.description = newDescription
            self.verifyDescription = False

        return filename

    def abort_on_warn(self, warn_code):
        """Determine if the warning message should cause an abort."""
        if self.aborts is True:
            return True
        else:
            return warn_code in self.aborts

    def ignore_on_warn(self, warn_code):
        """Determine if the warning message should be ignored."""
        if self.ignoreWarning is True:
            return True
        else:
            return warn_code in self.ignoreWarning

    @deprecated('UploadRobot.upload_file()')
    def upload_image(self, debug=False):
        """Upload image."""
        self.upload_file(self.url, debug)

    def upload_file(self, file_url, debug=False, _file_key=None, _offset=0):
        """Upload the image at file_url to the target wiki.

        Return the filename that was used to upload the image.
        If the upload fails, ask the user whether to try again or not.
        If the user chooses not to retry, return null.
        """
        filename = self.process_filename(file_url)
        if not filename:
            return None

        site = self.targetSite
        imagepage = pywikibot.FilePage(site, filename)  # normalizes filename
        imagepage.text = self.description

        pywikibot.output(u'Uploading file to %s via API...' % site)

        success = False
        try:
            # ignore_warnings may be a bool or a callback deciding per
            # warning list (see _handle_warnings).
            if self.ignoreWarning is True:
                apiIgnoreWarnings = True
            else:
                apiIgnoreWarnings = self._handle_warnings
            if self.uploadByUrl:
                success = site.upload(imagepage, source_url=file_url,
                                      ignore_warnings=apiIgnoreWarnings,
                                      _file_key=_file_key, _offset=_offset)
            else:
                if "://" in file_url:
                    temp = self.read_file_content(file_url)
                else:
                    temp = file_url
                success = site.upload(imagepage, source_filename=temp,
                                      ignore_warnings=apiIgnoreWarnings,
                                      chunk_size=self.chunk_size,
                                      _file_key=_file_key, _offset=_offset)
        except pywikibot.data.api.APIError as error:
            if error.code == u'uploaddisabled':
                pywikibot.error(
                    "Upload error: Local file uploads are disabled on %s."
                    % site)
            else:
                pywikibot.error("Upload error: ", exc_info=True)
            return None
        except Exception:
            pywikibot.error("Upload error: ", exc_info=True)
            return None
        else:
            if success:
                # No warning, upload complete.
                pywikibot.output(u"Upload of %s successful." % filename)
                return filename  # data['filename']
            else:
                pywikibot.output(u"Upload aborted.")
                return None

    def run(self):
        """Run bot."""
        # early check that upload is enabled
        if self.targetSite.is_uploaddisabled():
            pywikibot.error(
                "Upload error: Local file uploads are disabled on %s."
                % self.targetSite)
            return

        # early check that user has proper rights to upload
        if "upload" not in self.targetSite.userinfo["rights"]:
            pywikibot.error(
                "User '%s' does not have upload rights on site %s."
                % (self.targetSite.user(), self.targetSite))
            return

        if isinstance(self.url, basestring):
            return self.upload_file(self.url)
        for file_url in self.url:
            self.upload_file(file_url)
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: list of unicode
    """
    url = u''
    description = []
    keepFilename = False
    always = False
    useFilename = None
    verifyDescription = True
    aborts = set()
    ignorewarn = set()
    chunk_size = 0
    chunk_size_regex = r'^-chunked(?::(\d+(?:\.\d+)?)[ \t]*(k|ki|m|mi)?b?)?$'
    chunk_size_regex = re.compile(chunk_size_regex, re.I)
    recursive = False

    # process all global bot args
    # returns a list of non-global args, i.e. args for upload.py
    for arg in pywikibot.handle_args(args):
        if arg:
            if arg == '-always':
                keepFilename = True
                always = True
                verifyDescription = False
            elif arg == '-recursive':
                recursive = True
            elif arg.startswith('-keep'):
                keepFilename = True
            elif arg.startswith('-filename:'):
                useFilename = arg[10:]
            elif arg.startswith('-noverify'):
                verifyDescription = False
            elif arg.startswith('-abortonwarn'):
                # "-abortonwarn:CODE" adds one code; bare "-abortonwarn"
                # aborts on every warning.
                if len(arg) > len('-abortonwarn:') and aborts is not True:
                    aborts.add(arg[len('-abortonwarn:'):])
                else:
                    aborts = True
            elif arg.startswith('-ignorewarn'):
                if len(arg) > len('-ignorewarn:') and ignorewarn is not True:
                    ignorewarn.add(arg[len('-ignorewarn:'):])
                else:
                    ignorewarn = True
            elif arg.startswith('-chunked'):
                match = chunk_size_regex.match(arg)
                if match:
                    if match.group(1):  # number was in there
                        base = float(match.group(1))
                        if match.group(2):  # suffix too
                            suffix = match.group(2).lower()
                            if suffix == "k":
                                suffix = 1000
                            elif suffix == "m":
                                suffix = 1000000
                            elif suffix == "ki":
                                suffix = 1 << 10
                            elif suffix == "mi":
                                suffix = 1 << 20
                            else:
                                pass  # huh?
                        else:
                            suffix = 1
                        chunk_size = math.trunc(base * suffix)
                    else:
                        chunk_size = 1 << 20  # default to 1 MiB
                else:
                    pywikibot.error('Chunk size parameter is not valid.')
            elif url == u'':
                url = arg
            else:
                description.append(arg)
    description = u' '.join(description)

    # Ask (or, with -always, fail) until a plausible file/URL is given.
    while not ("://" in url or os.path.exists(url)):
        if not url:
            error = 'No input filename given.'
        else:
            error = 'Invalid input filename given.'
            if not always:
                error += ' Try again.'
        if always:
            url = None
            break
        else:
            pywikibot.output(error)
        url = pywikibot.input(u'URL, file or directory where files are now:')

    if always and ((aborts is not True and ignorewarn is not True) or
                   not description or url is None):
        additional = ''
        missing = []
        if url is None:
            missing += ['filename']
            # 'error' is always bound here: url can only be None after the
            # loop above assigned it.
            additional = error + ' '
        # BUGFIX: description is a str after the join above, so the previous
        # "description is None" check never fired and a missing description
        # was silently dropped from the report.
        if not description:
            missing += ['description']
        if aborts is not True and ignorewarn is not True:
            additional += ('Either -ignorewarn or -abortonwarn must be '
                           'defined for all codes. ')
        additional += 'Unable to run in -always mode'
        suggest_help(missing_parameters=missing, additional_text=additional)
        return False

    if os.path.isdir(url):
        file_list = []
        for directory_info in os.walk(url):
            if not recursive:
                # Do not visit any subdirectories
                directory_info[1][:] = []
            for dir_file in directory_info[2]:
                file_list.append(os.path.join(directory_info[0], dir_file))
        url = file_list
    else:
        url = [url]

    bot = UploadRobot(url, description=description, useFilename=useFilename,
                      keepFilename=keepFilename,
                      verifyDescription=verifyDescription,
                      aborts=aborts, ignoreWarning=ignorewarn,
                      chunk_size=chunk_size, always=always)
    bot.run()
if __name__ == "__main__":
main()
| {
"content_hash": "8da22b7e808d233b48539686dd1ebbf0",
"timestamp": "",
"source": "github",
"line_count": 619,
"max_line_length": 87,
"avg_line_length": 40.484652665589664,
"alnum_prop": 0.5368715083798883,
"repo_name": "icyflame/batman",
"id": "060d1119754820c712f6120a1e20c546bdaffac2",
"size": "25102",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/upload.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "97"
},
{
"name": "Python",
"bytes": "3922041"
}
],
"symlink_target": ""
} |
'''
Copyright 2012 Will Snook (http://willsnook.com)
MIT License
Utility code for generating antenna geometry files in nec2 card stack format
'''
import math
# =======================================================================================================
# Field formatting functions (i.e. "columns" in punchcard-speak)
# =======================================================================================================
def sci(f):
    ''' Return a scientific-notation float in a 13 char wide field
        (xyz coordinates, radius): right-aligned, space-padded, with a
        leading space reserved for the sign.
    '''
    return format(f, ' > 13.5E')
def dec(i):
    ''' Return a decimal integer in a 6 char wide, right-aligned,
        space-padded field (tags, segments).
    '''
    return format(i, ' >6d')
# =======================================================================================================
# Unit conversions... The nec2 engine requires its inputs to be in meters and degrees. Note that these
# functions are named to denote the pre-conversion units, because I consider those more suitable for
# the calculations I will be working with.
# =======================================================================================================
def m(m):
    ''' Convert meters to meters. Useful for being consistent about always
        specifying units, and the multiplication by 1.0 promotes integer
        input to float (avoiding accidental integer math).
    '''
    meters = m * 1.0
    return meters
def inch(i):
    ''' Convert inches to meters (1 inch == 2.54 cm). '''
    centimeters = i * 2.54
    return centimeters / 100.0
def deg(degrees):
    ''' Make sure degrees are float. '''
    angle = degrees * 1.0
    return angle
# =======================================================================================================
# Output conversions from meters back to inches
# =======================================================================================================
def mToIn(meters):
    ''' Convert meters back to inches for output in the comment section. '''
    centimeters = meters * 100.0
    return centimeters / 2.54
# =======================================================================================================
# Different types of cards (see http://www.nec2.org/part_3/cards/ for card format documentation)
# Tag & segments have no units. Dimensions are in meters. Angles are in degrees.
# =======================================================================================================
def gw(tag, segments, x1, y1, z1, x2, y2, z2, radius):
    ''' Return the line for a GW card, a wire. '''
    fields = ["GW", dec(tag), dec(segments),
              sci(x1), sci(y1), sci(z1),
              sci(x2), sci(y2), sci(z2),
              sci(radius)]
    return "".join(fields) + "\n"
def ga(tag, segments, arcRadius, startAngle, endAngle, wireRadius):
    ''' Return the line for a GA card, an arc in the X-Z plane with its
        center at the origin.
    '''
    notUsed = 0.0
    fields = ["GA", dec(tag), dec(segments),
              sci(arcRadius), sci(startAngle), sci(endAngle),
              sci(wireRadius),
              # Note: xnec2c fills this field in with its "Segs % lambda"
              # value, but that may be a bug
              sci(notUsed),
              sci(notUsed), sci(notUsed)]
    return "".join(fields) + "\n"
def gm(rotX, rotY, rotZ, trX, trY, trZ, firstTag):
    ''' Return the line for a GM card, move (rotate and translate).
        rotX, rotY, and rotZ: angle to rotate around each axis
        trX, trY, and trZ: distance to translate along each axis
        firstTag: first tag# to apply transform to (subsequent tag#'s get it
                  too... like it or not)
    '''
    tagIncrement = 0
    newStructures = 0
    fields = ["GM", dec(tagIncrement), dec(newStructures),
              sci(rotX), sci(rotY), sci(rotZ),
              sci(trX), sci(trY), sci(trZ),
              # the tag field is written as a float, per the card format
              sci(firstTag * 1.0)]
    return "".join(fields) + "\n"
# =======================================================================================================
# File I/O
# =======================================================================================================
def writeCardsToFile(fileName, comments, wires, footer):
    ''' Write a NEC2 formatted card stack to the output file.

        Each section is stripped of surrounding whitespace and terminated
        with exactly one newline. A context manager guarantees the file is
        closed (and its buffers flushed) even if a write raises, which the
        previous open()/close() sequence did not.
    '''
    with open(fileName, 'w') as nec2File:
        nec2File.write(comments.strip() + "\n")
        nec2File.write(wires.strip() + "\n")
        nec2File.write(footer.strip() + "\n")
def copyCardFileToConsole(fileName):
    ''' Dump the card stack back to the console for a quick sanity check.

        The original used a Python 2-only "print contents," statement (no
        added newline). Writing the raw contents to stdout preserves that
        output while also working on Python 3, and the context manager
        guarantees the file is closed.
    '''
    import sys  # local import keeps the module's import block unchanged
    with open(fileName, 'r') as nec2File:
        sys.stdout.write(nec2File.read())
| {
"content_hash": "6444156d5a039fe8e0e95c599225cd01",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 114,
"avg_line_length": 36.1764705882353,
"alnum_prop": 0.5173054587688733,
"repo_name": "ckuethe/nec2-toys",
"id": "0e8b97e053d57fa180eb85aae967e9faffa4124d",
"size": "4305",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oldStuff/gen1/nec2utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "54561"
}
],
"symlink_target": ""
} |
"""Tests for src.analyzer.morphotactics.validator."""
from src.analyzer.morphotactics import validator
from absl.testing import absltest
from absl.testing import parameterized
class ValidateTest(parameterized.TestCase):
  """Unit tests for morphotactics rule-definition validation."""

  @parameterized.named_parameters([
      {
          "testcase_name": "EpsilonRuleInputAndOutput",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              "<eps>",
              "<eps>",
          ],
      },
      {
          "testcase_name":
              "EpsilonRuleOutputIgBoundryRuleInput",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              ")([JJ]-HmsH[Derivation=Sim]",
              "<eps>",
          ],
      },
      {
          "testcase_name":
              "EpsilonRuleOutputInflectionMorphemeRuleInput",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              "+NDAn[Case=Abl]",
              "<eps>",
          ],
      },
      {
          "testcase_name":
              "EpsilonRuleOutputProperNounAnalysisRuleInput",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              ")+[Proper=False]",
              "<eps>",
          ],
      },
      {
          "testcase_name":
              "EpsilonRuleOutputNumberRuleInput",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              "1[NN]+'[Apostrophe=True]+HncH[NumberInf=Ord]",
              "<eps>",
          ],
      },
      {
          "testcase_name": "EpsilonRuleOutputDecimalPointSeparatorRuleInput",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              ".",
              "<eps>",
          ],
      },
      {
          "testcase_name": "EpsilonRuleInputMetaMorphemeRuleOutput",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              "<eps>",
              "+cAğHz",
          ],
      },
      {
          "testcase_name": "EpsilonRuleInputNumberMorphophonemicsRuleOutput",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              "<eps>",
              "00.*ü*",
          ],
      },
  ])
  def test_success(self, rule_definition):
    """Well-formed rule definitions pass validation (validate returns None)."""
    self.assertIsNone(validator.validate(rule_definition))

  @parameterized.named_parameters([
      {
          "testcase_name": "ExtraTokens",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              "<eps>",
              "<eps>",
              "EXTRA-TOKEN",
          ],
          "message": "Expecting 4 tokens got 5.",
      },
      {
          "testcase_name": "MissingTokens",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
          ],
          "message": "Expecting 4 tokens got 2.",
      },
      {
          "testcase_name": "EmptyFromStateToken",
          "rule_definition": [
              "",
              "TO-STATE",
              "<eps>",
              "<eps>",
          ],
          "message": "Rule definition contains empty tokens.",
      },
      {
          "testcase_name": "EmptyToStateToken",
          "rule_definition": [
              "FROM-STATE",
              "",
              "<eps>",
              "<eps>",
          ],
          "message": "Rule definition contains empty tokens.",
      },
      {
          "testcase_name": "EmptyRuleInputToken",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              "",
              "<eps>",
          ],
          "message": "Rule definition contains empty tokens.",
      },
      {
          "testcase_name": "EmptyRuleOutputToken",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              "<eps>",
              "",
          ],
          "message": "Rule definition contains empty tokens.",
      },
      {
          "testcase_name": "InvalidPrefixCharactersInInputToken",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              "foo)([TAG]-ki[Cat=Val]]",
              "<eps>",
          ],
          "message": "Invalid rule input label.",
      },
      {
          "testcase_name": "InvalidInfixCharactersInInputToken",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              ")foo+[Proper=True]",
              "<eps>",
          ],
          "message": "Invalid rule input label.",
      },
      {
          "testcase_name": "InvalidSuffixCharactersInInputToken",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              "5[TAG]foo",
              "<eps>",
          ],
          "message": "Invalid rule input label.",
      },
      {
          "testcase_name": "InvalidRuleInputToken",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              "Invalid-Input",
              "<eps>",
          ],
          "message": "Invalid rule input label.",
      },
      {
          "testcase_name": "InvalidRuleOutputToken",
          "rule_definition": [
              "FROM-STATE",
              "TO-STATE",
              "<eps>",
              "Invalid-Output",
          ],
          "message": "Invalid rule output label.",
      },
  ])
  def test_raises_exception(self, rule_definition, message):
    """Malformed rule definitions raise with a descriptive error message."""
    # assertRaisesRegexp is a deprecated alias (removed in Python 3.12);
    # assertRaisesRegex is the supported spelling since Python 3.2.
    with self.assertRaisesRegex(validator.InvalidMorphotacticsRuleError,
                                message):
      validator.validate(rule_definition)
if __name__ == "__main__":
absltest.main()
| {
"content_hash": "1c0949d8e932e2477a3e2366deca8ed5",
"timestamp": "",
"source": "github",
"line_count": 210,
"max_line_length": 77,
"avg_line_length": 26.804761904761904,
"alnum_prop": 0.42689642920589804,
"repo_name": "google-research/turkish-morphology",
"id": "3bca9d4cb632818acdf7054a01bec2e2b13fb7b0",
"size": "6239",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/analyzer/morphotactics/validator_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "178409"
},
{
"name": "Shell",
"bytes": "7610"
},
{
"name": "Starlark",
"bytes": "23070"
}
],
"symlink_target": ""
} |
import math
import os
import platform
import ssl
import subprocess
import sys
import distro
import docker
import compose
from ..const import IS_WINDOWS_PLATFORM
def yesno(prompt, default=None):
    """
    Prompt the user for a yes or no.

    Can optionally specify a default value, which will only be
    used if they enter a blank line.

    Unrecognised input (anything other than "y", "n", "yes",
    "no" or "") will return None.
    """
    reply = input(prompt).strip().lower()
    if reply in ('y', 'yes'):
        return True
    if reply in ('n', 'no'):
        return False
    if reply == '':
        return default
    return None
def input(prompt):
    """
    Version of input (raw_input in Python 2) which forces a flush of sys.stdout
    to avoid problems where the prompt fails to appear due to line buffering
    """
    # NOTE: deliberately shadows the builtin so callers in this module get
    # the flushing behaviour.
    out = sys.stdout
    out.write(prompt)
    out.flush()
    line = sys.stdin.readline()
    return line.rstrip('\n')
def call_silently(*args, **kwargs):
    """
    Like subprocess.call(), but redirects stdout and stderr to /dev/null.
    """
    with open(os.devnull, 'w') as devnull:
        try:
            return subprocess.call(
                *args, stdout=devnull, stderr=devnull, **kwargs)
        except OSError:
            # subprocess.call() can still raise on Windows; normalize to
            # POSIXy behaviour by returning a nonzero exit code.
            return 1
def is_mac():
    """Return True when running on macOS (Darwin kernel)."""
    system_name = platform.system()
    return system_name == 'Darwin'
def is_ubuntu():
    """Return True when running on Ubuntu (Linux + distro identification)."""
    if platform.system() != 'Linux':
        return False
    return distro.linux_distribution()[0] == 'Ubuntu'
def is_windows():
    """Return True when running on Windows (delegates to the const module)."""
    return IS_WINDOWS_PLATFORM
def get_version_info(scope):
    """Return a version string for the requested scope.

    'compose' yields just the docker-compose line; 'full' adds docker-py,
    Python and OpenSSL versions. Any other scope raises ValueError.
    """
    versioninfo = 'docker-compose version {}, build {}'.format(
        compose.__version__,
        get_build_version())

    if scope == 'compose':
        return versioninfo
    if scope == 'full':
        lines = [
            versioninfo,
            "docker-py version: {}".format(docker.version),
            "{} version: {}".format(platform.python_implementation(),
                                    platform.python_version()),
            "OpenSSL version: {}".format(ssl.OPENSSL_VERSION),
        ]
        return "\n".join(lines)
    raise ValueError("{} is not a valid version scope".format(scope))
def get_build_version():
    """Return the git SHA recorded at build time, or 'unknown' if absent."""
    gitsha_path = os.path.join(os.path.dirname(compose.__file__), 'GITSHA')
    if not os.path.exists(gitsha_path):
        return 'unknown'
    with open(gitsha_path) as fh:
        return fh.read().strip()
def is_docker_for_mac_installed():
    """Return True on macOS when the Docker for Mac app bundle is present."""
    return is_mac() and os.path.isdir('/Applications/Docker.app')
def generate_user_agent():
    """Build the HTTP User-Agent string: compose and docker-py versions,
    plus a platform token when the platform can be identified.
    """
    agent_parts = [
        "docker-compose/{}".format(compose.__version__),
        "docker-py/{}".format(docker.__version__),
    ]
    try:
        p_system = platform.system()
        p_release = platform.release()
    except OSError:
        # Platform lookups may fail on exotic systems; just omit the token.
        pass
    else:
        agent_parts.append("/".join((p_system, p_release)))
    return " ".join(agent_parts)
def human_readable_file_size(size):
    """Format a byte count with decimal (power-of-1000) unit suffixes,
    e.g. 1234567 -> '1.235 MB'. Sizes beyond exabytes are clamped to EB.
    """
    suffixes = ('B', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB')
    if size:
        order = int(math.log(size, 1000))
    else:
        order = 0
    order = min(order, len(suffixes) - 1)
    scaled = size / pow(10, order * 3)
    return '{:.4g} {}'.format(scaled, suffixes[order])
def binarystr_to_unicode(s):
    """Decode bytes to text; non-bytes values pass through unchanged.

    On Windows a windows-1250 decode is attempted first; otherwise (or on
    decode failure) fall back to UTF-8 with replacement characters.
    """
    if not isinstance(s, bytes):
        return s
    preferred = ['windows-1250'] if IS_WINDOWS_PLATFORM else []
    for encoding in preferred:
        try:
            return s.decode(encoding)
        except UnicodeDecodeError:
            continue
    return s.decode('utf-8', 'replace')
| {
"content_hash": "9a8e1be21883151658e95accb9c73057",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 86,
"avg_line_length": 24.71527777777778,
"alnum_prop": 0.5903343635852768,
"repo_name": "thaJeztah/compose",
"id": "6a4615a966083ce871a204aefe2a786a46b2c8cd",
"size": "3559",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "compose/cli/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2920"
},
{
"name": "Groovy",
"bytes": "12229"
},
{
"name": "Makefile",
"bytes": "1573"
},
{
"name": "PowerShell",
"bytes": "7139"
},
{
"name": "Python",
"bytes": "1108081"
},
{
"name": "Shell",
"bytes": "34381"
}
],
"symlink_target": ""
} |
import cPickle
import os
from pyglm.inference.coord_descent import coord_descent
from pyglm.plotting.plot_results import plot_results
from synth_harness import initialize_test_harness
def run_synth_test():
    """ Run a test with synthetic data and MAP inference via coordinate
        descent: sample an initial state, run one iteration of coordinate
        descent, pickle the result, and plot it against the ground truth.
    """
    options, popn, data, popn_true, x_true = initialize_test_harness()

    # Sample random initial state
    x0 = popn.sample()
    ll0 = popn.compute_log_p(x0)
    print("LL0: %f" % ll0)

    # Perform inference
    x_inf = coord_descent(popn, x0=x0, maxiter=1)
    ll_inf = popn.compute_log_p(x_inf)
    print("LL_inf: %f" % ll_inf)

    # Save results. BUGFIX: protocol=-1 selects the highest (binary) pickle
    # protocol, so the file must be opened in binary mode -- a text-mode 'w'
    # handle corrupts the stream on Windows under Python 2.
    results_file = os.path.join(options.resultsDir, 'results.pkl')
    print("Saving results to %s" % results_file)
    with open(results_file, 'wb') as f:
        cPickle.dump(x_inf, f, protocol=-1)

    # Plot results
    plot_results(popn, x_inf, popn_true, x_true, resdir=options.resultsDir)
if __name__ == "__main__":
run_synth_test()
| {
"content_hash": "91e5a6d7f499e994befc0e9f3ad550b3",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 75,
"avg_line_length": 28.8,
"alnum_prop": 0.6398809523809523,
"repo_name": "slinderman/theano_pyglm",
"id": "56706e2504386e47166ba35353258809b3b406ff",
"size": "1058",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/synth_map.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "448608"
}
],
"symlink_target": ""
} |
"""
Implements data iterators and I/O related functions for sequence-to-sequence models.
"""
import bisect
import gzip
import logging
import pickle
import random
from collections import OrderedDict
from typing import Dict, Iterator, Iterable, List, NamedTuple, Optional, Tuple
import mxnet as mx
import numpy as np
from sockeye.utils import check_condition
from . import config
from . import constants as C
logger = logging.getLogger(__name__)
def define_buckets(max_seq_len: int, step=10) -> List[int]:
    """
    Returns a list of integers defining bucket boundaries.
    Bucket boundaries are created according to the following policy:
    We generate buckets with a step size of step until the final bucket fits max_seq_len.
    We then limit that bucket to max_seq_len (difference between semi-final and final bucket may be less than step).

    :param max_seq_len: Maximum bucket size.
    :param step: Distance between buckets.
    :return: List of bucket sizes.
    """
    # list(range(...)) replaces the previous identity comprehension over the
    # same range -- same values, clearer intent.
    buckets = list(range(step, max_seq_len + step, step))
    # Clamp the final bucket to max_seq_len; it may otherwise overshoot by
    # up to step - 1.
    buckets[-1] = max_seq_len
    return buckets
def define_parallel_buckets(max_seq_len_source: int,
                            max_seq_len_target: int,
                            bucket_width=10,
                            length_ratio=1.0) -> List[Tuple[int, int]]:
    """
    Build (source, target) bucket pairs up to (max_seq_len_source, max_seq_len_target).

    The longer side steps by ``bucket_width``; the shorter side's step is scaled by
    the average target/source length ratio. When one side runs out of buckets
    before the other, it is padded with its maximum value. Every bucket dimension
    is at least 2 (the target side gains a BOS symbol).

    :param max_seq_len_source: Maximum source bucket size.
    :param max_seq_len_target: Maximum target bucket size.
    :param bucket_width: Width of buckets on longer side.
    :param length_ratio: Length ratio of data (target/source).
    """
    src_step = bucket_width
    trg_step = bucket_width
    if length_ratio >= 1.0:
        # target side is longer -> shrink the source step accordingly
        src_step = max(1, int(bucket_width / length_ratio))
    else:
        # source side is longer -> shrink the target step accordingly
        trg_step = max(1, int(bucket_width * length_ratio))
    src = define_buckets(max_seq_len_source, step=src_step)
    trg = define_buckets(max_seq_len_target, step=trg_step)
    # Pad the shorter list by repeating its maximum bucket size.
    if len(src) < len(trg):
        src.extend(src[-1] for _ in range(len(trg) - len(src)))
    elif len(trg) < len(src):
        trg.extend(trg[-1] for _ in range(len(src) - len(trg)))
    # Enforce the minimum bucket size of 2 and pair the two sides up.
    pairs = [(max(2, s), max(2, t)) for s, t in zip(src, trg)]
    # Drop duplicates while preserving order.
    return list(OrderedDict.fromkeys(pairs))
def get_bucket(seq_len: int, buckets: List[int]) -> Optional[int]:
    """
    Return the smallest bucket that fits a sequence of length ``seq_len``.

    :param seq_len: Sequence length.
    :param buckets: List of buckets (sorted ascending).
    :return: Chosen bucket, or None when no bucket is large enough.
    """
    idx = bisect.bisect_left(buckets, seq_len)
    return buckets[idx] if idx < len(buckets) else None
def read_parallel_corpus(data_source: str,
                         data_target: str,
                         vocab_source: Dict[str, int],
                         vocab_target: Dict[str, int]) -> Tuple[List[List[int]], List[List[int]]]:
    """
    Load both sides of a parallel corpus and verify that they are aligned
    (same number of sentences). The target side gets a BOS symbol prepended.

    :param data_source: Path to source training data.
    :param data_target: Path to target training data.
    :param vocab_source: Source vocabulary.
    :param vocab_target: Target vocabulary.
    :return: Tuple of (source sentences, target sentences).
    """
    sources = read_sentences(data_source, vocab_source, add_bos=False)
    targets = read_sentences(data_target, vocab_target, add_bos=True)
    check_condition(len(sources) == len(targets),
                    "Number of source sentences does not match number of target sentences")
    return sources, targets
def get_training_data_iters(source: str, target: str,
                            validation_source: str, validation_target: str,
                            vocab_source: Dict[str, int], vocab_target: Dict[str, int],
                            batch_size: int,
                            fill_up: str,
                            max_seq_len_source: int,
                            max_seq_len_target: int,
                            bucketing: bool,
                            bucket_width: int) -> Tuple['ParallelBucketSentenceIter', 'ParallelBucketSentenceIter']:
    """
    Returns data iterators for training and validation data.

    The training corpus determines the average target/source length ratio,
    which in turn shapes the (shared) bucket list used by both iterators.

    :param source: Path to source training data.
    :param target: Path to target training data.
    :param validation_source: Path to source validation data.
    :param validation_target: Path to target validation data.
    :param vocab_source: Source vocabulary.
    :param vocab_target: Target vocabulary.
    :param batch_size: Batch size.
    :param fill_up: Fill-up strategy for buckets.
    :param max_seq_len_source: Maximum source sequence length.
    :param max_seq_len_target: Maximum target sequence length.
    :param bucketing: Whether to use bucketing.
    :param bucket_width: Size of buckets.
    :return: Tuple of (training data iterator, validation data iterator).
    """
    logger.info("Creating train data iterator")
    train_source_sentences, train_target_sentences = read_parallel_corpus(source,
                                                                          target,
                                                                          vocab_source,
                                                                          vocab_target)
    # Mean target/source length ratio over the training data; used to scale
    # bucket step sizes per side in define_parallel_buckets.
    length_ratio = sum(len(t) / float(len(s)) for t, s in zip(train_target_sentences, train_source_sentences)) / len(
        train_target_sentences)
    logger.info("Average training target/source length ratio: %.2f", length_ratio)
    # define buckets; without bucketing everything goes into one maximal bucket
    buckets = define_parallel_buckets(max_seq_len_source,
                                      max_seq_len_target,
                                      bucket_width,
                                      length_ratio) if bucketing else [
        (max_seq_len_source, max_seq_len_target)]
    train_iter = ParallelBucketSentenceIter(train_source_sentences,
                                            train_target_sentences,
                                            buckets,
                                            batch_size,
                                            vocab_target[C.EOS_SYMBOL],
                                            C.PAD_ID,
                                            vocab_target[C.UNK_SYMBOL],
                                            fill_up=fill_up)
    logger.info("Creating validation data iterator")
    val_source_sentences, val_target_sentences = read_parallel_corpus(validation_source,
                                                                      validation_target,
                                                                      vocab_source,
                                                                      vocab_target)
    # Validation reuses the training buckets so both iterators agree on shapes.
    val_iter = ParallelBucketSentenceIter(val_source_sentences,
                                          val_target_sentences,
                                          buckets,
                                          batch_size,
                                          vocab_target[C.EOS_SYMBOL],
                                          C.PAD_ID,
                                          vocab_target[C.UNK_SYMBOL],
                                          fill_up=fill_up)
    return train_iter, val_iter
class DataConfig(config.Config):
    """
    Stores data paths from training.

    :param source: Path to source training data.
    :param target: Path to target training data.
    :param validation_source: Path to source validation data.
    :param validation_target: Path to target validation data.
    :param vocab_source: Path to source vocabulary.
    :param vocab_target: Path to target vocabulary.
    """
    def __init__(self,
                 source: str,
                 target: str,
                 validation_source: str,
                 validation_target: str,
                 vocab_source: str,
                 vocab_target: str) -> None:
        super().__init__()
        self.source = source
        self.target = target
        self.validation_source = validation_source
        self.validation_target = validation_target
        self.vocab_source = vocab_source
        self.vocab_target = vocab_target
def smart_open(filename: str, mode="rt", ftype="auto", errors='replace'):
    """
    Return a UTF-8 text file object for ``filename``.

    Uses gzip when ``ftype`` is "gzip"/"gz", or when ``ftype`` is "auto" and
    the filename ends in ".gz"; otherwise opens the file directly.
    Note: encoding error handling defaults to "replace".

    :param filename: The filename to open.
    :param mode: Reader mode.
    :param ftype: File type. If 'auto' checks filename suffix for gz to try gzip.open
    :param errors: Encoding error handling during reading. Defaults to 'replace'
    :return: File descriptor
    """
    is_gzip = ftype in ("gzip", "gz") or (ftype == "auto" and filename.endswith(".gz"))
    opener = gzip.open if is_gzip else open
    return opener(filename, mode=mode, encoding='utf-8', errors=errors)
def read_content(path: str, limit=None) -> Iterator[List[str]]:
    """
    Yield the token list of each line of ``path``, stopping after ``limit`` lines.

    :param path: Path to files containing sentences.
    :param limit: How many lines to read from path (None reads everything).
    :return: Iterator over lists of words.
    """
    with smart_open(path) as fin:
        for line_no, line in enumerate(fin):
            if limit is not None and line_no == limit:
                break
            yield list(get_tokens(line))
def get_tokens(line: str) -> Iterator[str]:
    """
    Split ``line`` on whitespace and yield each non-empty token.

    :param line: Input string.
    :return: Iterator over tokens.
    """
    yield from (tok for tok in line.rstrip().split() if tok)
def tokens2ids(tokens: Iterable[str], vocab: Dict[str, int]) -> List[int]:
    """
    Map each token to its vocabulary id, falling back to the UNK id for
    out-of-vocabulary tokens.

    :param tokens: List of tokens.
    :param vocab: Vocabulary (containing UNK symbol).
    :return: List of word ids.
    """
    ids = []
    for token in tokens:
        ids.append(vocab.get(token, vocab[C.UNK_SYMBOL]))
    return ids
def read_sentences(path: str, vocab: Dict[str, int], add_bos=False, limit=None) -> List[List[int]]:
    """
    Reads sentences from path and creates word id sentences.

    :param path: Path to read data from.
    :param vocab: Vocabulary mapping.
    :param add_bos: Whether to add Beginning-Of-Sentence (BOS) symbol.
    :param limit: Read limit.
    :return: List of integer sequences.
    """
    # Sanity-check that the vocabulary provides all special symbols.
    # (Fix: the original asserted UNK twice and never checked that PAD is
    # present before indexing it on the next line.)
    assert C.UNK_SYMBOL in vocab
    assert C.PAD_SYMBOL in vocab
    assert vocab[C.PAD_SYMBOL] == C.PAD_ID
    assert C.BOS_SYMBOL in vocab
    assert C.EOS_SYMBOL in vocab
    sentences = []
    for sentence_tokens in read_content(path, limit):
        sentence = tokens2ids(sentence_tokens, vocab)
        check_condition(sentence, "Empty sentence in file %s" % path)
        if add_bos:
            sentence.insert(0, vocab[C.BOS_SYMBOL])
        sentences.append(sentence)
    logger.info("%d sentences loaded from '%s'", len(sentences), path)
    return sentences
def get_default_bucket_key(buckets: List[Tuple[int, int]]) -> Tuple[int, int]:
    """
    Return the default bucket, i.e. the largest bucket in the list.

    :param buckets: List of buckets.
    :return: The largest bucket in the list.
    """
    default_key = max(buckets)
    return default_key
def get_parallel_bucket(buckets: List[Tuple[int, int]],
                        length_source: int,
                        length_target: int) -> Optional[Tuple[int, Tuple[int, int]]]:
    """
    Return the first (index, bucket) pair whose bucket fits both the source and
    the target length; buckets are scanned in list order.
    Returns (None, None) if no bucket fits.

    :param buckets: List of buckets.
    :param length_source: Length of source sequence.
    :param length_target: Length of target sequence.
    :return: Tuple of (bucket index, bucket), or (None, None) if not fitting.
    """
    fitting = ((idx, pair) for idx, pair in enumerate(buckets)
               if pair[0] >= length_source and pair[1] >= length_target)
    return next(fitting, (None, None))
# TODO: consider more memory-efficient data reading (load from disk on demand)
# TODO: consider using HDF5 format for language data
class ParallelBucketSentenceIter(mx.io.DataIter):
    """
    A bucket sentence iterator for parallel data. Randomly shuffles the data
    after every call to reset(). Data is stored in NDArrays for each epoch for
    fast indexing during iteration.

    :param source_sentences: List of source sentences (integer-coded).
    :param target_sentences: List of target sentences (integer-coded).
    :param buckets: List of buckets.
    :param batch_size: Batch size of generated data batches. Incomplete batches
        are discarded if fill_up == None, or filled up according to the fill_up
        strategy.
    :param eos_id: Word id for end-of-sentence.
    :param pad_id: Word id for padding symbols.
    :param unk_id: Word id for unknown symbols.
    :param fill_up: If not None, fill up bucket data to a multiple of batch_size
        to avoid discarding incomplete batches. If set to 'replicate', sample
        examples from the bucket and use them to fill up.
    :param dtype: Data type of generated NDArrays.
    """
    def __init__(self,
                 source_sentences: List[List[int]],
                 target_sentences: List[List[int]],
                 buckets: List[Tuple[int, int]],
                 batch_size: int,
                 eos_id: int,
                 pad_id: int,
                 unk_id: int,
                 fill_up: Optional[str] = None,
                 source_data_name=C.SOURCE_NAME,
                 source_data_length_name=C.SOURCE_LENGTH_NAME,
                 target_data_name=C.TARGET_NAME,
                 label_name=C.TARGET_LABEL_NAME,
                 dtype='float32'):
        super(ParallelBucketSentenceIter, self).__init__()
        self.buckets = list(buckets)
        self.buckets.sort()
        self.default_bucket_key = get_default_bucket_key(self.buckets)
        self.batch_size = batch_size
        self.eos_id = eos_id
        self.pad_id = pad_id
        self.unk_id = unk_id
        self.dtype = dtype
        self.source_data_name = source_data_name
        self.source_data_length_name = source_data_length_name
        self.target_data_name = target_data_name
        self.label_name = label_name
        self.fill_up = fill_up
        # TODO: consider avoiding explicitly creating length and label arrays to save host memory
        # One list per bucket; filled by _assign_to_buckets, then converted to
        # numpy arrays by _convert_to_array.
        self.data_source = [[] for _ in self.buckets]
        self.data_length = [[] for _ in self.buckets]
        self.data_target = [[] for _ in self.buckets]
        self.data_label = [[] for _ in self.buckets]
        # assign sentence pairs to buckets
        self._assign_to_buckets(source_sentences, target_sentences)
        # convert to single numpy array for each bucket
        self._convert_to_array()
        # Data/label descriptors advertise the default (largest) bucket shapes.
        self.provide_data = [
            mx.io.DataDesc(name=source_data_name, shape=(batch_size, self.default_bucket_key[0]), layout=C.BATCH_MAJOR),
            mx.io.DataDesc(name=source_data_length_name, shape=(batch_size,), layout=C.BATCH_MAJOR),
            mx.io.DataDesc(name=target_data_name, shape=(batch_size, self.default_bucket_key[1]), layout=C.BATCH_MAJOR)]
        self.provide_label = [
            mx.io.DataDesc(name=label_name, shape=(self.batch_size, self.default_bucket_key[1]), layout=C.BATCH_MAJOR)]
        self.data_names = [self.source_data_name, self.source_data_length_name, self.target_data_name]
        self.label_names = [self.label_name]
        # create index tuples (i,j) into buckets: i := bucket index ; j := row index of bucket array
        self.idx = []
        for i, buck in enumerate(self.data_source):
            rest = len(buck) % batch_size
            if rest > 0:
                logger.info("Discarding %d samples from bucket %s due to incomplete batch", rest, self.buckets[i])
            idxs = [(i, j) for j in range(0, len(buck) - batch_size + 1, batch_size)]
            self.idx.extend(idxs)
        self.curr_idx = 0
        # holds NDArrays
        self.indices = []  # This will define how the data arrays will be organized
        self.nd_source = []
        self.nd_length = []
        self.nd_target = []
        self.nd_label = []
        self.reset()
    def _assign_to_buckets(self, source_sentences, target_sentences):
        # Distribute sentence pairs into the smallest fitting bucket, padding
        # each sequence up to the bucket shape; pairs that fit no bucket are
        # counted and dropped. Also accumulates token/UNK statistics for logging.
        ndiscard = 0
        tokens_source = 0
        tokens_target = 0
        num_of_unks_source = 0
        num_of_unks_target = 0
        for source, target in zip(source_sentences, target_sentences):
            tokens_source += len(source)
            tokens_target += len(target)
            num_of_unks_source += source.count(self.unk_id)
            num_of_unks_target += target.count(self.unk_id)
            buck_idx, buck = get_parallel_bucket(self.buckets, len(source), len(target))
            if buck is None:
                ndiscard += 1
                continue
            buff_source = np.full((buck[0],), self.pad_id, dtype=self.dtype)
            buff_target = np.full((buck[1],), self.pad_id, dtype=self.dtype)
            buff_label = np.full((buck[1],), self.pad_id, dtype=self.dtype)
            buff_source[:len(source)] = source
            buff_target[:len(target)] = target
            # Label is the target shifted left by one, with EOS appended.
            buff_label[:len(target)] = target[1:] + [self.eos_id]
            self.data_source[buck_idx].append(buff_source)
            self.data_length[buck_idx].append(len(source))
            self.data_target[buck_idx].append(buff_target)
            self.data_label[buck_idx].append(buff_label)
        logger.info("Source words: %d", tokens_source)
        logger.info("Target words: %d", tokens_target)
        logger.info("Vocab coverage source: %.0f%%", (1 - num_of_unks_source / tokens_source) * 100)
        logger.info("Vocab coverage target: %.0f%%", (1 - num_of_unks_target / tokens_target) * 100)
        # NOTE(review): len(self.data_source) is the number of buckets, not the
        # number of samples -- this log message is misleading; the per-bucket
        # loop below reports the real sample counts.
        logger.info('Total: {0} samples in {1} buckets'.format(len(self.data_source), len(self.buckets)))
        nsamples = 0
        for bkt, buck in zip(self.buckets, self.data_length):
            logger.info("bucket of {0} : {1} samples".format(bkt, len(buck)))
            nsamples += len(buck)
        check_condition(nsamples > 0, "0 data points available in the data iterator. "
                                      "%d data points have been discarded because they "
                                      "didn't fit into any bucket. Consider increasing "
                                      "the --max-seq-len to fit your data." % ndiscard)
        logger.info("%d sentence pairs out of buckets", ndiscard)
        logger.info("fill up mode: %s", self.fill_up)
        logger.info("")
    def _convert_to_array(self):
        # Turn each bucket's list of per-sentence arrays into one 2-D array and,
        # if fill_up is set, round the bucket up to a multiple of batch_size.
        for i in range(len(self.data_source)):
            self.data_source[i] = np.asarray(self.data_source[i], dtype=self.dtype)
            self.data_length[i] = np.asarray(self.data_length[i], dtype=self.dtype)
            self.data_target[i] = np.asarray(self.data_target[i], dtype=self.dtype)
            self.data_label[i] = np.asarray(self.data_label[i], dtype=self.dtype)
            n = len(self.data_source[i])
            if n % self.batch_size != 0:
                buck_shape = self.buckets[i]
                rest = self.batch_size - n % self.batch_size
                if self.fill_up == 'pad':
                    raise NotImplementedError
                elif self.fill_up == 'replicate':
                    logger.info(
                        "Replicating %d random examples from bucket %s to size it to multiple of batch size %d", rest,
                        buck_shape, self.batch_size)
                    random_indices = np.random.randint(self.data_source[i].shape[0], size=rest)
                    self.data_source[i] = np.concatenate((self.data_source[i], self.data_source[i][random_indices, :]),
                                                         axis=0)
                    self.data_length[i] = np.concatenate((self.data_length[i], self.data_length[i][random_indices]),
                                                         axis=0)
                    self.data_target[i] = np.concatenate((self.data_target[i], self.data_target[i][random_indices, :]),
                                                         axis=0)
                    self.data_label[i] = np.concatenate((self.data_label[i], self.data_label[i][random_indices, :]),
                                                        axis=0)
    def reset(self):
        """
        Resets and reshuffles the data.
        """
        self.curr_idx = 0
        # shuffle indices
        random.shuffle(self.idx)
        self.nd_source = []
        self.nd_length = []
        self.nd_target = []
        self.nd_label = []
        self.indices = []
        for i in range(len(self.data_source)):
            # shuffle indices within each bucket
            self.indices.append(np.random.permutation(len(self.data_source[i])))
            self._append_ndarrays(i, self.indices[-1])
    def _append_ndarrays(self, bucket: int, shuffled_indices: np.array):
        """
        Appends the actual data, selected by the given indices, to the NDArrays
        of the appropriate bucket. Use when reshuffling the data.

        :param bucket: Current bucket.
        :param shuffled_indices: Indices indicating which data to select.
        """
        self.nd_source.append(mx.nd.array(self.data_source[bucket].take(shuffled_indices, axis=0), dtype=self.dtype))
        self.nd_length.append(mx.nd.array(self.data_length[bucket].take(shuffled_indices, axis=0), dtype=self.dtype))
        self.nd_target.append(mx.nd.array(self.data_target[bucket].take(shuffled_indices, axis=0), dtype=self.dtype))
        self.nd_label.append(mx.nd.array(self.data_label[bucket].take(shuffled_indices, axis=0), dtype=self.dtype))
    def iter_next(self) -> bool:
        """
        True if iterator can return another batch
        """
        return self.curr_idx != len(self.idx)
    def next(self) -> mx.io.DataBatch:
        """
        Returns the next batch from the data iterator.

        :raises StopIteration: When all batches have been consumed.
        """
        if not self.iter_next():
            raise StopIteration
        i, j = self.idx[self.curr_idx]
        self.curr_idx += 1
        # Slice one batch out of bucket i, starting at row j.
        source = self.nd_source[i][j:j + self.batch_size]
        length = self.nd_length[i][j:j + self.batch_size]
        target = self.nd_target[i][j:j + self.batch_size]
        data = [source, length, target]
        label = [self.nd_label[i][j:j + self.batch_size]]
        provide_data = [mx.io.DataDesc(name=n, shape=x.shape, layout=C.BATCH_MAJOR) for n, x in
                        zip(self.data_names, data)]
        provide_label = [mx.io.DataDesc(name=n, shape=x.shape, layout=C.BATCH_MAJOR) for n, x in
                         zip(self.label_names, label)]
        # TODO: num pad examples is not set here if fillup strategy would be padding
        return mx.io.DataBatch(data, label,
                               pad=0, index=None, bucket_key=self.buckets[i],
                               provide_data=provide_data, provide_label=provide_label)
    def save_state(self, fname: str):
        """
        Saves the current state of iterator to a file, so that iteration can be
        continued. Note that the data is not saved, i.e. the iterator must be
        initialized with the same parameters as in the first call.

        :param fname: File name to save the information to.
        """
        with open(fname, "wb") as fp:
            pickle.dump(self.idx, fp)
            pickle.dump(self.curr_idx, fp)
            # NOTE(review): self.indices is a list of per-bucket permutation
            # arrays of differing lengths -- np.save on a ragged list relies on
            # object-array pickling; confirm round-trip with load_state.
            np.save(fp, self.indices)
    def load_state(self, fname: str):
        """
        Loads the state of the iterator from a file.

        :param fname: File name to load the information from.
        """
        with open(fname, "rb") as fp:
            self.idx = pickle.load(fp)
            self.curr_idx = pickle.load(fp)
            self.indices = np.load(fp)
        # Because of how checkpointing is done (pre-fetching the next batch in
        # each iteration), curr_idx should be always >= 1
        assert self.curr_idx >= 1
        # Right after loading the iterator state, next() should be called
        self.curr_idx -= 1
        self.nd_source = []
        self.nd_length = []
        self.nd_target = []
        self.nd_label = []
        for i in range(len(self.data_source)):
            self._append_ndarrays(i, self.indices[i])
| {
"content_hash": "b814ee90cd281811ef685ed161db5b20",
"timestamp": "",
"source": "github",
"line_count": 573,
"max_line_length": 120,
"avg_line_length": 44.01396160558464,
"alnum_prop": 0.587827121332276,
"repo_name": "KellenSunderland/sockeye",
"id": "058a2b9cf9a01454e538c54ead09fcc85c25910f",
"size": "25786",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sockeye/data_io.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "467469"
},
{
"name": "Shell",
"bytes": "1662"
}
],
"symlink_target": ""
} |
import cufunction
import pdb
import sys, os, os.path, shutil
def print_code(code, dest_path, name, ext):
    # Write `code` to <dest_path>/<name><ext>. (Python 2 file: uses the
    # `print >>file` statement form.)
    o_file = open(os.path.join(dest_path, name + ext), 'w')
    print >>o_file, code
    o_file.close()
def mkdir(dirname):
    """Create `dirname` if it does not already exist (no-op when present).

    Fix: the original checked existence with os.access and then called
    os.mkdir, which is racy (TOCTOU) -- another process could create the
    directory between the check and the call. Try the mkdir and tolerate
    the already-exists error instead (works on Python 2 and 3).
    """
    try:
        os.mkdir(dirname)
    except OSError:
        # Re-raise unless the directory is now present (created by us losing
        # a race, or it already existed).
        if not os.path.isdir(dirname):
            raise
def save_code(objects, source_file=None):
    # Dump the generated code variants (.py/.cpp/.cu) of every CuFunction in
    # `objects` into per-function subdirectories. When `source_file` is given,
    # its extension-less name becomes the output root and the file itself is
    # copied there; otherwise the current directory is used.
    if source_file:
        dirname, ext = os.path.splitext(source_file)
        mkdir(dirname)
        shutil.copy(source_file, dirname)
    else:
        dirname = '.'
    for name, v in objects.items():
        if isinstance(v, cufunction.CuFunction):
            code = v.get_code()
            #There may be many variants, let's just grab the first
            implementations = code.values()
            if len(implementations) > 0:
                selected_impl = implementations[0]
                extensions = ('.py', '.cpp', '.cu')
                #make target directory
                dest_path = os.path.join(dirname, name)
                mkdir(dest_path)
                # NOTE(review): Python 2 semantics -- map() is eager here; under
                # Python 3 this lazy map would never execute the writes.
                map(lambda x, y: print_code(x, dest_path, name, y),
                    selected_impl,
                    extensions)
| {
"content_hash": "511866014eae8be124127415acf5100f",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 67,
"avg_line_length": 30.512820512820515,
"alnum_prop": 0.5453781512605042,
"repo_name": "shyamalschandra/copperhead",
"id": "713811d4828aef6f5e2bcdc64c8179ac926681cf",
"size": "1807",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "copperhead/runtime/utility.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "70429"
},
{
"name": "Python",
"bytes": "303202"
}
],
"symlink_target": ""
} |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Mate Soos
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
import sys
import gzip
import re
import ntpath
from optparse import OptionParser
# Parse command-line options. (Python 2 script: print statements throughout.)
parser = OptionParser()
parser.add_option("-n", "--num",
                  dest="num", type=int,
                  help="Number of reconfs")
parser.add_option("--ignore", "-i",
                  dest="ignore", type=str,
                  help="Ignore these reconfs")
(options, args) = parser.parse_args()
# Build a set-like dict of reconf numbers to skip from the comma-separated list.
ignore = {}
if options.ignore:
    for r in options.ignore.split(","):
        r = r.strip()
        r = int(r)
        ignore[r] = True
if options.num is None:
    print "ERROR: You must give the number of reconfs"
    exit(-1)
# Emit the C++ header preamble to stdout.
print """
#ifndef _FEATURES_TO_RECONF_H_
#define _FEATURES_TO_RECONF_H_
#include "solvefeatures.h"
#include <iostream>
using std::cout;
using std::endl;
namespace CMSat {
"""
# Forward-declare one scoring function per (non-ignored) reconf.
for i in range(options.num):
    if i not in ignore:
        print "double get_score%d(const SolveFeatures& feat, const int verb);" %i
# Emit the dispatcher that evaluates every scorer and picks the best reconf.
print """
int get_reconf_from_features(const SolveFeatures& feat, const int verb)
{
\tdouble best_score = 0.0;
\tint best_val = 0;
\tdouble score;
"""
for i in range(options.num):
    if i in ignore:
        continue
    print """
\tscore = get_score%d(feat, verb);
\tif (verb >= 2)
\t\tcout << "c Score for reconf %d is " << score << endl;
\tif (best_score < score) {
\t\tbest_score = score;
\t\tbest_val = %d;
\t}
""" % (i, i, i)
print """
\tif (verb >= 2)
\t\tcout << "c Winning reconf is " << best_val << endl;
\treturn best_val;
}
"""
def read_one_reconf(reconf_num):
    # Translate the C5.0-style rules file outs/out<reconf_num>.rules into a
    # C++ get_score<reconf_num>() function printed to stdout. Each rule becomes
    # an if-block that adds its confidence to total_plus or total_neg.
    sys.stderr.write("Parsing reconf num %d\n" % reconf_num)
    f = open("outs/out%d.rules" % reconf_num)
    num_conds = 0
    cond_no = 0
    num_rules = 0
    rule_no = 0
    string = ""
    print """
double get_score%d(const SolveFeatures& feat, const int verb)
{""" % reconf_num
    for line in f:
        if "id=" in line:
            continue
        # Each line is a sequence of key="value" pairs separated by spaces.
        line = line.strip()
        line = line.split(" ")
        dat = {}
        for elem in line:
            elems = elem.split("=")
            elems = [e.strip("\"") for e in elems]
            # print "elems:", elems
            dat[elems[0]] = elems[1]
        if "conds" in dat:
            # Start of a rule: remember its condition count, class and confidence.
            assert num_conds == cond_no
            num_conds = int(dat["conds"])
            rule_class = dat["class"]
            cond_no = 0
            confidence = float(dat["confidence"])
            continue
        if "entries" in dat:
            continue
        if "rules" in dat:
            num_rules = int(dat["rules"])
        if "default" in dat:
            # Header line: emit the default score and the accumulators.
            default = dat["default"]
            if default == "+":
                print "\tdouble default_val = %.2f;\n" % (1.0)
            else:
                print "\tdouble default_val = %.2f;\n" % (0.0)
            print "\tdouble total_plus = 0.0;"
            print "\tdouble total_neg = 0.0;"
            continue
        #process rules
        if cond_no == 0:
            string = "\tif ("
        else:
            string += " &&\n\t\t"
        # print "dat:", dat
        string += "(feat.%s %s %.5f)" % (dat["att"], dat["result"],
                                         float(dat["cut"]))
        cond_no += 1
        #end rules
        if cond_no == num_conds:
            # All conditions collected: close the if and emit the accumulation.
            string += ")\n\t{"
            print string
            string = ""
            if rule_class == "+":
                string += "\t\ttotal_plus += %.3f;" % confidence
            else:
                string += "\t\ttotal_neg += %.3f;" % confidence
            print string
            print "\t}"
            rule_no += 1
            # print dat
    print "\t// num_rules:", num_rules
    print "\t// rule_no:", rule_no
    sys.stderr.write("num rules: %s rule_no: %s\n" % (num_rules, rule_no))
    assert num_rules == rule_no
    print "\t// default is:", default
    print """
\tif (total_plus == 0.0 && total_neg == 0.0) {
\t\treturn default_val;
\t}
\tif (verb >= 2) {
\t\t//cout << "c plus: " << total_plus << " , neg: " << total_neg << endl;
\t}
\treturn total_plus - total_neg;
}
"""
# Emit one scoring function per non-ignored reconf, then close the namespace
# and the include guard.
for i in range(options.num):
    if i not in ignore:
        read_one_reconf(i)
print """
} //end namespace
#endif //_FEATURES_TO_RECONF_H_
"""
| {
"content_hash": "eeaf8f56b63c73c86318f8b7289a06b1",
"timestamp": "",
"source": "github",
"line_count": 203,
"max_line_length": 81,
"avg_line_length": 24.17241379310345,
"alnum_prop": 0.5455471775015285,
"repo_name": "aristk/static-analyser",
"id": "67940962280a16806921ea4ca66e7c8f7608d49a",
"size": "6053",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "provers/cryptominisat-5.0.1/scripts/reconf/tocpp.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "23983"
},
{
"name": "C++",
"bytes": "5151003"
},
{
"name": "CMake",
"bytes": "108033"
},
{
"name": "CSS",
"bytes": "8270"
},
{
"name": "JavaScript",
"bytes": "1490"
},
{
"name": "Lex",
"bytes": "976"
},
{
"name": "M4",
"bytes": "25387"
},
{
"name": "Makefile",
"bytes": "29488"
},
{
"name": "Objective-C",
"bytes": "5467"
},
{
"name": "PHP",
"bytes": "30396"
},
{
"name": "Python",
"bytes": "623623"
},
{
"name": "Shell",
"bytes": "44743"
},
{
"name": "TeX",
"bytes": "454106"
},
{
"name": "Yacc",
"bytes": "3032"
}
],
"symlink_target": ""
} |
import json
from browser import window, document
from browser import ajax, console
from controller import Controller
from editor import Editor
from broker import Broker
class Context(object):
    """ application context object provides an interface to server-side api calls """

    def __init__(self):
        self.seq = 0  # JSONRPC request id counter, incremented per _rpc call
        self.endpoint = ''  # filled in by configure() from the server config
        self.broker = Broker
        self.log = console.log
        # Fetch the server config; configure() finishes the wiring.
        self._get(window.Bitwrap.config, self.configure)
        self.doc = document

    def time(self):
        """ return current time in milliseconds since the epoch

        (Fix: JS Date.now() returns milliseconds, not microseconds as the
        original docstring claimed.)
        """
        return window.Date.now()

    def configure(self, req):
        """ load config from server """
        _config = json.loads(req.text)
        self.endpoint = _config['endpoint']
        _editor = Editor(context=self, config=_config)
        if _config.get('use_websocket', False):
            self.broker(config=_config, editor=_editor)
        Controller(context=self, editor=_editor)

    @staticmethod
    def echo(req):
        """ write return value to console """
        # Fix: was a bare `except:`, which also swallows KeyboardInterrupt and
        # SystemExit; narrow it to Exception.
        try:
            txt = getattr(req, 'response')
            console.log(txt)
        except Exception:
            console.log(req)

    @staticmethod
    def clear(txt=''):
        """ clear python terminal """
        document['code'].value = txt

    def upload_pnml(self, name, body, callback=None, errback=None):
        """ upload_pnml(filename, body, callback=None, errback=None): upload xml petri-net definition"""
        req = ajax.ajax()
        if callback:
            req.bind('complete', callback)
        else:
            req.bind('complete', self.echo)
        req.open('POST', self.endpoint + '/petrinet/' + name, True)
        req.set_header('content-type', 'application/xml')
        req.send(body)

    def _rpc(self, method, params=None, callback=None, errback=None):
        """ _rpc(method, params=None, callback=None, errback=None): make JSONRPC POST to backend

        (Fix: the mutable default `params=[]` is replaced by the None sentinel;
        callers passing a list are unaffected.)
        """
        params = [] if params is None else params
        self.seq = self.seq + 1
        req = ajax.ajax()
        if callback:
            req.bind('complete', callback)
        else:
            req.bind('complete', self.echo)
        req.open('POST', self.endpoint + '/api', True)
        req.set_header('content-type', 'application/json')
        req.send(json.dumps({'id': self.seq, 'method': method, 'params': params}))

    def _get(self, resource, callback=None, errback=None):
        """ _get(resource, callback=None, errback=None): make http GET to backend """
        req = ajax.ajax()
        if callback:
            req.bind('complete', callback)
        else:
            req.bind('complete', self.echo)
        req.open('GET', self.endpoint + resource, True)
        req.send()

    def schemata(self, callback=None):
        """ schemata(callback=None): retrieve list of available state machine definitions """
        self._get('/schemata', callback=callback)

    def state(self, schema, oid, callback=None):
        """ state(schema, oid, callback=None): get current state """
        self._get('/state/%s/%s' % (schema, oid), callback=callback)

    def machine(self, schema, callback=None):
        """ machine(schema, callback=None): get machine definition """
        self._get('/machine/%s' % schema, callback=callback)

    def dispatch(self, schema, oid, action, payload=None, callback=None):
        """ dispatch(schema, oid, action, payload=None): dispatch new event via socket when available, else HTTP

        (Fix: mutable default `payload={}` replaced by None sentinel.)
        """
        payload = {} if payload is None else payload
        if self.broker.socket:
            self.broker.commit(schema, oid, action, payload=payload)
        else:
            self.commit(schema, oid, action, payload=payload, callback=callback)

    def commit(self, schema, oid, action, payload=None, callback=None):
        """ commit(schema, oid, action, payload=None, callback=None): post new event to api

        (Fix: mutable default `payload={}` replaced by None sentinel.)
        """
        payload = {} if payload is None else payload
        req = ajax.ajax()
        if callback:
            req.bind('complete', callback)
        else:
            req.bind('complete', self.echo)
        req.open('POST', self.endpoint + '/dispatch/%s/%s/%s' % (schema, oid, action), True)
        req.set_header('content-type', 'application/json')
        data = json.dumps(payload)
        req.send(str(data))

    def stream(self, schema, oid, callback=None):
        """ stream(schema, oid, callback=None): get all events """
        self._get('/stream/%s/%s' % (schema, oid), callback=callback)

    def event(self, schema, eventid, callback=None):
        """ event(schema, eventid, callback=None): get a single event """
        self._get('/event/%s/%s' % (schema, eventid), callback=callback)

    def exists(self, schema=None, oid=None, callback=None, errback=None):
        """ exists(schema=None, oid=None, callback=None, errback=None): test for existance of schema and/or stream """
        if not oid:
            self._rpc('schema_exists', params=[schema], callback=callback, errback=errback)
        else:
            self._rpc('stream_exists', params=[schema, oid], callback=callback, errback=errback)

    def load(self, machine_name, new_schema):
        """ load(machine_name, new_schema): load machine definition as db schema """
        self._rpc('schema_create', params=[machine_name, new_schema])

    def create(self, schema, oid):
        """ create(schema, oid): create a new stream """
        self._rpc('stream_create', params=[schema, oid])

    def destroy(self, schema, callback=None):
        """ destroy(schema, callback=None): drop from db / destroys a schema and all events """
        self._rpc('schema_destroy', params=[schema], callback=callback)
| {
"content_hash": "3ec5cdbbd4b3c84ff66de1aa351f9a48",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 118,
"avg_line_length": 37.91724137931035,
"alnum_prop": 0.6038559476173154,
"repo_name": "bitwrap/bitwrap-io",
"id": "a3deabb2b44411dc2b3ed78569e5182f5861a36c",
"size": "5498",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bitwrap_io/_brython/context.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3632"
},
{
"name": "Python",
"bytes": "92695"
},
{
"name": "Shell",
"bytes": "62"
}
],
"symlink_target": ""
} |
import os
import unittest
from unittest import mock
from alligator.backends.locmem_backend import Client as LocmemClient
# Default to "" so the `CONN_STRING.startswith(...)` check in the skipIf
# decorator below does not raise AttributeError when the env var is unset
# (the tests are simply skipped instead).
CONN_STRING = os.environ.get("ALLIGATOR_CONN", "")
@unittest.skipIf(
not CONN_STRING.startswith("locmem:"), "Skipping Locmem tests"
)
class LocmemTestCase(unittest.TestCase):
    def setUp(self):
        """Create a fresh client and wipe the class-level in-memory state."""
        super(LocmemTestCase, self).setUp()
        self.backend = LocmemClient(CONN_STRING)
        # Just reach in & clear things out.
        LocmemClient.queues = {}
        LocmemClient.task_data = {}
    def test_init(self):
        """A fresh backend starts with empty queues and task data."""
        self.assertEqual(LocmemClient.queues, {})
        self.assertEqual(LocmemClient.task_data, {})
    def test_len(self):
        """len() counts entries in the named queue; unknown queues count 0."""
        LocmemClient.queues = {
            "all": [["a", None], ["b", 12345678], ["c", None]]
        }
        self.assertEqual(self.backend.len("all"), 3)
        self.assertEqual(self.backend.len("something"), 0)
def test_drop_all(self):
LocmemClient.queues = {
"all": [["a", None], ["b", 12345678], ["c", None]]
}
LocmemClient.task_data = {
"a": {"whatev": True},
"b": "grump",
"d": "another",
}
self.backend.drop_all("all")
self.assertEqual(LocmemClient.queues, {"all": []})
self.assertEqual(LocmemClient.task_data, {"d": "another"})
def test_push(self):
self.assertEqual(LocmemClient.queues, {})
self.assertEqual(LocmemClient.task_data, {})
self.backend.push("all", "hello", {"whee": 1})
self.assertEqual(LocmemClient.queues, {"all": [["hello", None]]})
self.assertEqual(LocmemClient.task_data, {"hello": {"whee": 1}})
def test_push_delayed(self):
self.assertEqual(LocmemClient.queues, {})
self.assertEqual(LocmemClient.task_data, {})
self.backend.push("all", "hello", {"whee": 1}, delay_until=12345798)
self.assertEqual(LocmemClient.queues, {"all": [["hello", 12345798]]})
self.assertEqual(LocmemClient.task_data, {"hello": {"whee": 1}})
def test_pop(self):
self.backend.push("all", "hello", {"whee": 1})
data = self.backend.pop("all")
self.assertEqual(data, {"whee": 1})
self.assertEqual(LocmemClient.queues, {"all": []})
self.assertEqual(LocmemClient.task_data, {})
@mock.patch("time.time")
def test_pop_skip_delayed(self, mock_time):
mock_time.return_value = 12345678
self.backend.push("all", "hello", {"whee": 1}, delay_until=12345798)
self.backend.push("all", "hallo", {"whoo": 2})
# Here, we're checking to make sure a task that's waiting for a
# "future" time isn't pulled off the queue.
data = self.backend.pop("all")
self.assertEqual(data, {"whoo": 2})
self.assertEqual(LocmemClient.queues, {"all": [["hello", 12345798]]})
self.assertEqual(LocmemClient.task_data, {"hello": {"whee": 1}})
def test_get(self):
self.backend.push("all", "hello", {"whee": 1})
self.backend.push("all", "world", {"whee": 2})
data = self.backend.get("all", "world")
self.assertEqual(data, {"whee": 2})
self.assertEqual(LocmemClient.queues, {"all": [["hello", None]]})
self.assertEqual(LocmemClient.task_data, {"hello": {"whee": 1}})
# Try a non-existent one.
data = self.backend.get("all", "nopenopenope")
self.assertEqual(data, None)
| {
"content_hash": "c85efd0813fe1d966f5c27e79cc06671",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 77,
"avg_line_length": 35.2680412371134,
"alnum_prop": 0.5919321835720549,
"repo_name": "toastdriven/alligator",
"id": "b5453dd4511b9fc2b887a6fe1d1cedf2ecf393fe",
"size": "3421",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_locmem_backend.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "66064"
},
{
"name": "Shell",
"bytes": "472"
}
],
"symlink_target": ""
} |
DOCUMENTATION = '''
---
module: ec2_group
version_added: "1.3"
short_description: maintain an ec2 VPC security group.
description:
- maintains ec2 security groups. This module has a dependency on python-boto >= 2.5
options:
name:
description:
- Name of the security group.
required: true
description:
description:
- Description of the security group.
required: true
vpc_id:
description:
- ID of the VPC to create the group in.
required: false
rules:
description:
- List of firewall inbound rules to enforce in this group (see example).
required: false
rules_egress:
description:
- List of firewall outbound rules to enforce in this group (see example).
required: false
version_added: "1.6"
region:
description:
- the EC2 region to use
required: false
default: null
aliases: []
state:
version_added: "1.4"
description:
- Create or delete a security group
required: false
default: 'present'
choices: [ "present", "absent" ]
aliases: []
purge_rules:
version_added: "1.8"
description:
- Purge existing rules on security group that are not found in rules
required: false
default: 'true'
aliases: []
purge_rules_egress:
version_added: "1.8"
description:
- Purge existing rules_egress on security group that are not found in rules_egress
required: false
default: 'true'
aliases: []
extends_documentation_fragment: aws
notes:
- If a rule declares a group_name and that group doesn't exist, it will be
automatically created. In that case, group_desc should be provided as well.
The module will refuse to create a depended-on group without a description.
'''
EXAMPLES = '''
- name: example ec2 group
ec2_group:
name: example
description: an example EC2 group
vpc_id: 12345
region: eu-west-1a
aws_secret_key: SECRET
aws_access_key: ACCESS
rules:
- proto: tcp
from_port: 80
to_port: 80
cidr_ip: 0.0.0.0/0
- proto: tcp
from_port: 22
to_port: 22
cidr_ip: 10.0.0.0/8
- proto: udp
from_port: 10050
to_port: 10050
cidr_ip: 10.0.0.0/8
- proto: udp
from_port: 10051
to_port: 10051
group_id: sg-12345678
- proto: all
# the containing group name may be specified here
group_name: example
rules_egress:
- proto: tcp
from_port: 80
to_port: 80
cidr_ip: 0.0.0.0/0
group_name: example-other
# description to use if example-other needs to be created
group_desc: other example EC2 group
'''
try:
import boto.ec2
except ImportError:
print "failed=True msg='boto required for this module'"
sys.exit(1)
def make_rule_key(prefix, rule, group_id, cidr_ip):
    """Build a unique, normalised lookup key for a single group rule.

    Accepts either a rule dict (module input) or a boto IPPermissions
    object, and returns '<prefix>-<proto>-<from>-<to>-<group_id>-<cidr>'
    lower-cased, with literal 'None' placeholders re-capitalised so dict
    and boto representations of the same rule compare equal.
    """
    if isinstance(rule, dict):
        proto = rule.get('proto', None)
        from_port = rule.get('from_port', None)
        to_port = rule.get('to_port', None)
    else:
        # boto.ec2.securitygroup.IPPermissions exposes attributes instead
        proto = getattr(rule, 'ip_protocol', None)
        from_port = getattr(rule, 'from_port', None)
        to_port = getattr(rule, 'to_port', None)
    parts = (prefix, proto, from_port, to_port, group_id, cidr_ip)
    key = '-'.join(str(part) for part in parts)
    return key.lower().replace('-none', '-None')
def addRulesToLookup(rules, prefix, rule_dict):
    """Index every (rule, grant) pair of *rules* into *rule_dict*.

    Keys are produced by make_rule_key() so the rules requested by the
    user can later be matched against rules already on the group.
    The third parameter is named rule_dict to avoid shadowing the
    builtin ``dict``; all call sites in this file pass it positionally.
    """
    for rule in rules:
        for grant in rule.grants:
            rule_dict[make_rule_key(prefix, rule, grant.group_id, grant.cidr_ip)] = (rule, grant)
def get_target_from_rule(module, ec2, rule, name, group, groups, vpc_id):
    """
    Returns tuple of (group_id, ip, target_group_created) after
    validating rule params.

    rule: Dict describing a rule.
    name: Name of the security group being managed.
    groups: Dict of all available security groups, keyed by both id and
        name; mutated in place when a referenced group is created.

    AWS accepts an ip range or a security group as target of a rule. This
    function validates the rule specification and returns either a
    non-None group_id or a non-None ip range.
    NOTE(review): in check_mode, a group_name that does not exist yet
    leaves group_id as None while still reporting
    target_group_created=True.
    """
    group_id = None
    group_name = None
    ip = None
    target_group_created = False
    # group_id / group_name / cidr_ip are mutually exclusive targets.
    if 'group_id' in rule and 'cidr_ip' in rule:
        module.fail_json(msg="Specify group_id OR cidr_ip, not both")
    elif 'group_name' in rule and 'cidr_ip' in rule:
        module.fail_json(msg="Specify group_name OR cidr_ip, not both")
    elif 'group_id' in rule and 'group_name' in rule:
        module.fail_json(msg="Specify group_id OR group_name, not both")
    elif 'group_id' in rule:
        group_id = rule['group_id']
    elif 'group_name' in rule:
        group_name = rule['group_name']
        if group_name in groups:
            group_id = groups[group_name].id
        elif group_name == name:
            # Rule references the group being managed itself.
            group_id = group.id
            groups[group_id] = group
            groups[group_name] = group
        else:
            # Referenced group does not exist yet: auto-create it, but
            # only if the rule supplies a description for it.
            if not rule.get('group_desc', '').strip():
                module.fail_json(msg="group %s will be automatically created by rule %s and no description was provided" % (group_name, rule))
            if not module.check_mode:
                auto_group = ec2.create_security_group(group_name, rule['group_desc'], vpc_id=vpc_id)
                group_id = auto_group.id
                groups[group_id] = auto_group
                groups[group_name] = auto_group
            target_group_created = True
    elif 'cidr_ip' in rule:
        ip = rule['cidr_ip']
    return group_id, ip, target_group_created
def main():
    """Module entry point: ensure the named EC2 security group exists (or
    is absent) and that its ingress/egress rules match the task params."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        name=dict(required=True),
        description=dict(required=True),
        vpc_id=dict(),
        rules=dict(),
        rules_egress=dict(),
        state = dict(default='present', choices=['present', 'absent']),
        purge_rules=dict(default=True, required=False, type='bool'),
        purge_rules_egress=dict(default=True, required=False, type='bool'),
    )
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )

    name = module.params['name']
    description = module.params['description']
    vpc_id = module.params['vpc_id']
    rules = module.params['rules']
    rules_egress = module.params['rules_egress']
    state = module.params.get('state')
    purge_rules = module.params['purge_rules']
    purge_rules_egress = module.params['purge_rules_egress']

    changed = False

    ec2 = ec2_connect(module)

    # find the group if present
    # groups is keyed by BOTH id and name so rules can reference either.
    group = None
    groups = {}
    for curGroup in ec2.get_all_security_groups():
        groups[curGroup.id] = curGroup
        groups[curGroup.name] = curGroup

        if curGroup.name == name and (vpc_id is None or curGroup.vpc_id == vpc_id):
            group = curGroup

    # Ensure requested group is absent
    if state == 'absent':
        if group:
            '''found a match, delete it'''
            try:
                group.delete()
            except Exception, e:
                module.fail_json(msg="Unable to delete security group '%s' - %s" % (group, e))
            else:
                group = None
                changed = True
        else:
            '''no match found, no changes required'''

    # Ensure requested group is present
    elif state == 'present':
        if group:
            '''existing group found'''
            # check the group parameters are correct
            group_in_use = False
            rs = ec2.get_all_instances()
            for r in rs:
                for i in r.instances:
                    group_in_use |= reduce(lambda x, y: x | (y.name == 'public-ssh'), i.groups, False)

            if group.description != description:
                if group_in_use:
                    module.fail_json(msg="Group description does not match, but it is in use so cannot be changed.")

        # if the group doesn't exist, create it now
        else:
            '''no match found, create it'''
            if not module.check_mode:
                group = ec2.create_security_group(name, description, vpc_id=vpc_id)

                # When a group is created, an egress_rule ALLOW ALL
                # to 0.0.0.0/0 is added automatically but it's not
                # reflected in the object returned by the AWS API
                # call. We re-read the group for getting an updated object
                # amazon sometimes takes a couple seconds to update the security group so wait till it exists
                while len(ec2.get_all_security_groups(filters={ 'group_id': group.id, })) == 0:
                    time.sleep(0.1)

                group = ec2.get_all_security_groups(group_ids=(group.id,))[0]
                changed = True
    else:
        module.fail_json(msg="Unsupported state requested: %s" % state)

    # create a lookup for all existing rules on the group
    if group:

        # Manage ingress rules
        groupRules = {}
        addRulesToLookup(group.rules, 'in', groupRules)

        # Now, go through all provided rules and ensure they are there.
        if rules:
            for rule in rules:
                group_id, ip, target_group_created = get_target_from_rule(module, ec2, rule, name, group, groups, vpc_id)
                if target_group_created:
                    changed = True

                # 'all' protocols are normalised to the AWS wildcard -1
                # with unset ports so the generated key matches AWS state.
                if rule['proto'] in ('all', '-1', -1):
                    rule['proto'] = -1
                    rule['from_port'] = None
                    rule['to_port'] = None

                # If rule already exists, don't later delete it
                ruleId = make_rule_key('in', rule, group_id, ip)
                if ruleId in groupRules:
                    del groupRules[ruleId]
                # Otherwise, add new rule
                else:
                    grantGroup = None
                    if group_id:
                        grantGroup = groups[group_id]

                    if not module.check_mode:
                        group.authorize(rule['proto'], rule['from_port'], rule['to_port'], ip, grantGroup)
                    changed = True

        # Finally, remove anything left in the groupRules -- these will be defunct rules
        if purge_rules:
            for (rule, grant) in groupRules.itervalues() :
                grantGroup = None
                if grant.group_id:
                    grantGroup = groups[grant.group_id]
                if not module.check_mode:
                    group.revoke(rule.ip_protocol, rule.from_port, rule.to_port, grant.cidr_ip, grantGroup)
                changed = True

        # Manage egress rules
        groupRules = {}
        addRulesToLookup(group.rules_egress, 'out', groupRules)

        # Now, go through all provided rules and ensure they are there.
        if rules_egress:
            for rule in rules_egress:
                group_id, ip, target_group_created = get_target_from_rule(module, ec2, rule, name, group, groups, vpc_id)
                if target_group_created:
                    changed = True

                if rule['proto'] in ('all', '-1', -1):
                    rule['proto'] = -1
                    rule['from_port'] = None
                    rule['to_port'] = None

                # If rule already exists, don't later delete it
                ruleId = make_rule_key('out', rule, group_id, ip)
                if ruleId in groupRules:
                    del groupRules[ruleId]
                # Otherwise, add new rule
                else:
                    grantGroup = None
                    if group_id:
                        # NOTE: egress API takes the plain id, not the object.
                        grantGroup = groups[group_id].id

                    if not module.check_mode:
                        ec2.authorize_security_group_egress(
                                group_id=group.id,
                                ip_protocol=rule['proto'],
                                from_port=rule['from_port'],
                                to_port=rule['to_port'],
                                src_group_id=grantGroup,
                                cidr_ip=ip)
                    changed = True
        elif vpc_id and not module.check_mode:
            # when using a vpc, but no egress rules are specified,
            # we add in a default allow all out rule, which was the
            # default behavior before egress rules were added
            default_egress_rule = 'out--1-None-None-None-0.0.0.0/0'
            if default_egress_rule not in groupRules:
                ec2.authorize_security_group_egress(
                        group_id=group.id,
                        ip_protocol=-1,
                        from_port=None,
                        to_port=None,
                        src_group_id=None,
                        cidr_ip='0.0.0.0/0'
                        )
                changed = True
            else:
                # make sure the default egress rule is not removed
                del groupRules[default_egress_rule]

        # Finally, remove anything left in the groupRules -- these will be defunct rules
        if purge_rules_egress:
            for (rule, grant) in groupRules.itervalues():
                grantGroup = None
                if grant.group_id:
                    grantGroup = groups[grant.group_id].id
                if not module.check_mode:
                    ec2.revoke_security_group_egress(
                            group_id=group.id,
                            ip_protocol=rule.ip_protocol,
                            from_port=rule.from_port,
                            to_port=rule.to_port,
                            src_group_id=grantGroup,
                            cidr_ip=grant.cidr_ip)
                changed = True

    if group:
        module.exit_json(changed=changed, group_id=group.id)
    else:
        module.exit_json(changed=changed, group_id=None)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *

# Old-style Ansible modules call main() unconditionally (no
# `if __name__ == '__main__'` guard); the wildcard imports above supply
# AnsibleModule, ec2_connect, time, sys and friends used by main().
main()
| {
"content_hash": "afb5021837f64f1936b961bfd9429087",
"timestamp": "",
"source": "github",
"line_count": 390,
"max_line_length": 142,
"avg_line_length": 35.735897435897435,
"alnum_prop": 0.5564325177584846,
"repo_name": "ygol/dotfiles",
"id": "b502bd1db53c0ddc26ff8de273d0563afc272a62",
"size": "13981",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "bin/.venv-ansible-venv/lib/python2.6/site-packages/ansible/modules/core/cloud/amazon/ec2_group.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5939"
},
{
"name": "CSS",
"bytes": "513"
},
{
"name": "JavaScript",
"bytes": "10707"
},
{
"name": "Lua",
"bytes": "35950"
},
{
"name": "Perl",
"bytes": "8914"
},
{
"name": "PowerShell",
"bytes": "51840"
},
{
"name": "Python",
"bytes": "7417846"
},
{
"name": "Ruby",
"bytes": "24958"
},
{
"name": "Shell",
"bytes": "316253"
},
{
"name": "Vim script",
"bytes": "1437097"
}
],
"symlink_target": ""
} |
"""Implementation of embedding layer with shared weights."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf # pylint: disable=g-bad-import-order
from official.transformer.model import model_utils
from official.utils.accelerator import tpu as tpu_utils
class EmbeddingSharedWeights(tf.layers.Layer):
  """Calculates input embeddings and pre-softmax linear with shared weights.

  The same [vocab_size, hidden_size] weight matrix is used both as the
  token embedding table (call) and as the output projection (linear).
  """

  def __init__(self, vocab_size, hidden_size, method="gather"):
    """Specify characteristic parameters of embedding layer.

    Args:
      vocab_size: Number of tokens in the embedding. (Typically ~32,000)
      hidden_size: Dimensionality of the embedding. (Typically 512 or 1024)
      method: Strategy for performing embedding lookup. "gather" uses tf.gather
        which performs well on CPUs and GPUs, but very poorly on TPUs. "matmul"
        one-hot encodes the indicies and formulates the embedding as a sparse
        matrix multiplication. The matmul formulation is wasteful as it does
        extra work, however matrix multiplication is very fast on TPUs which
        makes "matmul" considerably faster than "gather" on TPUs.

    Raises:
      ValueError: if method is not "gather" or "matmul".
    """
    super(EmbeddingSharedWeights, self).__init__()
    self.vocab_size = vocab_size
    self.hidden_size = hidden_size
    if method not in ("gather", "matmul"):
      raise ValueError("method {} must be 'gather' or 'matmul'".format(method))
    self.method = method

  def build(self, _):
    # Input shape is ignored; the weight shape depends only on
    # vocab_size/hidden_size. AUTO_REUSE lets call() and linear() share
    # the same variable across invocations.
    with tf.variable_scope("embedding_and_softmax", reuse=tf.AUTO_REUSE):
      # Create and initialize weights. The random normal initializer was chosen
      # randomly, and works well.
      self.shared_weights = tf.get_variable(
          "weights", [self.vocab_size, self.hidden_size],
          initializer=tf.random_normal_initializer(
              0., self.hidden_size ** -0.5))
    self.built = True

  def call(self, x):
    """Get token embeddings of x.

    Args:
      x: An int64 tensor with shape [batch_size, length]
    Returns:
      embeddings: float32 tensor with shape [batch_size, length, embedding_size]
      padding: float32 tensor with shape [batch_size, length] indicating the
        locations of the padding tokens in x.
    """
    with tf.name_scope("embedding"):
      # Create binary mask of size [batch_size, length]
      # (token id 0 is treated as padding).
      mask = tf.to_float(tf.not_equal(x, 0))

      if self.method == "gather":
        embeddings = tf.gather(self.shared_weights, x)
        # Zero out embeddings of padding positions.
        embeddings *= tf.expand_dims(mask, -1)
      else:  # matmul
        embeddings = tpu_utils.embedding_matmul(
            embedding_table=self.shared_weights,
            values=tf.cast(x, dtype=tf.int32),
            mask=mask
        )
        # embedding_matmul already zeros out masked positions, so
        # `embeddings *= tf.expand_dims(mask, -1)` is unnecessary.

      # Scale embedding by the sqrt of the hidden size
      embeddings *= self.hidden_size ** 0.5

      return embeddings

  def linear(self, x):
    """Computes logits by running x through a linear layer.

    Args:
      x: A float32 tensor with shape [batch_size, length, hidden_size]
    Returns:
      float32 tensor with shape [batch_size, length, vocab_size].
    """
    with tf.name_scope("presoftmax_linear"):
      batch_size = tf.shape(x)[0]
      length = tf.shape(x)[1]

      # Flatten to 2-D for the matmul with the shared embedding table,
      # then restore the [batch, length, vocab] shape.
      x = tf.reshape(x, [-1, self.hidden_size])
      logits = tf.matmul(x, self.shared_weights, transpose_b=True)

      return tf.reshape(logits, [batch_size, length, self.vocab_size])
| {
"content_hash": "1f005864f584e9831c1cb78233e3283e",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 80,
"avg_line_length": 37.44210526315789,
"alnum_prop": 0.6671352263143098,
"repo_name": "mlperf/training_results_v0.5",
"id": "cc189626bc919fe165f902437fa4c9cf7fb2bc69",
"size": "4246",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "v0.5.0/google/cloud_v3.8/resnet-tpuv3-8/code/resnet/model/models/official/transformer/model/embedding_layer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "5720"
},
{
"name": "C++",
"bytes": "1288180"
},
{
"name": "CMake",
"bytes": "40880"
},
{
"name": "CSS",
"bytes": "32420"
},
{
"name": "Cuda",
"bytes": "1362093"
},
{
"name": "Dockerfile",
"bytes": "19488"
},
{
"name": "Go",
"bytes": "1088660"
},
{
"name": "HTML",
"bytes": "19756888"
},
{
"name": "Java",
"bytes": "45405"
},
{
"name": "JavaScript",
"bytes": "302838"
},
{
"name": "Jupyter Notebook",
"bytes": "9104667"
},
{
"name": "Lua",
"bytes": "4430"
},
{
"name": "Makefile",
"bytes": "3652"
},
{
"name": "Python",
"bytes": "31508548"
},
{
"name": "Scala",
"bytes": "106211"
},
{
"name": "Shell",
"bytes": "409745"
}
],
"symlink_target": ""
} |
import pymel.core as pm
from functools import partial
import System.utils as utils
reload(utils)
class AttachGeoToBlueprint_ShelfTool:
def AttachWithParenting(self):
self.parenting = True
self.skinning = False
self.ProcessInitialSelection()
def AttachWithSkinning(self):
self.skinning = True
self.parenting = False
self.ProcessInitialSelection()
def ProcessInitialSelection(self):
selection = pm.ls(selection = True)
self.blueprintJoints = []
self.geometry = []
self.blueprintJoints = self.FindBlueprintJoints(selection)
self.geometry = self.FindGeometry(selection)
if self.blueprintJoints == None:
pm.headsUpMessage("Please select the blueprint joint(s) you wish to attach geometry to.")
pm.scriptJob(event = ["SelectionChanged", self.SelectBlueprintJoint_callBack], runOnce = True)
elif self.geometry == None:
pm.headsUpMessage("Please select the geometry you wish to attach to the specified blueprint joint.")
pm.scriptJob(event = ["SelectionChanged", self.SelectGeometry_callBack], runOnce = True)
else:
self.AttachGeometryToBlueprint_attachment()
def SelectBlueprintJoint_callBack(self):
selection = pm.ls(selection = True)
self.blueprintJoints = self.FindBlueprintJoints(selection)
if self.blueprintJoints == None:
pm.confirmDialog(title = "Attach Geometry to Blueprint", message = "Blueprint joint selection invalid. \nTerminating tool.", button = ["Accept"], defaultButton = "Accept")
elif self.geometry == None:
pm.headsUpMessage("Please select the geometry you wish to attach to the specified blueprint joint(s).")
pm.scriptJob(event = ["SelectionChanged", self.SelectGeometry_callBack], runOnce = True)
else:
self.AttachGeometryToBlueprint_attachment()
def SelectGeometry_callBack(self):
selection = pm.ls(selection = True)
self.geometry = self.FindGeometry(selection)
if self.geometry == None:
pm.confirmDialog(title = "Attach Geometry to Blueprint", message = "Geometry selection invalid. \nTerminating tool.", button = ["Accept"], defaultButton = "Accept")
else:
self.AttachGeometryToBlueprint_attachment()
def AttachGeometryToBlueprint_attachment(self):
if self.parenting:
self.AttachGeometryToBlueprint_parenting(self.blueprintJoints[0], self.geometry)
else:
print self.blueprintJoints
self.AttachGeometryToBlueprint_skinning(self.blueprintJoints, self.geometry)
def FindBlueprintJoints(self, _selection):
selectedBlueprintJoints = []
for obj in _selection:
if pm.objectType(obj, isType = "joint"):
jointNameInfo = utils.StripAllNamespaces(obj)
if jointNameInfo != None:
jointName = jointNameInfo[1]
if jointName.find("blueprint_") == 0:
selectedBlueprintJoints.append(obj)
if len(selectedBlueprintJoints) > 0:
return selectedBlueprintJoints
else:
return None
def FindGeometry(self, _selection):
_selection = pm.ls(_selection, transforms = True)
nonJointSelection = []
for node in _selection:
if not pm.objectType(node, isType = "joint"):
nonJointSelection.append(node)
if len(nonJointSelection) > 0:
return nonJointSelection
else:
return None
def AttachGeometryToBlueprint_parenting(self, _blueprintJoint, _geometry):
jointName = utils.StripAllNamespaces(_blueprintJoint)[1]
parentGroup = pm.group(empty = True, name = "%s_geoAttach_parentGrp#" %jointName)
if len(_geometry) == 1:
geoParent = pm.listRelatives(_geometry, parent = True)
if len(geoParent) != 0:
pm.parent(parentGroup, geoParent)
pm.parentConstraint(_blueprintJoint, parentGroup, maintainOffset = False, name = "%s_parentConstraint" %jointName)
pm.scaleConstraint(_blueprintJoint, parentGroup, maintainOffset = False, name = "%s_scaleConstraint" %jointName)
geoParent = parentGroup
children = pm.listRelatives(_blueprintJoint, children = True)
children = pm.ls(children, type = "joint")
if len(children) != 0:
childJoint = children[0]
scaleGroup = pm.group(empty = True, name = "%s_geoAttach_scaleGrp" %childJoint)
pm.parent(scaleGroup, parentGroup, relative = True)
geoParent = scaleGroup
originalTxValue = pm.getAttr("%s.translateX" %childJoint)
scaleFactor = pm.shadingNode("multiplyDivide", asUtility = True, name = "%s_scaleFactor" %scaleGroup)
pm.setAttr("%s.operation" %scaleFactor)
pm.connectAttr("%s.translateX" %childJoint, "%s.input1X" %scaleFactor)
pm.setAttr("%s.input2X" %scaleFactor, originalTxValue)
pm.connectAttr("%s.outputX" %scaleFactor, "%s.scaleX" %scaleGroup)
for geo in _geometry:
pm.parent(geo, geoParent, absolute = True)
def AttachGeometryToBlueprint_skinning(self, _blueprintJoints, _geometry):
blueprintModules = set([])
# Get namespaces of joint chains
for joint in _blueprintJoints:
blueprintNamespace = utils.StripLeadingNamespace(joint)[0]
blueprintModules.add(blueprintNamespace)
# Unlock containers
for module in blueprintModules:
pm.lockNode("%s:module_container" %module, lock = False, lockUnpublished = False)
# Attach all geometry to joint chain
for geo in _geometry:
pm.skinCluster(_blueprintJoints, geo, toSelectedBones = True, name = "%s_skinCluster" %geo)
# Lock containers
for module in blueprintModules:
pm.lockNode("%s:module_container" %module, lock = True, lockUnpublished = True) | {
"content_hash": "89883afc5b3cfbb20d63ffc84a61b581",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 174,
"avg_line_length": 29.57837837837838,
"alnum_prop": 0.7200292397660819,
"repo_name": "Shadowtags/ModularRiggingTool",
"id": "954034b6ae30394961654fc5c63044d8f8c377c0",
"size": "5472",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nwModularRiggingTool/Modules/System/attachGeoToBlueprint.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mathematica",
"bytes": "3100880"
},
{
"name": "Python",
"bytes": "263312"
}
],
"symlink_target": ""
} |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Convert the primary key of several jobs-app models to BigAutoField.

    The six AlterField operations are identical except for the model
    name, so they are generated in order from a single template.
    """

    dependencies = [
        ('jobs', '0018_alter_job_last_export_run'),
    ]

    operations = [
        migrations.AlterField(
            model_name=target_model,
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True,
                                      serialize=False, verbose_name='ID'),
        )
        for target_model in (
            'datamodelpreset',
            'exportprofile',
            'jobpermission',
            'license',
            'userjobactivity',
            'userlicense',
        )
    ]
| {
"content_hash": "3dad20afc320d4261baa9a90b279d34e",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 111,
"avg_line_length": 36.24390243902439,
"alnum_prop": 0.5895020188425303,
"repo_name": "venicegeo/eventkit-cloud",
"id": "c28bb5021a219925dc33dcd17259b582b2a31b5f",
"size": "1535",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "eventkit_cloud/jobs/migrations/0019_auto_20211014_2210.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "90420"
},
{
"name": "Dockerfile",
"bytes": "2466"
},
{
"name": "HTML",
"bytes": "85741"
},
{
"name": "Java",
"bytes": "123740"
},
{
"name": "JavaScript",
"bytes": "597810"
},
{
"name": "Python",
"bytes": "1145801"
},
{
"name": "Shell",
"bytes": "6127"
},
{
"name": "TypeScript",
"bytes": "1456680"
}
],
"symlink_target": ""
} |
r"""
.. currentmodule:: pylayers.gis.selectl
.. autosummary::
"""
from __future__ import print_function
import os
import pdb
from PIL import Image
import numpy as np
from pylayers.util import geomutil as geu
from pylayers.util import pyutil as pyu
import pylayers.util.plotutil as plu
import matplotlib.pyplot as plt
#from pylayers.util.easygui import *
from matplotlib.widgets import RectangleSelector
import copy
class SelectL(object):
    """ Associates a Layout and a matplotlib figure for interactive editing.

    Keyboard shortcuts
    ------------------
    'l'        : select activelayer
    'i'        : back to init state
    'e'        : edit segment
    'CTRL + t' : translate structure
    'h'        : add subsegment
    'd'        : delete subsegment
    'r'        : refresh
    'o'        : toggle overlay
    'm'        : toggle mode (point or segment)
    'z'        : change display parameters
    'q'        : quit interactive mode
    'x'        : save .str2 file
    'w'        : display all layers

    See the ``ddoc`` dict built in __init__ for the full binding list.
    """
    def __init__(self,L,fig,ax):
        """ SelectL associates a Layout and a figure

        Parameters
        ----------
        L   : Layout
            the layout being edited
        fig : figure
            matplotlib figure to draw on
        ax  : axes
            matplotlib axes to draw on
        """
        self.fig=fig
        self.ax=ax
        self.L = L
        # On-figure status text (upper-left corner, axes coordinates).
        self.text = self.ax.text(0.05, 0.95, 'Selected : none',
                                 transform=self.ax.transAxes, va='top')
        self.set_origin = False
        self.set_x = False
        self.set_y = False
        # Current selections: points, segments, co-segments.
        self.pt = []
        self.seg = []
        self.coseg = []
        self.pt1 = np.array([])
        self.pt2 = np.array([])
        self.selected_pt1 = 0
        self.selected_pt2 = 0
        self.selected_edge1 = 0
        self.selected_edge2 = 0
        self.current_layer = self.L.display['activelayer']
        self.npsel = 0
        self.nedge_sel = 0
        self.indp = 0
        # Editor state machine; 'Init' is the neutral selection state.
        self.state = 'Init'
        self.evt=''
        # Human-readable title per state (shown as the figure title).
        self.statename={'Init':'Point/Segment Selection',
                        'CP':'Create Point',
                        'SP1':'Select Point 1',
                        'SP2':'Select Point 2, Click Again for Creating Segment',
                        'SS':'Select Segment',
                        'SSS':'Select Sub Segment',
                        'CPS':'Click again for Split Segment',
                        'CPSS':'Create Point On Sub Segment',
                        'SMP': 'Multiple Points Selection',
                        'SMS': 'Multiple Segments Selection'
                       }
        # Context-sensitive help line per state (shown in the status bar).
        self.help={'':'',
                   'Init':'Select Point or Segment/ F1: Multiple selection/ F2: Create Point/ CTRL+q: Quit',
                    'CP':'Create Point/ +CTRL same x/ +SHIFT same y',
                    'SP1':'Select Point/ Click another point to create segment',
                    'SP2':'Click Again for Creating Segment',
                    'SS':'e: edit segment properties, h: add a sub-segment',
                    'SSS':'Select Sub Segment',
                    'CPS':'Click again for Split Segment',
                    'CPSS':'Create Point On Sub Segment',
                    'SMP': 't: toggle point/segment, Shift + select : add selected points, CTRL + select : remove selected points',
                    'SMS': 't: toggle point/segment, e: Edit Selected Segments Propeties'
                   }
        self.nsel = 0
        self.ax.axis(self.L.display['box'])
        plt.title(self.statename[self.state])
        # Undo stack of Gs graph snapshots (see update_state).
        self.undoGs=[]
        self.bundo=False
        self.update_state()
        # Modifier-key flags maintained by OnPress/OnRelease.
        self.shift_is_held = False
        self.ctrl_is_held = False
        self.alt_is_held = False
        self.selectpt=[]
        self.selectseg=[]
        self.selected='pt'
        # save matplotlib config
        # (keymap.save is disabled so 'x'/'s' keys reach our handler).
        self.rcconf = {}
        self.rcconf['keymap.save']= plt.rcParams['keymap.save']
        plt.rcParams['keymap.save']=[]
        # Key-binding documentation, printed by OnPress in verbose mode.
        self.ddoc = {'l'  : 'select activelayer',
                     'i'  :' back to init state',
                     'j'  :' vertical and horizontal scaling',
                     'e'  :' edit segment',
                     'b'  :' edit segment keyboard',
                     'CTRL + t'  :' translate structure',
                     'h'  :' add subsegment',
                     'd'  :' delete selected object',
                     'r'  :' refresh',
                     'o'  :' toggle overlay (<> CP mode) set origin (CP mode) ',
                     'm'  :' toggle mode (point or segment)',
                     'n'  : 'toggle node label display ',
                     'z'  : 'change display parameters',
                     'x'  : 'save .str2 and .ini file',
                     'w'  :' display all layers',
                     'v'  :' flip layout w.r.t y axis',
                     'f'  :' toggle points nodes display',
                     'g'  :' toggle segments nodes display',
                     '='  :' increment layer ',
                     ','  : 'this help',
                     'delete' :'delete selected',
                     '$'  :' decrement layer '}
def show(self,fig,ax,clear=False, dnodes=True, dedges=True, font_size=14, title=''):
""" show layout
Parameters
----------
clear : boolean
dnodes : boolean
dedges : boolean
dlabels : boolean
font_size : integer
title : string
"""
if title=='':
title = self.statename[self.state]
axis = self.ax.axis()
self.L.display['clear'] = clear
self.L.display['fontsize'] = font_size
self.L.display['title'] = title
self.fig,self.ax = self.L.showGs(fig=self.fig,ax=self.ax,axis=axis,isonb=True)
return(self.fig,self.ax)
def plotselptseg(self,pt,color='y',ms=10,marker='o'):
""" plot selected point or segments
Parameters
----------
pt : list
list of points or segments to plot
"""
if len(pt)>0:
pts = np.array([self.L.Gs.pos[x] for x in pt])
p1 = self.ax.plot(pts[:,0], pts[:,1],marker=marker,
visible=True,
color =color,
ms=10,
alpha=0.4)
self.fig.canvas.draw()
return self.fig,self.ax
def OnPress(self,event,verbose=True):
""" Keyboard event handler
Parameters
----------
event
verbose
"""
# fig = plt.gcf()
# ax = plt.gca()
# selected
self.nsel = 0
self.ptsel = np.array([])
self.evt = event.key
if event.key == 'shift':
self.shift_is_held = True
if event.key == 'control':
self.ctrl_is_held = True
if event.key == 'alt':
self.alt_is_held = True
if verbose:
try:
print("Evenement :", self.evt,self.ddoc[self.evt])
except:
print(self.evt + 'N/A')
self.new_state()
def OnRelease(self, event):
if event.key == 'shift':
self.shift_is_held = False
if event.key == 'control':
self.ctrl_is_held = False
if event.key == 'alt':
self.alt_is_held = False
    def OnClickRelease(self, event):
        """Mouse-button release handler: intentionally a no-op hook."""
        pass
    def OnMotion(self, event):
        """Mouse-motion handler: intentionally a no-op hook."""
        pass
def OnClick(self, event):
""" handle OnClick event
Parameters
----------
event :
See Also
--------
pylayers.gis.layout.Layout.ispoint
"""
fig = self.fig#plt.gcf()
ax = self.ax#plt.gca()
self.nsel = 0
self.ptsel = np.array([])
xmin, xmax, ymin, ymax = self.ax.axis()
#print( xmin,xmax,ymin,ymax)
dx = xmax - xmin
dy = ymax - ymin
dd = np.minimum(dx, dy)
if event.button == 1 and event.inaxes:
self.evt = 'lclic'
x = event.xdata
y = event.ydata
self.ptsel = np.array((x, y))
self.nsel = self.L.ispoint(self.ptsel, dd / 100)
if event.button == 2 and event.inaxes:
self.evt = 'cclic'
x = event.xdata
y = event.ydata
self.ptsel = np.array((x, y))
self.nsel = self.L.ispoint(self.ptsel, dd / 100)
if event.button == 3 and event.inaxes:
self.evt = 'rclic'
x = event.xdata
y = event.ydata
self.ptsel = np.array((x, y))
self.nsel = self.L.ispoint(self.ptsel, dd / 100)
#print("Selected point coord : ", self.ptsel)
#print("Selected point number: ", self.nsel)
if self.nsel > 0:
print("Selected segment : ", self.nsel)
self.new_state()
def format_coord(self,x, y):
col = int(x+0.5)
row = int(y+0.5)
string = 'x=%1.4f, y=%1.4f'%(x, y)
try:
string = string + ' ' + self.L.Gs.node[self.nsel]['name']
except:
pass
try:
string = string + ' with ' +str(len(self.L.Gs.node[self.nsel]['ss_name'])) + 'subseg(s)'
except:
pass
string = string + ' ///' +self.help[self.state]
return string
# if col>=0 and col<numcols and row>=0 and row<numrows:
# z = X[row,col]
# return 'x=%1.4f, y=%1.4f, z=%1.4f'%(x, y, z)
# else:
# return 'x=%1.4f, y=%1.4f'%(x, y)
    def update_state(self):
        """ Refresh the editor display for the current state.

        Pushes a copy of Gs on the undo stack (unless an undo is in
        progress), then redraws/decorates the figure according to
        ``self.state`` (Init, SP1, SP2, SS, SSS, CP, CPS, CPSS, SM*).

        Returns
        -------
        fig, ax
            the updated matplotlib figure and axes
        """
        # fig = plt.gcf()
        # ax  = plt.gca()
        # snapshot the graph for undo, keeping at most 50 entries
        if not self.bundo:
            self.undoGs.append(self.L.Gs.copy())
            if len(self.undoGs) > 50:
                self.undoGs.pop(0)
        self.ax.format_coord=self.format_coord
        if self.state == 'Init':
            # reset all selections and hide any highlight artists
            self.fig,self.ax = self.show(self.fig,self.ax,clear=True)
            self.ax.title.set_text(self.statename[self.state])
            self.selected_edge1 = 0
            self.selected_pt1 = 0
            self.selected_pt2 = 0
            self.selectpt=[]
            self.selectseg=[]
            try:
                del self.pt_previous
            except:
                pass
            try:
                # deactivate the rectangle selector if one exists
                self.selector.set_active(False)
                print('inhib select')
            except:
                pass
            #ax.title.set_text(self.state)
            #ax.title.set_text('Init : ' + self.L.display['activelayer'])
            try:
                self.p1[0].set_visible(False)
            except:
                pass
            try:
                self.p2[0].set_visible(False)
            except:
                pass
            try:
                self.segment[0].set_visible(False)
            except:
                pass
            #
            # If Layout has no point go to CP state
            #
            if self.L.Np==0:
                self.state='CP'
                self.update_state()
        if self.state == 'SP1':
            # first point selected: highlight it in yellow
            self.fig,self.ax = self.show(self.fig,self.ax,clear=False)
            self.ax.title.set_text(self.statename[self.state])
            print('Selected node : '+str(self.nsel))
            #ax.title.set_text(self.nsel))
            self.selected_pt1 = self.nsel
            self.pt1 = np.array(self.L.Gs.pos[self.nsel]).reshape(2, 1)
            self.pt_previous = self.pt1
            self.p1 = self.ax.plot([self.pt1[0]], [self.pt1[1]], 'o', visible=True)
            self.p1[0].set_color('yellow')
            self.p1[0].set_ms(10)
            self.p1[0].set_alpha(0.4)
            try:
                self.p2.set_visible(False)
            except:
                pass
        if self.state == 'SP2':
            # second point selected: both points shown in green
            self.p1[0].set_color('green')
            self.ax.title.set_text(self.statename[self.state])
            #ax.title.set_text('Selected node : %d ' % (self.nsel))
            print('Selected node : ' + str(self.nsel))
            self.selected_pt2 = self.nsel
            self.pt2 = np.array(self.L.Gs.pos[self.nsel]).reshape(2, 1)
            self.pt_previous = self.pt2
            self.p2 = self.ax.plot([self.pt2[0]], [self.pt2[1]], 'o', visible=True)
            self.p2[0].set_color('green')
            self.p2[0].set_ms(10)
            self.p2[0].set_alpha(0.4)
            #ax.title.set_text('SP2')
        if self.state == 'SS':
            # a segment is selected: draw it in red and report its layer
            self.ax.title.set_text(self.statename[self.state])
            try:
                self.p1[0].set_visible(False)
            except:
                pass
            try:
                self.p2[0].set_visible(False)
            except:
                pass
            self.selected_edge1 = self.nsel
            nse = self.nsel
            ta, he = self.L.Gs.neighbors(nse)
            pta = np.array(self.L.Gs.pos[ta])
            phe = np.array(self.L.Gs.pos[he])
            alpha = self.L.display['alpha']
            self.current_layer = self.L.Gs.node[nse]['name']
            self.L.display['activelayer'] = self.current_layer
            #self.seg = linet(ax,pta,phe,alpha,'red',3.5)
            segdico = self.L.Gs.node[nse]
            self.fig,self.ax=self.show(self.fig,self.ax,clear=False)
            self.segment = self.ax.plot([pta[0],phe[0]],
                                        [pta[1],phe[1]],
                                        'r',linewidth=3, visible=True)
            if 'ss_name' in segdico:
                cosegname = segdico['ss_name']
                titre = 'Select Segment : %d (%d->%d) Layer : %s Coseg : %s ' % (nse, ta, he, self.current_layer, cosegname)
            else:
                titre = 'Select Segment : %d (%d->%d) Layer : %s' % (nse, ta, he, self.L.Gs.node[nse]['name'])
            print(titre)
            #ax.title.set_text(titre)
            self.L.show_nodes(ndlist=[nse], size=200, color='r', alpha=0.5)
        if self.state == 'SSS':
            # sub-segment selected: recolor the segment in blue
            self.ax.title.set_text(self.statename[self.state])
            nse = self.selected_edge1
            segdico = self.L.Gs.node[nse]
            z = segdico['ss_z']
            #ax.title.set_text('SSS : '+self.L.Gs.node[nse]['name']+' ['+str(z[0])+']')
            print(self.L.Gs.node[nse]['name']+' ['+str(z[0])+']')
            self.segment[0].set_color('blue')
        #
        # Create Point state
        #
        if self.state == 'CP':
            self.ax.title.set_text(self.statename[self.state])
            try:
                self.segment[0].set_visible(False)
            except:
                pass
            try:
                self.segment1[0].set_visible(False)
            except:
                pass
            try:
                self.segment2[0].set_visible(False)
            except:
                pass
            print('lclic : free point, +CTRL same x, +SHIFT: same y')
            self.fig,self.ax=self.show(self.fig,self.ax,clear=False)
            self.L.g2npy()
        #
        # Create Point on Segment state
        #
        if self.state == 'CPS':
            # first support segment highlighted in green
            self.ax.title.set_text(self.statename[self.state])
            self.selected_edge1 = self.nsel
            ta, he = self.L.Gs.neighbors(self.nsel)
            self.pta1 = np.array(self.L.Gs.pos[ta])
            self.phe1 = np.array(self.L.Gs.pos[he])
            self.current_layer = self.L.Gs.node[self.nsel]['name']
            self.L.display['activelayer'] = self.current_layer
            self.segment1 = self.ax.plot([self.pta1[0],self.phe1[0]],
                                         [self.pta1[1],self.phe1[1]],
                                         'g',linewidth=3, visible=True)
            try:
                self.segment2[0].set_visible(False)
            except:
                pass
        if self.state == 'CPSS':
            # second support segment highlighted in cyan
            self.ax.title.set_text(self.statename[self.state])
            self.selected_edge2 = self.nsel
            ta, he = self.L.Gs.neighbors(self.nsel)
            self.pta2 = np.array(self.L.Gs.pos[ta])
            self.phe2 = np.array(self.L.Gs.pos[he])
            self.current_layer = self.L.Gs.node[self.nsel]['name']
            self.L.display['activelayer'] = self.current_layer
            self.segment2 = self.ax.plot([self.pta2[0],self.phe2[0]],
                                         [self.pta2[1],self.phe2[1]],
                                         'c',linewidth=3, visible=True)
        if 'SM' in self.state:
            # multiple-selection states (SMP/SMS): clear single selections
            self.fig,self.ax = self.show(self.fig,self.ax,clear=True)
            self.ax.title.set_text(self.statename[self.state])
            self.selected_edge1 = 0
            self.selected_pt1 = 0
            self.selected_pt2 = 0
            try:
                del self.pt_previous
            except:
                pass
            # self.state='SM'
        #print self.state
        #print self.nsel
        #print self.selected_pt1
        #print self.selected_pt2
        self.fig.canvas.draw()
        return(self.fig,self.ax)
    def new_state(self):
        """ Layout editor state machine.

        Dispatches on the last recorded event ``self.evt`` (keyboard key
        or mouse click name) and on the current ``self.state``, mutating
        the layout and/or transitioning to a new state, then refreshing
        the display through :meth:`update_state`.

        Key bindings
        ------------
        'l' : select activelayer
        'i' : back to init state
        'j' : vertical and horizontal scaling
        'e' : edit segment (or equalize point coordinates in Init)
        'b' : edit segment keyboard
        'CTRL + t' : translate structure
        'h' : add subsegment
        'd |Del' : delete subsegment
        'r |F5' : refresh
        'o' : toggle overlay (<> CP mode) / set origin (CP mode)
        'm' : toggle mode (point or segment)
        'n' : toggle node label display
        'z' : change display parameters
        'CTRL+q' : quit
        'x |CTRL+s' : save .str2 and .ini file
        'w' : display all layers
        'v' : flip layout w.r.t y axis
        'f' : toggle points nodes display
        'g' : toggle segments nodes display
        '=' : increment layer
        '$' : decrement layer
        """
        fig = plt.gcf()
        ax = plt.gca()
        sl = self.L.sl
        cold = pyu.coldict()
        #print "In State ",self.state
        #print "In Event ",self.evt
        #
        # ',' : dump the key documentation dictionary
        #
        if self.evt == ',':
            for k in self.ddoc.keys():
                print(k,self.ddoc[k])
        #
        # 'v' : flip layout in y
        #
        if self.evt == 'v':
            for n in self.L.Gs.pos:
                self.L.Gs.pos[n]=(self.L.Gs.pos[n][0],-self.L.Gs.pos[n][1])
            self.update_state()
            return
        #
        # translation of layout (open a box)
        #
        # if self.evt == 't' :
        #     offx,offy = offsetbox()
        #     for n in self.L.Gs.pos:
        #         self.L.Gs.pos[n]=(self.L.Gs.pos[n][0]+offx,self.L.Gs.pos[n][1]+offy)
        #     self.update_state()
        #     return
        if self.evt=='escape':
            self.state='Init'
            self.update_state()
            self.fig.canvas.draw()
            return
        #
        # ctrl+z : undo (pop two snapshots: the current one and the previous)
        #
        if self.evt=='ctrl+z':
            self.bundo=True
            print( len(self.L.Gs))
            if len (self.undoGs) >2:
                oGs=self.undoGs.pop(-1)
                oGs=self.undoGs.pop(-1)
                self.L.Gs=oGs
                self.L.g2npy()
            self.update_state()
            self.bundo=False
            return
        #
        # 't' : in multiple-selection mode, toggle between selected points
        #       (SMP) and selected segments (SMS)
        #
        if self.evt=='t':
            if 'SM' in self.state:
                self.update_state()
                # fig=plt.gcf()
                # ax=plt.gca()
                if self.selected == 'pt':
                    self.plotselptseg(self.selectseg,color='r')
                    PP=self.L.pt[:,self.L.tahe[:,self.L.tgs[self.selectseg]]]
                    if PP.shape[-1]!=0:
                        self.fig,self.ax=plu.displot(PP[:,0],PP[:,1],fig=self.fig,ax=self.ax,color='r',linewidth=3,alpha=0.4)
                        plt.draw()
                    self.selected='seg'
                    self.state='SMS'
                else:
                    self.fig,self.ax= self.plotselptseg(self.selectpt)
                    self.selected='pt'
                    self.state='SMP'
                self.ax.title.set_text(self.statename[self.state])
                # self.update_state()
        #
        # '3' : 3D view of the layout
        #
        if self.evt == '3':
            self.L._show3()
            return
        # Choose layers to visualized
        #
        if self.evt == 'l':
            listchoices = self.L.name.keys()
            self.L.display['layers'] = multchoicebox('message',
                                                     'titre', listchoices)
            self.state = 'Init'
            self.update_state()
            return
        #
        # 'f' toggle points nodes display
        #
        if self.evt=='f':
            self.L.display['nodes'] = not self.L.display['nodes']
            print (self.L.display['nodes'])
            self.update_state()
            return
        #
        # 'g' toggle segment nodes dislay
        #
        if self.evt=='g':
            self.L.display['ednodes'] = not self.L.display['ednodes']
            print (self.L.display['ednodes'])
            self.update_state()
            return
        #
        # '=' Increment layer
        #
        if self.evt=='=':
            N = len(self.L.display['layerset'])
            index = self.L.display['layerset'].index(self.L.display['activelayer'])
            self.L.display['activelayer'] = self.L.display['layerset'][(index+1) % N]
            self.current_layer = self.L.display['activelayer']
            print (self.current_layer)
            self.update_state()
            return
        #
        # '$' Decrement layer
        #
        if self.evt=='$':
            N = len(self.L.display['layerset'])
            index = self.L.display['layerset'].index(self.L.display['activelayer'])
            self.L.display['activelayer'] = self.L.display['layerset'][(index-1) % N]
            self.current_layer = self.L.display['activelayer']
            print (self.current_layer)
            self.update_state()
            return
        #
        # 'i' : Back to init state
        #
        if self.evt == 'i':
            self.state = 'Init'
            self.update_state()
            return
        #
        # 'e'
        #    if state == Init
        #        egalize points coordinates
        #    if state == SS
        #        edit segment properties
        #
        if self.evt == 'e':
            if (self.state == 'Init'):
                #
                # averaging one point coordinate along the smallest dimension
                #
                x1 = self.ax.get_xbound()
                y1 = self.ax.get_ybound()
                # get node list and edge list
                ndlist, edlist = self.L.get_zone([x1[0],x1[1],y1[0],y1[1]])
                for k,nd in enumerate(ndlist):
                    try:
                        tp = np.vstack((tp,np.array(self.L.Gs.pos[nd])))
                    except:
                        # first iteration: tp does not exist yet
                        tp = np.array(self.L.Gs.pos[nd])
                mtp = np.sum(tp,axis=0)/(k+1)
                stp = np.sqrt(np.sum((tp-mtp)*(tp-mtp),axis=0)/(k+1))
                # if the standard deviation is lower than 10cm
                # averaging coordinates along the shortest axis
                if min(stp) < 0.10:
                    ind = np.where(stp==min(stp))[0][0]
                    for nd in ndlist:
                        x = self.L.Gs.pos[nd][0]
                        y = self.L.Gs.pos[nd][1]
                        if ind ==0:
                            self.L.Gs.pos[nd]=(mtp[0],y)
                        if ind ==1:
                            self.L.Gs.pos[nd]=(x,mtp[1])
                plt.axis('tight')
                self.fig,self.ax = self.show(self.fig,self.ax,clear=True)
                self.update_state()
                return()
            if (self.state == 'SS') | (self.state =='SSS'):
                self.L.edit_segment(self.selected_edge1)
                self.state = 'Init'
                self.update_state()
                return
            if self.state == 'SP1':
                self.L.edit_point(self.selected_pt1)
                self.state = 'Init'
                self.update_state()
                return
            if self.state == 'SMS':
                # edit first selected segment, then apply the same data to the rest
                outdata=self.L.edit_segment(self.selectseg[0])
                [self.L.edit_segment(s,outdata=outdata,gui=False) for s in self.selectseg]
                self.update_state()
                return
        #
        # "b" : enter a segment node value with keyboard
        #
        if self.evt == 'b':
            if self.state == 'Init':
                # NOTE(review): raw_input is Python 2 only — confirm target version
                self.nsel = eval(raw_input("seg number :"))
                #self.L.edit_segment(nseg)
                self.state='SS'
                self.update_state()
                return
        #
        # j : vertical and horizontal scaling (Init)
        #
        if self.evt == 'j':
            if self.state == 'Init':
                vscale = eval(enterbox('enter vscale',argDefaultText='1.0'))
                hscale = eval(enterbox('enter hscale',argDefaultText='1.0'))
                for n in self.L.Gs.pos:
                    self.L.Gs.pos[n]=(self.L.Gs.pos[n][0]*hscale,self.L.Gs.pos[n][1]*vscale)
                plt.axis('tight')
                self.fig,self.ax = self.show(self.fig,self.ax,clear=True)
                self.update_state()
                return
        # Init
        # h : horizontal scaling factor
        #    add subsegment (SS)
        #
        if self.evt == 'h':
            # if self.state == 'Init':
            #     hscale = eval(raw_input("horizontal scaling factor : "))
            #     for n in self.L.Gs.pos:
            #         self.L.Gs.pos[n]=(self.L.Gs.pos[n][0]*hscale,self.L.Gs.pos[n][1])
            #     plt.axis('tight')
            #     fig,ax = self.show(fig,ax,clear=True)
            #     self.update_state()
            #     return()
            if self.state == 'SS':
                result = self.L.add_subseg(self.selected_edge1,self.current_layer)
                if result:
                    self.state = 'SSS'
                else :
                    self.state = 'Init'
                self.update_state()
                return
        #
        # d : delete
        #
        if self.evt == 'd' or self.evt =='delete':
            if self.state == 'SP1':
                self.state = 'Init'
                self.L.del_points(self.selected_pt1)
                self.update_state()
                return
            if self.state == 'SS':
                self.L.del_segment(self.selected_edge1)
                self.state = 'Init'
                self.update_state()
                return
            if self.state == 'SSS':
                self.L.del_subseg(self.selected_edge1)
                self.state = 'Init'
                self.update_state()
                return
            if self.state=='SMP':
                # get boundary of the region
                if hasattr(self,'selectpt'):
                    ptlist = self.selectpt
                    self.selectpt=[]
                    self.selectseg=[]
                    self.L.del_points(ptlist)
                    self.state = 'Init'
                    self.update_state()
                    return
                else :
                    print ('no selected region')
            if self.state=='SMS':
                seglist = self.selectseg
                self.selectpt=[]
                self.selectseg=[]
                self.L.del_segment(seglist)
                self.state = 'Init'
                self.update_state()
                return
            else :
                print ('no selected region')
        #
        # r : Refresh
        #
        if self.evt == 'r' or self.evt == 'f5':
            #plt.axis('tight')
            plt.axis(self.L.display['box'])
            self.fig,self.ax = self.show(self.fig,self.ax,clear=True)
            self.state = 'Init'
            self.update_state()
            return
        #
        # o : Toggle overlay
        #
        if self.evt == 'o' and not self.ctrl_is_held:
            self.state='Init'
            self.update_state()
            if self.L.display['overlay']:
                self.L.display['overlay'] = False
                self.update_state()
            else:
                self.L.display['overlay'] = True
                self.update_state()
            return
        # o + CTRL : arm origin selection (next left click sets the origin)
        if self.evt == 'o' :
            self.set_origin = True
        #
        # F2 : Create point
        #
        if self.evt == 'f2':
            self.state = "CP"
            self.update_state()
            return
        #
        # m : Toggle mode edition Point | Segment
        #
        if self.evt == 'm':
            if self.state == "Init":
                self.state = "CP"
            elif self.state == "CP":
                self.state = "Init"
            self.update_state()
            return
        #
        # 'z' : change display parameters
        #
        if self.evt == 'z':
            self.L.displaygui()
            self.fig,self.ax = self.show(fig=self.fig,ax=self.ax,clear=True)
            return
        #
        # 'q' : quit interactive mode
        #
        # if self.evt == 'q':
        #     plt.rcParams.update(self.rcconf)
        #     fig.canvas.mpl_disconnect(self.L.cid1)
        #     fig.canvas.mpl_disconnect(self.L.cid2)
        #     return
        if self.evt == 'ctrl+q':
            plt.rcParams.update(self.rcconf)
            self.fig.canvas.mpl_disconnect(self.L.cid1)
            self.fig.canvas.mpl_disconnect(self.L.cid2)
            plt.close()
            return
        #
        # 'x' save structure
        #
        if self.evt == 'x' or self.evt =='ctrl+s':
            racine, ext = os.path.splitext(self.L.filename)
            filename = racine + '.str2'
            fileini = racine + '.ini'
            # Commented because ss_ce not updated
            #self.L.savestr2(filename)
            self.L.saveini(fileini)
            # NOTE(review): the .str2 path is printed although only the
            # .ini file is actually written above — confirm intent
            print( "structure saved in ", filename)
            print( "structure saved in ", fileini)
            return
        #
        # 'n' : toggle node label display
        #
        if self.evt == 'n':
            self.L.display['ndlabel'] = not self.L.display['ndlabel']
            self.L.display['edlabel'] = not self.L.display['edlabel']
            print( self.L.display['activelayer'])
            # NOTE(review): assigns local 'ax', not self.ax — confirm
            self.fig,ax = self.show(fig=self.fig,ax=self.ax,clear=True)
            self.fig.canvas.draw()
            return
        #
        # "w" : display all layers
        #
        if self.evt == 'w':
            # display all layer
            self.L.display['activelayer'] = self.L.name.keys()
            print( self.L.display['activelayer'])
            self.fig,self.ax = self.show(fig=self.fig,ax=self.ax,clear=True)
            return self.fig,self.ax
        #
        # Left clic and selected node is a point (negative node index)
        #
        if (self.evt == 'lclic') & (self.nsel < 0):
            #
            # select point 1 : Init -> SP1
            #
            if self.state=='Init':
                # yellow point
                self.state = 'SP1'
                self.update_state()
                return
            #
            # select point 2 : SP1 --> SP2
            #
            if self.state=='SP1':
                if self.nsel != self.selected_pt1:
                    # green point
                    self.state = 'SP2'
                    self.update_state()
                    return
                else:
                    self.state = 'Init'
                    # yellow point
                    self.update_state()
                    return
            #
            # Create point on selected segment orthogonaly to segment starting in
            # selected point
            #
            # Not finished
            #
            if self.state=='SS':
                # get the connection of the selected segment
                connect = self.L.Gs.node[self.selected_edge1]['connect']
                if (self.nsel != connect[0]) & (self.nsel != connect[1]):
                    self.L.add_nfpe(self.nsel,self.nsel,self.selected_edge1,self.selected_edge2)
                    pass
        #
        # Left clic and selected node is a segment (positive node index)
        #
        if (self.evt == 'lclic') & (self.nsel > 0):
            if self.state=='Init':
                self.state = 'SS'
                self.update_state()
                return
            if self.state=='SS':
                self.nsel = self.selected_edge1
                segdico = self.L.Gs.node[self.nsel]
                if 'ss_name' in segdico:
                    self.state = 'SSS'
                else:
                    self.state = 'CPS'
                self.update_state()
                return
        #
        # Right clic and selected node is a point
        #
        if (self.evt == 'rclic') & (self.nsel < 0):
            if self.state=='SP1':
                if self.nsel==self.selected_pt1:
                    self.state = 'Init'
                    self.update_state()
                    return
        #
        # Right clic and selected node is a segment
        #
        if (self.evt == 'rclic') & (self.nsel > 0):
            if self.state=='SS':
                self.state = 'Init'
                self.update_state()
                return
            if self.state=='SSS':
                self.state = 'SS'
                self.update_state()
                return
            if self.state == 'CP':
                # create point on edge
                self.state = 'CPS'
                self.update_state()
                return
            if (self.state == 'CPS') & (self.nsel!= self.selected_edge1):
                # create point on edge
                self.state = 'CPSS'
                self.update_state()
                return
        #
        # Left clic (no modifier)
        #
        if (self.evt == 'lclic') and not (self.shift_is_held or self.alt_is_held or self.ctrl_is_held ):
            # add free node
            # or set origin
            if self.state == 'CP':
                if self.set_origin:
                    # first click after 'o'+CTRL : shift the display box origin
                    offx = self.ptsel[0]
                    offy = self.ptsel[1]
                    print( offx,offy)
                    xmin,xmax,ymin,ymax = self.L.display['box']
                    self.L.display['box'] = [xmin-offx,xmax-offx,ymin-offy,ymax-offy]
                    self.set_origin=False
                    self.set_x=True
                    plt.axis('tight')
                    self.fig,self.ax = self.show(self.fig,self.ax,clear=True)
                    self.update_state()
                    return
                if self.set_x:
                    # second click: rescale x axis from the entered value
                    offx = self.ptsel[0]
                    val = eval(enterbox('enter x value'))
                    ratio = val/offx
                    print( ratio)
                    xmin,xmax,ymin,ymax = self.L.display['box']
                    self.L.display['box'] = [ratio*xmin,ratio*xmax,ymin,ymax]
                    self.set_x=False
                    self.set_y=True
                    plt.axis('tight')
                    self.fig,self.ax = self.show(self.fig,self.ax,clear=True)
                    self.update_state()
                    return
                if self.set_y:
                    # third click: rescale y axis from the entered value
                    offx = self.ptsel[1]
                    val = eval(enterbox('enter y value'))
                    ratio = val/offx
                    print( ratio)
                    xmin,xmax,ymin,ymax = self.L.display['box']
                    self.L.display['box'] = [xmin,xmax,ratio*ymin,ratio*ymax]
                    self.set_y=False
                    plt.axis('tight')
                    self.fig,self.ax = self.show(self.fig,self.ax,clear=True)
                    self.update_state()
                    return
                else:
                    # plain click in CP mode: add a free node at the click position
                    self.L.add_fnod(tuple(self.ptsel))
                    self.pt_previous = self.ptsel
                    self.update_state()
                    return
            if self.state == 'SP2':
                ta = self.selected_pt1
                he = self.selected_pt2
                segexist = self.L.isseg(ta,he)
                print( segexist)
                # if segment do not already exist, create it
                if not segexist:
                    self.nsel = self.L.add_segment(ta, he,name=self.current_layer)
                else:
                    print( "segment ("+str(ta)+","+str(he)+") already exists")
                self.L.g2npy()
                self.state = 'Init'
                self.update_state()
                return
            # create point on segment
            if self.state == 'CPS':
                pt_new = geu.ptonseg(self.pta1, self.phe1, self.ptsel)
                pd1 = pt_new - self.pta1
                pd2 = self.phe1 - self.pta1
                alpha = np.sqrt(np.dot(pd1, pd1)) / np.sqrt(np.dot(pd2, pd2))
                # NOTE(review): comparing pt_new against [] — presumably
                # geu.ptonseg returns [] on failure; confirm this comparison
                # behaves as intended when pt_new is an ndarray
                if (pt_new != []):
                    # calculate alpha
                    self.L.add_pons(self.selected_edge1, 1. - alpha)
                    self.current_layer = self.L.Gs.node[self.selected_edge1]['name']
                    self.state = 'Init'
                self.update_state()
                return
        #
        # Right Clic event
        #
        if (self.evt == 'rclic') or (self.evt == 'lclic' and self.ctrl_is_held ):
            if self.state == 'CP':
                # add a point sharing the previous point's x coordinate
                try:
                    self.ptsel[0] = self.pt_previous[0]
                    self.L.add_fnod(tuple(self.ptsel))
                    self.pt_previous = self.ptsel
                    self.update_state()
                    return
                except:
                    return
            if self.state=='SP2':
                # deselect one of the two selected points
                if self.nsel == self.selected_pt1:
                    self.p1[0].set_visible(False)
                    self.p2[0].set_visible(False)
                    self.nsel = self.selected_pt2
                    self.state = 'SP1'
                    self.update_state()
                    return
                if self.nsel == self.selected_pt2:
                    self.p1[0].set_visible(False)
                    self.p2[0].set_visible(False)
                    self.nsel = self.selected_pt1
                    self.state = 'SP1'
                    self.update_state()
                    return
            #
            # right click : back to SS from CPS
            #
            if self.state == 'CPS':
                self.state = 'SS'
                self.update_state()
                return
            #
            # right click : back to CPS from CPSS
            #
            if self.state == 'CPSS':
                self.state = 'CPS'
                # NOTE(review): update_state() takes no arguments — this call
                # would raise TypeError if this branch is reached; confirm
                self.update_state(self.fig,self.ax)
                return
        #
        # Center Clic event
        #
        if (self.evt == 'cclic') or (self.evt == 'lclic' and self.shift_is_held ):
            if self.state == 'CP':
                # add a point sharing the previous point's y coordinate
                try:
                    self.ptsel[1] = self.pt_previous[1]
                    self.L.add_fnod(tuple(self.ptsel))
                    self.pt_previous = self.ptsel
                    self.update_state()
                    return
                except:
                    return
        #
        # Left clic and selected node is a point
        #
        def point_select_callback(eclick, erelease):
            # rectangle-selector callback: collect points/segments in the box
            'eclick and erelease are the press and release events'
            self.update_state()
            if not (self.shift_is_held or self.ctrl_is_held):
                self.selectpt=[]
                self.selectseg=[]
            x1, y1 = eclick.xdata, eclick.ydata
            x2, y2 = erelease.xdata, erelease.ydata
            # print x1,x2,y1,y2
            # normalize the box so x1<x2 and y1<y2
            if x1>x2:
                x1,x2=x2,x1
            if y1>y2:
                y1,y2=y2,y1
            # try:
            selectpt,selectseg = self.L.get_zone([x1,x2,y1,y2])
            if not self.ctrl_is_held:
                # extend selection; keep only segments whose both ends are selected
                # NOTE(review): filter() returns an iterator on Python 3 but a
                # list on Python 2 — list semantics appear to be assumed here
                self.selectpt.extend(selectpt)
                self.selectseg.extend(selectseg)
                self.selectseg=filter(lambda x: self.L.Gs.node[x]['connect'][0] in self.selectpt
                                 and self.L.Gs.node[x]['connect'][1] in self.selectpt,
                                 self.selectseg)
                self.selectpt=np.unique(self.selectpt).tolist()
                self.selectseg=np.unique(self.selectseg).tolist()
            else:
                # CTRL held: remove the boxed items from the current selection
                [self.selectpt.pop(self.selectpt.index(x)) for x in selectpt if x in self.selectpt]
                [self.selectseg.pop(self.selectseg.index(x)) for x in selectseg if x in self.selectseg]
            # except:
            #     print 'empty selection'
            print(self.selectpt,self.selectseg)
            self.plotselptseg(self.selectpt)
            self.selected='pt'
            print(self.state)
        def toggle_selector(event):
            # flip the RectangleSelector active flag
            if toggle_selector.RS.active:
                toggle_selector.RS.set_active(False)
            if not toggle_selector.RS.active:
                toggle_selector.RS.set_active(True)
        #
        # F1 : enter multiple-selection mode with a rectangle selector
        #
        if self.evt == 'f1':
            #avoid conflict between zoom and selection
            # fm=plt.get_current_fig_manager()
            # if fm.toolbar._active == 'PAN':
            #     fm.toolbar.pan()
            # if fm.toolbar._active == 'ZOOM':
            #     fm.toolbar.zoom()
            self.state='SMP'
            toggle_selector.RS = RectangleSelector(self.ax, point_select_callback,
                                       drawtype='box', useblit=True,
                                       button=[1,3], # don't use middle button
                                       minspanx=5, minspany=5,
                                       spancoords='pixels')
            self.selector = toggle_selector.RS
            self.update_state()
        #
        # F9 : print current multiple selection
        #
        if self.evt == 'f9':
            print(self.selectpt, self.selectseg)
            #print self.selectsl
        # plt.connect('key_press_event', toggle_selector)
# Run the module's doctests when executed as a script.
if __name__ == '__main__':
    import doctest
    doctest.testmod()
| {
"content_hash": "95bd51e319e95b57434d335aef33ee21",
"timestamp": "",
"source": "github",
"line_count": 1230,
"max_line_length": 127,
"avg_line_length": 33.96504065040651,
"alnum_prop": 0.4567106302510951,
"repo_name": "pylayers/pylayers",
"id": "af5a7e136349afcc98fd60f82c00b1118363ea5a",
"size": "41801",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pylayers/gis/selectl.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "584"
},
{
"name": "Jupyter Notebook",
"bytes": "52724429"
},
{
"name": "Python",
"bytes": "3907177"
},
{
"name": "Shell",
"bytes": "1512"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
    """Initial schema for the scrapbooks app: ScrapBook and ScrapBookItem."""
    # Auto-generated by Django's makemigrations; avoid hand-editing the
    # operations below so they stay in sync with the recorded migration state.
    initial = True
    # Depends on the scraps app's initial migration because
    # ScrapBookItem.scrap is a foreign key to scraps.Scrap.
    dependencies = [
        ('scraps', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='ScrapBook',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
                ('title', models.CharField(max_length=100, verbose_name='Scrap book title')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='ScrapBookItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
                ('book', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='items', to='scrapbooks.ScrapBook')),
                ('scrap', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='references', to='scraps.Scrap')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| {
"content_hash": "c48894548efcd6b14a5aa2a014030049",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 147,
"avg_line_length": 43.372093023255815,
"alnum_prop": 0.6144772117962467,
"repo_name": "tonysyu/scrappyr-app",
"id": "601cc3f9c53ba31637340500e694f2321455e6fc",
"size": "1938",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scrappyr/scrapbooks/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2033"
},
{
"name": "HTML",
"bytes": "23257"
},
{
"name": "JavaScript",
"bytes": "18890"
},
{
"name": "Python",
"bytes": "71659"
},
{
"name": "Shell",
"bytes": "4188"
}
],
"symlink_target": ""
} |
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_namespace import V1Namespace
class TestV1Namespace(unittest.TestCase):
    """Unit-test stubs for the generated ``V1Namespace`` model."""

    def setUp(self):
        """No per-test fixtures are required."""
        pass

    def tearDown(self):
        """No per-test cleanup is required."""
        pass

    def testV1Namespace(self):
        """Smoke-test: construct an empty V1Namespace model object."""
        instance = kubernetes.client.models.v1_namespace.V1Namespace()
# Run the test suite when executed as a script.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "62557fccdca76e2847f96ab0b9b5c0d0",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 105,
"avg_line_length": 20.2,
"alnum_prop": 0.6806930693069307,
"repo_name": "sebgoa/client-python",
"id": "814210935fd35b68237bfdaaff86c44cc70cb64d",
"size": "825",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "kubernetes/test/test_v1_namespace.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5855378"
},
{
"name": "Shell",
"bytes": "16387"
}
],
"symlink_target": ""
} |
import csv
import sys
from graphmagic import graphanalyse
def main():
    """Takes csv file path as a parameter in command line call."""
    # Require the CSV path as the first command-line argument.
    if len(sys.argv) < 2:
        print("Please enter csv file source")
        return
    source = sys.argv[1]
    # Parse the CSV into an integer adjacency matrix.
    with open(source, 'rb') as handle:
        reader = csv.reader(handle, delimiter=',')
        adjacency = [[int(cell) for cell in row] for row in reader]
    count = graphanalyse.Graph().triangle_count(adjacency)
    print("The number of triangles in the graph is: %s" % count)
# Script entry point.
if __name__ == '__main__':
    main()
| {
"content_hash": "9b07da98ac6372a02e6328749e4f9942",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 66,
"avg_line_length": 23.08,
"alnum_prop": 0.5944540727902946,
"repo_name": "katepavlovic/graphmagic",
"id": "1d74c437734896967777a02d2f2e3145012b79ff",
"size": "1207",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "graphmagic/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "7571"
}
],
"symlink_target": ""
} |
"""
>>> from pyspark.context import SparkContext
>>> sc = SparkContext('local', 'test')
>>> b = sc.broadcast([1, 2, 3, 4, 5])
>>> b.value
[1, 2, 3, 4, 5]
>>> from pyspark.broadcast import _broadcastRegistry
>>> _broadcastRegistry[b.bid] = b
>>> from cPickle import dumps, loads
>>> loads(dumps(b)).value
[1, 2, 3, 4, 5]
>>> sc.parallelize([0, 0]).flatMap(lambda x: b.value).collect()
[1, 2, 3, 4, 5, 1, 2, 3, 4, 5]
>>> large_broadcast = sc.broadcast(list(range(10000)))
"""
# Holds broadcasted data received from Java, keyed by its id.
# Read by _from_id() when a pickled Broadcast object is reconstructed.
_broadcastRegistry = {}
def _from_id(bid):
    """Look up a broadcast variable by id during unpickling.

    Raises an Exception if the broadcast has not been loaded into the
    worker-side registry.
    """
    from pyspark.broadcast import _broadcastRegistry
    try:
        return _broadcastRegistry[bid]
    except KeyError:
        raise Exception("Broadcast variable '%s' not loaded!" % bid)
class Broadcast(object):
    """
    A broadcast variable created with
    L{SparkContext.broadcast()<pyspark.context.SparkContext.broadcast>}.
    Read it through the C{.value} attribute.
    """

    def __init__(self, bid, value, java_broadcast=None, pickle_registry=None):
        """
        Internal constructor -- users should obtain instances via
        L{SparkContext.broadcast()<pyspark.context.SparkContext.broadcast>}.
        """
        self.bid = bid
        self.value = value
        self._jbroadcast = java_broadcast
        self._pickle_registry = pickle_registry

    def __reduce__(self):
        # Register this broadcast so the driver can track which ones were
        # pickled, then serialize as a lookup by id on the worker side.
        self._pickle_registry.add(self)
        return (_from_id, (self.bid, ))
| {
"content_hash": "6d5442217a7545c3d3177ffa8a4f4167",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 78,
"avg_line_length": 29.42,
"alnum_prop": 0.6349422161794698,
"repo_name": "cloudera/spark",
"id": "43f40f8783bfd426cb94f031527cc11ee43059c7",
"size": "2256",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "python/pyspark/broadcast.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "9210"
},
{
"name": "Java",
"bytes": "226012"
},
{
"name": "JavaScript",
"bytes": "20318"
},
{
"name": "Python",
"bytes": "249380"
},
{
"name": "Ruby",
"bytes": "2261"
},
{
"name": "Scala",
"bytes": "3357035"
},
{
"name": "Shell",
"bytes": "72264"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext_lazy as _
from .forms import UserCreationForm, UserChangeForm
from .models import User
@admin.register(User)
class UserAdmin(UserAdmin):
    """Admin configuration for the email-based custom User model."""
    # NOTE(review): this class intentionally(?) shadows the imported django
    # UserAdmin base it extends; the base name is resolved before the new
    # binding is created, so it works, but a distinct name would be clearer.
    # Edit form layout (no username field; email is the identifier).
    fieldsets = (
        (None, {'fields': ('email', 'password')}),
        (_('Permissions'), {'fields': ('is_active', 'is_superuser')}),
    )
    # "Add user" form layout.
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('email', 'password1', 'password2'),
        }),
    )
    form = UserChangeForm
    add_form = UserCreationForm
    list_display = ('id', 'email', 'is_active', 'is_superuser',)
    list_filter = ('is_superuser', 'is_active',)
    search_fields = ('email',)
    ordering = ('id',)
    # No many-to-many fields to render with the horizontal filter widget.
    filter_horizontal = ()
| {
"content_hash": "f6f7f55f38e0b50bd09083bfe9873ada",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 70,
"avg_line_length": 30.22222222222222,
"alnum_prop": 0.6053921568627451,
"repo_name": "silverlogic/blockhunt-back",
"id": "e9d0e8c8cb6c7648849f855d6b4c242a6d7074af",
"size": "816",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blockhunt/users/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "223869"
},
{
"name": "HTML",
"bytes": "3286"
},
{
"name": "JavaScript",
"bytes": "280583"
},
{
"name": "Python",
"bytes": "69479"
},
{
"name": "Ruby",
"bytes": "7987"
}
],
"symlink_target": ""
} |
"""An example to train TD3 algorithm on InvertedDoublePendulum PyTorch."""
import torch
from torch.nn import functional as F
# from garage.np.exploration_policies import AddGaussianNoise
from garage import wrap_experiment
from garage.envs import GymEnv, normalize
from garage.experiment.deterministic import set_seed
from garage.np.exploration_policies import AddGaussianNoise
from garage.np.policies import UniformRandomPolicy
from garage.replay_buffer import PathBuffer
from garage.sampler import FragmentWorker, LocalSampler
from src.algos import TD3
from garage.torch.policies import DeterministicMLPPolicy
from garage.torch.q_functions import ContinuousMLPQFunction
from garage.trainer import Trainer
from garage.torch import set_gpu_mode
from src.replay_buffer import *
import gym
@wrap_experiment(snapshot_mode='none')
def train(ctxt=None):
    """Train TD3 on the FetchReach-v1 environment.

    Reads hyperparameters and the replay-buffer sampling strategy from the
    module-global ``config`` (set by ``train_td3_fetchreach`` before this
    function is called).

    Args:
        ctxt (garage.experiment.ExperimentContext): The experiment
            configuration used by LocalRunner to create the snapshotter.
    """
    set_seed(config.seed)
    # NOTE(review): the four constants below are only partially used.
    # ``num_timesteps`` is derived from them, but the TD3 constructor and
    # ``trainer.train`` below use different literal values (steps_per_epoch=50,
    # buffer_batch_size=256, n_epochs=100, batch_size=256). Confirm which set
    # of values is intended and unify.
    n_epochs = 50
    steps_per_epoch = 40
    sampler_batch_size = 250
    num_timesteps = n_epochs * steps_per_epoch * sampler_batch_size
    buffer_batch_size = 256   # NOTE(review): currently unused (literal 256 passed to TD3)
    grad_steps_per_env_step = 100
    trainer = Trainer(ctxt)
    env = normalize(GymEnv(gym.make('FetchReach-v1')))
    # Deterministic actor network with tanh-squashed output.
    policy = DeterministicMLPPolicy(env_spec=env.spec,
                                    hidden_sizes=[256, 256],
                                    hidden_nonlinearity=F.relu,
                                    output_nonlinearity=torch.tanh)
    # Constant Gaussian exploration noise (max_sigma == min_sigma == 0.1).
    exploration_policy = AddGaussianNoise(env.spec,
                                          policy,
                                          total_timesteps=num_timesteps,
                                          max_sigma=0.1,
                                          min_sigma=0.1)
    # Used by TD3 for the initial uniform-random warm-up steps.
    uniform_random_policy = UniformRandomPolicy(env.spec)
    # Twin critics, as required by TD3.
    qf1 = ContinuousMLPQFunction(env_spec=env.spec,
                                 hidden_sizes=[256, 256],
                                 hidden_nonlinearity=F.relu)
    qf2 = ContinuousMLPQFunction(env_spec=env.spec,
                                 hidden_sizes=[256, 256],
                                 hidden_nonlinearity=F.relu)
    # Select the replay buffer implementation from config; the "++" variants
    # use a custom (non-uniform) sampling distribution inside TD3.
    use_custom_sampling_pdist = False
    if config.replay_buffer_sampler == 'reverse':
        replay_buffer = ReversePathBuffer(capacity_in_transitions=int(1e6))
    elif config.replay_buffer_sampler == 'reverse++':
        use_custom_sampling_pdist = True
        replay_buffer = ReversePPPathBuffer(capacity_in_transitions=int(1e6))
    elif config.replay_buffer_sampler == 'forward++':
        use_custom_sampling_pdist = True
        replay_buffer = ForwardPPPathBuffer(capacity_in_transitions=int(1e6))
    elif config.replay_buffer_sampler == 'hreverse++':
        use_custom_sampling_pdist = True
        replay_buffer = HReversePPPathBuffer(capacity_in_transitions=int(1e6))
    elif config.replay_buffer_sampler == 'uniform_reverse++':
        use_custom_sampling_pdist = True
        replay_buffer = UniformReversePPPathBuffer(
            capacity_in_transitions=int(1e6))
    elif config.replay_buffer_sampler == 'optimistic':
        use_custom_sampling_pdist = True
        replay_buffer = OptimisticPathBuffer(capacity_in_transitions=int(1e6))
    elif config.replay_buffer_sampler == 'hindsight':
        replay_buffer = HERReplayBuffer(
            replay_k=4, reward_fn=env.compute_reward,
            capacity_in_transitions=int(1e6), env_spec=env.spec)
    elif config.replay_buffer_sampler == 'prioritized':
        replay_buffer = PrioritizedReplayBuffer(
            capacity_in_transitions=int(1e6))
    else:
        # NOTE(review): this reassignment is redundant (already 100 above).
        grad_steps_per_env_step = 100
        replay_buffer = PathBuffer(capacity_in_transitions=int(1e6))
    sampler = LocalSampler(agents=exploration_policy,
                           envs=env,
                           max_episode_length=env.spec.max_episode_length,
                           worker_class=FragmentWorker)
    td3 = TD3(env_spec=env.spec,
              policy=policy,
              qf1=qf1,
              qf2=qf2,
              replay_buffer=replay_buffer,
              sampler=sampler,
              policy_optimizer=torch.optim.Adam,
              qf_optimizer=torch.optim.Adam,
              exploration_policy=exploration_policy,
              uniform_random_policy=uniform_random_policy,
              target_update_tau=0.01,
              discount=0.95,
              policy_noise_clip=0.5,
              policy_noise=0.2,
              policy_lr=1e-3,
              qf_lr=1e-3,
              steps_per_epoch=50,   # NOTE(review): ignores steps_per_epoch=40 above
              start_steps=1000,
              grad_steps_per_env_step=grad_steps_per_env_step,
              min_buffer_size=int(1e4),
              buffer_batch_size=256,  # NOTE(review): ignores buffer_batch_size above
              use_custom_sampling_pdist=use_custom_sampling_pdist)
    if torch.cuda.is_available():
        set_gpu_mode(True)
    td3.to()
    trainer.setup(algo=td3, env=env)
    # NOTE(review): literals here ignore n_epochs=50 / sampler_batch_size=250
    # defined above -- confirm intended values.
    trainer.train(n_epochs=100, batch_size=256)
def train_td3_fetchreach(args):
    """Entry point: publish CLI args as the module-global ``config`` and run training.

    ``train`` (and the buffer selection inside it) reads the global, so it
    must be assigned before the wrapped experiment is invoked.
    """
    global config
    config = args
    experiment_options = {
        'log_dir': args.snapshot_dir,
        'use_existing_dir': True,
    }
    train(experiment_options)
| {
"content_hash": "762ea6484fc24374ffceee68e93a28ac",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 78,
"avg_line_length": 40.83969465648855,
"alnum_prop": 0.6214953271028038,
"repo_name": "google-research/look-back-when-surprised",
"id": "245f3a237ebdf07a88177c7a4fc00ca8a1dfa112",
"size": "5609",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/launchers/td3_fetchreach.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "141633"
}
],
"symlink_target": ""
} |
# -*- coding: iso-8859-1 -*-
#
"""
@ 2009 by Uche ogbuji <uche@ogbuji.net>
This file is part of the open source Akara project,
provided under the Apache 2.0 license.
See the files LICENSE and NOTICE for details.
Project home, documentation, distributions: http://wiki.xml3k.org/Akara
Module name:: moinrest
= Defined REST entry points =
http://purl.org/akara/services/demo/collection (moin)
- Under the top mount point is one or more lower points depending on config,
each of which handles HEAD, GET, POST, PUT, DELETE
= Configuration =
You'll need a config entry such as:
[moinrest]
target-xml3k=http://wiki.xml3k.org
class moinrest:
targets = {"xml3k": "http://wiki.xml3k.org"}
= Notes on security and authentication =
There are two separate aspects to authentication that moinrest has to
consider and which may need to be configured independently. First, if
the HTTP server that is running the target wiki is configured with
site-wide basic authentication, you will need to include an
appropriate username and password in the target configuration above.
For example:
[moinrest]
target-xml3k=http://user:password@wiki.xml3k.org
where "user" and "password" are filled in with the appropriate
username and password. If you're not sure if you need this, try
connecting to the wiki using a browser. If the browser immediately
displays a pop-up window asking you for a username and password,
you'll need to supply that information in the moinrest configuration
as shown. If no pop-up window appears, the HTTP server is not using
authentication.
The second form of authentication concerns access to the MoinMoin wiki
itself. In order to modify pages, users may be required to log in to
the wiki first using the wiki's "login" link. These credentials are
passed to moinrest using HTTP Basic Authentication. Thus, they need
to be passed in the HTTP headers of requests. For example, using curl
you would type something like this:
curl -u me:passwd -p --request PUT --data-binary "@wikicontent.txt" --header "Content-Type: text/plain" "http://localhost:8880/moin/xml3k/FooTest"
Keep in mind that username and password credentials given to moinrest
requests are only for the target wiki. They are not the same as basic
authentication for the HTTP server hosting the wiki.
"""
#Detailed license and copyright information: http://4suite.org/COPYRIGHT
from __future__ import with_statement
# Usage examples, kept in a separate constant so they can be appended to the
# module docstring (below) and reused as the mounted service's documentation.
SAMPLE_QUERIES_DOC = '''
Some sample queries:
curl http://localhost:8880/moin/xml3k/FrontPage
curl -H "Accept: application/docbook+xml" http://localhost:8880/moin/xml3k/FrontPage
curl -H "Accept: application/rdf+xml" http://localhost:8880/moin/xml3k/FrontPage
curl -H "Accept: application/x-moin-attachments+xml" http://localhost:8880/moin/xml3k/FrontPage
curl --request PUT --data-binary "@wikicontent.txt" --header "Content-Type: text/plain" "http://localhost:8880/moin/xml3k/FooTest"
curl --request POST --data-binary "@wikicontent.txt" --header "Content-Type: text/plain" "http://localhost:8880/moin/xml3k/FooTest;attachment=wikicontent.txt"
curl --request DELETE http://localhost:8880/moin/xml3k/FrontPage
curl -u me:passwd -p --request PUT --data-binary "@wikicontent.txt" --header "Content-Type: text/plain" "http://localhost:8880/moin/xml3k/FooTest"
Get an attached page:
curl "http://localhost:8880/moin/xml3k/FooTest;attachment=wikicontent.txt"
Get a page's history:
curl http://localhost:8880/moin/xml3k/FrontPage;history
''' #' # work-around emacs' inability to parse this level of embedded quotes
__doc__ += SAMPLE_QUERIES_DOC
# Standard library imports
import sys # Used only from sys.stderr
import os
import cgi
import httplib, urllib, urllib2
from string import Template
from cStringIO import StringIO
import tempfile
from contextlib import closing
from wsgiref.util import shift_path_info, request_uri
from functools import wraps
from itertools import dropwhile
# Amara Imports
import amara
from amara import bindery
from amara.lib.util import first_item
from amara.lib.iri import absolutize, relativize, join
from amara.writers.struct import structencoder, E, NS, ROOT, RAW
from amara.bindery.html import parse as htmlparse
from amara.bindery.model import examplotron_model, generate_metadata
from amara.lib.iri import split_fragment, relativize, absolutize, split_uri_ref, split_authority, unsplit_uri_ref
from amara.lib.iri import split_uri_ref, unsplit_uri_ref, split_authority, absolutize
#from amara import inputsource
# Akara Imports
from akara import module_config, logger, response
from akara.util import multipart_post_handler, wsgibase, http_method_handler, copy_headers_to_dict
from akara.services import method_dispatcher
from akara.util import status_response, read_http_body_to_temp
from akara.util import BadTargetError, HTTPAuthorizationError, MoinAuthorizationError, UnexpectedResponseError, MoinMustAuthenticateError, MoinNotFoundError, ContentLengthRequiredError, GenericClientError
import akara.util.moin as moin
# ======================================================================
# Module Configuration
# ======================================================================
# Mapping of short wiki id -> target wiki base URL, from the [moinrest] config.
TARGET_WIKIS = module_config().get("targets", {})
# Mapping of short wiki id -> urllib2 opener used to talk to that wiki.
TARGET_WIKI_OPENERS = {}
# Opener for wikis whose URL carries no HTTP Basic credentials: cookies
# (for Moin sessions) plus multipart POST support (for attachments).
DEFAULT_OPENER = urllib2.build_opener(
    urllib2.HTTPCookieProcessor(),
    multipart_post_handler.MultipartPostHandler)
# Specifies the default max-age of Moin pages
CACHE_MAX_AGE = module_config().get("CACHE_MAX_AGE", None)
# Specifies a Wiki path (currently only one, FIXME) under which no caching will occur
NO_CACHE_PATH = module_config().get("NO_CACHE_PATH", None)
# Look at each Wiki URL and build an appropriate opener object for retrieving
# pages.   If the URL includes HTTP authentication information such as
# http://user:pass@somedomain.com/mywiki, the opener is built with
# basic authentication enabled.   For details, see:
#
#     : HTTP basic auth: http://www.voidspace.org.uk/python/articles/urllib2.shtml#id6
for k, v in TARGET_WIKIS.items():
    (scheme, authority, path, query, fragment) = split_uri_ref(v)
    auth, host, port = split_authority(authority)
    # Re-build the authority without the user:pass part.
    authority = host + ':' + port if port else host
    # NOTE(review): schemeless_url is never used below -- dead assignment?
    schemeless_url = authority + path
    if auth:
        # Store the credential-free URL; credentials go into the opener instead.
        TARGET_WIKIS[k] = unsplit_uri_ref((scheme, authority, path, query, fragment))
        auth = auth.split(':')
        password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        # Not setting the realm for now, so use None
        password_mgr.add_password(None, scheme+"://"+host+path, auth[0], auth[1])
        password_handler = urllib2.HTTPBasicAuthHandler(password_mgr)
        TARGET_WIKI_OPENERS[k] = urllib2.build_opener(
            password_handler,
            urllib2.HTTPCookieProcessor(),
            multipart_post_handler.MultipartPostHandler)
    else:
        TARGET_WIKI_OPENERS[k] = DEFAULT_OPENER
SERVICE_ID = 'http://purl.org/xml3k/akara/services/demo/moinrest'
DEFAULT_MOUNT = 'moin'
# ======================================================================
# Response Templates
# ======================================================================
# These template strings contain the output produced for various
# error conditions. Each is rendered with Template.safe_substitute by
# the handlers in moin_error_wrapper below.
# 404: the moinrest target name is not configured
error_badtarget = Template("""\
404 Not Found
The requested URL $fronturl not found.
Nothing is known about moin target $target.
"""
)
# 403: the HTTP server fronting Moin rejected our Basic credentials
error_httpforbidden = Template("""\
403 Forbidden
Request for URL $url
is being rejected by the Moin HTTP server due to bad HTTP
authentication. Check the Akara's moinrest configuration to make
sure it includes an appropriate HTTP user name and password.
"""
)
# 403: Moin itself rejected the wiki username/password
error_moinauthforbidden = Template("""\
403 Forbidden
Request for login URL $url
is being rejected by MoinMoin because the username and password
aren't recognized. Check your request to moinrest to make sure
a valid Moin username and password are being supplied.
"""
)
# 401: Moin requires authentication; challenge the client
error_moinmustauthenticateresponse = Template("""\
401 Unauthorized
Request for URL $url
requires a valid Moin username and password.
"""
)
# 500: unexpected upstream HTTP status code
error_unexpectedresponse = Template("""\
500 Internal Error
Request for URL $url
failed because an unexpected HTTP status code $code was received.
$error
"""
)
# 404: page not found on the target wiki
error_moinnotfoundresponse = Template("""\
404 Not Found
The requested URL $fronturl not found.
The URL $backurl was not found in the target wiki.
"""
)
# 411: POST/PUT with no request body / Content-Length
error_contentlengthrequired = Template("""\
411 Length Required
A POST or PUT request was made, but no data was found.
""")
# ======================================================================
# moin_error_handler
# ======================================================================
# This error handling function is what actually runs all of the WSGI
# functions implemented by the modules. It catches MoinRest specific exceptions
# and produces appropriate error responses as needed.
#
# The reason for putting this functionality in a single function is to avoid a lot
# excessive code duplication between different HTTP methods. For example,
# the handlers for each HTTP method are going to have to deal with
# many of the same error conditions, faults, and responses. Centralizing
# the handling makes it possible to deal all of the errors in just one place.
def moin_error_wrapper(wsgiapp):
    """Wrap a WSGI handler so moinrest exceptions become HTTP error responses.

    Centralizes error handling for all of this module's HTTP method
    handlers: each moinrest-specific exception is translated to the
    matching HTTP status line plus a canned plain-text body (templates
    above), avoiding duplicated try/except code in every handler.
    """
    @wraps(wsgiapp)
    def handler(environ, start_response):
        status_info = {} # Dictionary of collected status information
        # Replacement for the WSGI start_response function. This merely
        # collects response data in a dictionary for later use if no errors occur
        # NOTE(review): unlike real WSGI start_response, this takes no optional
        # exc_info argument -- confirm none of the wrapped handlers pass one.
        def local_start_response(status, headers):
            status_info['status'] = status
            status_info['headers'] = headers
        # Try to run the supplied WSGI handler
        try:
            body = wsgiapp(environ, local_start_response)
            # If control reaches here, no errors. Proceed with normal WSGI response
            # NOTE(review): if the wrapped app returned without calling
            # start_response these lookups raise KeyError -- confirm all
            # handlers call start_response on their success path.
            start_response(status_info['status'],status_info['headers'])
            return body
        # Error handling for specifying an invalid moin target name (i.e., not configured, misspelled)
        except BadTargetError,e:
            start_response(status_response(httplib.NOT_FOUND), [
                    ('Content-Type','text/plain')
                    ])
            return error_badtarget.safe_substitute(e.parms)
        # Error handling for back-end HTTP authorization failure.  For example,
        # if the HTTP server hosting MoinMoin has rejected our requests due to
        # bad HTTP authorization.
        except HTTPAuthorizationError,e:
            start_response(status_response(httplib.FORBIDDEN), [
                    ('Content-Type','text/plain')
                    ])
            return error_httpforbidden.safe_substitute(e.parms)
        # Error handling for MoinMoin authorization failure.  This occurs
        # if the user and password supplied to MoinMoin is rejected.
        except MoinAuthorizationError,e:
            start_response(status_response(httplib.FORBIDDEN), [
                    ('Content-Type','text/plain')
                    ])
            return error_moinauthforbidden.safe_substitute(e.parms)
        # Error handling for unexpected HTTP status codes
        except UnexpectedResponseError,e:
            start_response(status_response(httplib.INTERNAL_SERVER_ERROR), [
                    ('Content-Type','text/plain')
                    ])
            return error_unexpectedresponse.safe_substitute(e.parms)
        # Authentication required by MoinMoin.  This isn't an error, but we
        # have to translate this into a 401 response to send back to the client
        # in order to get them to supply the appropriate username/password
        except MoinMustAuthenticateError,e:
            start_response(status_response(httplib.UNAUTHORIZED), [
                    ('Content-Type','text/plain'),
                    ('WWW-Authenticate','Basic realm="%s"' % e.parms.get('target',''))
                    ])
            return error_moinmustauthenticateresponse.safe_substitute(e.parms)
        # Page in the target-wiki not found. 404 the client
        except MoinNotFoundError,e:
            start_response(status_response(httplib.NOT_FOUND), [
                    ('Content-Type','text/plain'),
                    ])
            return error_moinnotfoundresponse.safe_substitute(e.parms)
        # Content-length is required for uploaded data
        except ContentLengthRequiredError,e:
            start_response(status_response(httplib.LENGTH_REQUIRED), [
                    ('Content-Type','text/plain')
                    ])
            return error_contentlengthrequired.safe_substitute(e.parms)
        # Used for Moin errors indicated in 2xx HTML responses. No
        # need for canned text since the error text is in the HTML
        except GenericClientError,e:
            start_response(status_response(httplib.BAD_REQUEST), [
                    ('Content-Type','text/plain')
                    ])
            return e.parms.get('error')
    return handler
# ----------------------------------------------------------------------
# Support functions used by handlers
# ----------------------------------------------------------------------
# Utility function for generating status rsponses for WSGI
def status_response(code):
    """Return an HTTP status line (e.g. "404 Not Found") for the given code.

    NOTE(review): ``status_response`` is also imported from akara.util at the
    top of this module; this local definition shadows it. Presumably the two
    are equivalent -- confirm, then drop one of them.
    """
    reason = httplib.responses[code]
    return '%i %s' % (code, reason)
# Returns information about the target wiki. Raises BadTargetError if nothing
# is known about the target name
def target(environ):
    """Resolve the incoming request to a configured target wiki.

    Pops the wiki id off the front of PATH_INFO (mutating environ) and
    returns a 5-tuple: (wiki id, wiki base URL, opener for that wiki,
    page URL on the back-end wiki, front-end base URL wrapping the wiki).

    Raises BadTargetError when the wiki id is not configured.
    """
    wiki_id = shift_path_info(environ)
    incoming_uri = request_uri(environ)
    if wiki_id not in TARGET_WIKIS:
        raise BadTargetError(fronturl=request_uri(environ), target=wiki_id)
    base = TARGET_WIKIS[wiki_id]
    remainder = environ['PATH_INFO']
    original_page = join(base.rstrip('/') + '/', remainder.lstrip('/'))
    # Strip the back-end page path off the front-end URL to get the
    # wrapped wiki base as seen by clients.
    wrapped_wiki_base = incoming_uri[:-len(remainder)]
    return wiki_id, base, TARGET_WIKI_OPENERS.get(wiki_id), original_page, wrapped_wiki_base
# Check authentication of the user on the MoinMoin wiki
def check_auth(environ, start_response, base, opener, headers=None):
    '''
    Validate the request's HTTP Basic credentials against the target wiki.

    Returns False when no Authorization header is present; returns True and
    sets environ['REMOTE_USER'] when Moin accepts the credentials.

    Warning: mutates environ in place
    If HTTP auth succeeds will also attach a cookie to the opener object in place
    (the Moin session cookie, captured by the opener's HTTPCookieProcessor).
    '''
    auth = environ.get('HTTP_AUTHORIZATION')
    #logger.debug('GRIPPO ' + repr((headers)))
    if not auth:
        return False
    scheme, data = auth.split(None, 1)
    if scheme.lower() != 'basic':
        raise RuntimeError('Unsupported HTTP auth scheme: %s'%scheme)
    username, password = data.decode('base64').split(':', 1)
    # Log in through Moin's login action; the opener records the session cookie.
    # NOTE(review): credentials end up in the URL query string, so they may be
    # recorded in server logs -- confirm this is acceptable for deployments.
    url = absolutize('?action=login&name=%s&password=%s&login=login'%(username, password), base)
    request = urllib2.Request(url, None, headers)
    try:
        with closing(opener.open(request)) as resp:
            #Don't need to do anything with the response. The cookies will be captured automatically
            pass
    except urllib2.URLError,e:
        # NOTE(review): a plain URLError (network failure) has no ``code``
        # attribute -- only HTTPError does -- so this would raise
        # AttributeError on connection errors. Confirm and guard if needed.
        if e.code == 401:
            # If we're here, the backend HTTP server has likely rejected our request due to HTTP auth
            raise HTTPAuthorizationError(url=url)
        elif e.code == 403:
            # If we get a forbidden response, we made it to MoinMoin but the user name/pass was rejected
            raise MoinAuthorizationError(url=url)
        else:
            raise UnexpectedResponseError(url=url,code=e.code,error=str(e))
    environ['REMOTE_USER'] = username
    return True
def fill_page_edit_form(page, wiki_id, base, opener, headers=None):
url = absolutize(page, base)
request = urllib2.Request(url+"?action=edit&editor=text", None, headers)
#logger.debug('GRIPPO ' + repr((headers)))
try:
with closing(opener.open(request)) as resp:
x = resp.read(); resp = x
doc = htmlparse(resp)
raise_embedded_error(doc)
except urllib2.URLError,e:
# Comment concerning the behavior of MoinMoin. If an attempt is made to edit a page
# and the user is not authenticated, you will either get a 403 or 404 error depending
# on whether or not the page being edited exists or not. If it doesn't exist,
# MoinMoin sends back a 404 which is misleading. We raise MoinMustAuthenticateError
# to signal the error wrapper to issue a 401 back to the client
#Note: Moin for somereason seems to give 403 errors on some URLs in response to Curl's UA
if e.code == 403 or e.code == 404:
raise MoinMustAuthenticateError(url=request.get_full_url(),target=wiki_id)
else:
raise UnexpectedResponseError(url=request.get_full_url(),code=e.code,error=str(e))
try:
form = doc.html.body.xml_select(u'.//*[@id="editor"]')[0]
except Exception as ex:
#XXX There seems to be a crazy XPath bug that only manifests here
#Use non-XPath as a hack-around :(
#open('/tmp/foo.html', 'w').write(x)
logger.debug('Stupid XPath bug. Working around... ' + repr(ex))
from amara.lib.util import element_subtree_iter
form = [ e for e in element_subtree_iter(doc.html.body) if e.xml_attributes.get(u'id') == u'editor' ][0]
#logger.debug('GRIPPO ' + repr(doc.html.body.xml_select(u'.//form')))
#logger.debug('GRIPPO ' + repr((form.xml_namespace, form.xml_local, form.xml_qname, form.xml_name, dict(form.xml_attributes))))
form_vars = {}
#form / fieldset / input
form_vars["action"] = [ e for e in element_subtree_iter(form) if e.xml_attributes.get(u'name') == u'action' ][0].xml_attributes[u'value']
form_vars["rev"] = [ e for e in element_subtree_iter(form) if e.xml_attributes.get(u'name') == u'rev' ][0].xml_attributes[u'value']
form_vars["ticket"] = [ e for e in element_subtree_iter(form) if e.xml_attributes.get(u'name') == u'ticket' ][0].xml_attributes[u'value']
form_vars["editor"] = [ e for e in element_subtree_iter(form) if e.xml_attributes.get(u'name') == u'editor' ][0].xml_attributes[u'value']
#logger.debug('Edit form vars ' + repr(form_vars))
return form_vars
form_vars = {}
#form / fieldset / input
form_vars["action"] = unicode(form.xml_select(u'string(*/*[@name="action"]/@value)'))
form_vars["rev"] = unicode(form.xml_select(u'string(*/*[@name="rev"]/@value)'))
form_vars["ticket"] = unicode(form.xml_select(u'string(*/*[@name="ticket"]/@value)'))
form_vars["editor"] = unicode(form.xml_select(u'string(*/*[@name="editor"]/@value)'))
#logger.debug('Edit form vars ' + repr(form_vars))
return form_vars
def fill_attachment_form(page, attachment, wiki_id, base, opener, headers=None):
url = absolutize(page, base)
request = urllib2.Request(url + '?action=AttachFile', None, headers)
try:
with closing(opener.open(request)) as resp:
doc = htmlparse(resp)
raise_embedded_error(doc)
except urllib2.URLError,e:
# Comment concerning the behavior of MoinMoin. If an attempt is made to post to a page
# and the user is not authenticated, you will either get a 403 or 404 error depending
# on whether or not the page being edited exists or not. If it doesn't exist,
# MoinMoin sends back a 404 which is misleading. We raise MoinMustAuthenticateError
# to signal the error wrapper to issue a 401 back to the client
if e.code == 403 or e.code == 404:
raise MoinMustAuthenticateError(url=request.get_full_url(),target=wiki_id)
else:
raise UnexpectedResponse(url=request.get_full_url(),code=e.code,error=str(e))
form = doc.html.body.xml_select(u'.//*[@id="content"]/form')[0]
form_vars = {}
#form / dl / ... dd
#Was called rename in 1.8.x, target in 1.9.x
form_vars["rename"] = unicode(attachment)
form_vars["target"] = unicode(attachment)
#FIXME: parameterize
form_vars["overwrite"] = u'1'
form_vars["action"] = unicode(form.xml_select(u'string(*/*[@name="action"]/@value)'))
form_vars["do"] = unicode(form.xml_select(u'string(*/*[@name="do"]/@value)'))
form_vars["ticket"] = unicode(form.xml_select(u'string(*/*[@name="ticket"]/@value)'))
form_vars["submit"] = unicode(form.xml_select(u'string(*/*[@type="submit"]/@value)'))
#pprint.pprint(form_vars)
return form_vars
def fill_page_delete_form(page, wiki_id, base, opener, headers=None):
    """Scrape the hidden fields of a Moin DeletePage confirmation form.

    Requests ``page?action=DeletePage`` and returns the dict of form
    variables (action, ticket, delete) required to confirm deletion.

    Raises MoinMustAuthenticateError on 403 and UnexpectedResponseError
    for other HTTP failures.
    """
    url = absolutize(page, base)
    request = urllib2.Request(url+"?action=DeletePage", None, headers)
    try:
        with closing(opener.open(request)) as resp:
            x = resp.read(); resp = x
            doc = htmlparse(resp)
            raise_embedded_error(doc)
    except urllib2.URLError,e:
        # A 403 here means Moin wants the client to authenticate first.
        if e.code == 403:
            raise MoinMustAuthenticateError(url=request.get_full_url(),target=wiki_id)
        else:
            raise UnexpectedResponseError(url=request.get_full_url(),code=e.code,error=str(e))
    form = doc.html.body.xml_select(u'.//form[@method="POST"]')[0]
    form_vars = {}
    form_vars["action"] = unicode(form.xml_select(u'string(*/*[@name="action"]/@value)'))
    form_vars["ticket"] = unicode(form.xml_select(u'string(*/*[@name="ticket"]/@value)'))
    form_vars["delete"] = unicode(form.xml_select(u'string(//input[@type="submit" and @name="delete"]/@value)'))
    return form_vars
def scrape_page_history(page, base, opener, headers=None):
url = absolutize(page, base)
request = urllib2.Request(url+"?action=info", None, headers)
try:
with closing(opener.open(request)) as resp:
doc = htmlparse(resp)
raise_embedded_error(doc)
except urllib2.URLError,e:
# Comment concerning the behavior of MoinMoin. If an attempt is made to post to a page
# and the user is not authenticated, you will either get a 403 or 404 error depending
# on whether or not the page being edited exists or not. If it doesn't exist,
# MoinMoin sends back a 404 which is misleading. We raise MoinMustAuthenticateError
# to signal the error wrapper to issue a 401 back to the client
if e.code == 403 or e.code == 404:
raise MoinMustAuthenticateError(url=request.get_full_url(),target=wiki_id)
else:
raise UnexpectedResponse(url=request.get_full_url(),code=e.code,error=str(e))
info = []
try:
table = doc.html.body.xml_select(u'.//table[@id="dbw.table"]')[0]
except Exception as ex:
#XXX Seems to be a crazy XPath bug that only manifests here
#Use non-XPath as a hack-around :(
logger.debug('Stupid XPath bug. Working around... ' + repr(ex))
from amara.lib.util import element_subtree_iter
table = [ e for e in element_subtree_iter(doc.html.body) if e.xml_attributes.get(u'id') == u'dbw.table' ]
if not table:
#"Revision History... No log entries found." i.e. page not even yet created
return info
info = [
dict(rev=tr.td[0], date=tr.td[1], editor=tr.td[4])
for tr in table[0].xml_select(u'.//tr[td[@class="column1"]]')
#for tr in table.tbody.tr if tr.xml_select(u'td[@class="column1"]')
]
return info
# Extract any error embedded in an HTML response (returned by Moin in 2xx responses),
# and raise it as an HTTP error. Would be nice to handle this generically in
# moin_error_wrapper, but don't want to incur HTML parse cost
def raise_embedded_error(doc):
    """Raise GenericClientError if a Moin 2xx HTML response embeds an error div.

    Moin reports some failures inside an otherwise-successful (2xx) HTML
    page; handlers call this after parsing any such response. Does nothing
    when no error div is present or the document can't be queried.
    """
    try:
        error_div = doc.xml_select('//div[@class="error"]')
    except Exception:
        # BUG FIX (review): was a bare ``except:`` which also swallowed
        # KeyboardInterrupt/SystemExit; narrowed to Exception. The intent
        # (ignore documents where the XPath can't be evaluated) is unchanged.
        return
    if error_div:
        # NOTE(review): xml_select returns a node list; confirm that
        # ``asString`` exists on it in the amara version in use -- it may
        # need to be error_div[0] instead.
        raise GenericClientError(error=error_div.asString())
# ----------------------------------------------------------------------
# HTTP Method Handlers
# ----------------------------------------------------------------------
# The following functions implement versions of the various HTTP methods
# (GET, HEAD, POST, PUT, DELETE). Each method is actually implemented as a
# a pair of functions. One is a private implementation (e.g., _get_page).
# The other function is a wrapper that encloses each handler with the error
# handling function above (moin_error_handler). Again, this is to avoid
# excessive duplication of error handling code.
@method_dispatcher(SERVICE_ID, DEFAULT_MOUNT, wsgi_wrapper=moin_error_wrapper)
def dispatcher():
    # Dispatch point only; the per-method handlers are registered below via
    # @dispatcher.method(...).
    return
# BUG FIX (review): the original assigned ``__doc__ = SAMPLE_QUERIES_DOC``
# *inside* the function body, which merely bound an unused local and left the
# service undocumented. Attach the sample queries as the real docstring.
try:
    dispatcher.__doc__ = SAMPLE_QUERIES_DOC
except AttributeError:
    # method_dispatcher may return an object that rejects attribute
    # assignment; in that case keep the original (undocumented) behavior.
    pass
@dispatcher.method("GET")
def get_page(environ, start_response):
req_headers = copy_headers_to_dict(environ,exclude=['HTTP_ACCEPT_ENCODING'])
wiki_id, base, opener, original_page, wrapped_wiki_base = target(environ)
page = environ['PATH_INFO'].lstrip('/')
check_auth(environ, start_response, base, opener, req_headers)
upstream_handler = None
status = httplib.OK
params = cgi.parse_qs(environ['QUERY_STRING'])
#Note: probably a better solution here: http://code.google.com/p/mimeparse/
accepted_imts = environ.get('HTTP_ACCEPT', '').split(',')
#logger.debug('accepted_imts: ' + repr(accepted_imts))
imt = first_item(dropwhile(lambda x: '*' in x, accepted_imts))
#logger.debug('imt: ' + repr(imt))
params_for_moin = {}
cache_max_age = CACHE_MAX_AGE # max-age of this response. If set to None, it will not be used
if NO_CACHE_PATH and NO_CACHE_PATH in page:
cache_max_age = None
if 'rev' in params:
#XXX: Not compatible with search
#params_for_moin = {'rev' : params['rev'][0], 'action': 'recall'}
params_for_moin = {'rev' : params['rev'][0]}
if 'search' in params:
searchq = params['search'][0]
query = urllib.urlencode({'value' : searchq, 'action': 'fullsearch', 'context': '180', 'fullsearch': 'Text'})
#?action=fullsearch&context=180&value=foo&=Text
url = absolutize('?'+query, base)
request = urllib2.Request(url, None, req_headers)
ctype = moin.RDF_IMT
cache_max_age = None
#elif 'action' in params and params['action'][0] == 'recall':
elif moin.HTML_IMT in environ.get('HTTP_ACCEPT', ''):
params = urllib.urlencode(params_for_moin)
url = absolutize(page+'?'+params, base)
request = urllib2.Request(url, None, req_headers)
ctype = moin.HTML_IMT
elif moin.RDF_IMT in environ.get('HTTP_ACCEPT', ''):
#FIXME: Make unique flag optional
#url = base + '/RecentChanges?action=rss_rc&unique=1&ddiffs=1'
url = absolutize('RecentChanges?action=rss_rc&unique=1&ddiffs=1', base)
#print >> sys.stderr, (url, base, '/RecentChanges?action=rss_rc&unique=1&ddiffs=1', )
request = urllib2.Request(url, None, req_headers)
ctype = moin.RDF_IMT
elif moin.ATTACHMENTS_IMT in environ.get('HTTP_ACCEPT', ''):
url = absolutize(page + '?action=AttachFile', base)
request = urllib2.Request(url, None, req_headers)
ctype = moin.ATTACHMENTS_IMT
def upstream_handler():
#Sigh. Sometimes you have to break some Tag soup eggs to make a RESTful omlette
with closing(opener.open(request)) as resp:
rbody = resp.read()
doc = htmlparse(rbody)
raise_embedded_error(doc)
attachment_nodes = doc.xml_select(u'//*[contains(@href, "action=AttachFile") and contains(@href, "do=view")]')
targets = []
for node in attachment_nodes:
target = [ param.split('=', 1)[1] for param in node.href.split(u'&') if param.startswith('target=') ][0]
targets.append(target)
output = structencoder(indent=u"yes")
output.feed(
ROOT(
E((u'attachments'),
(E(u'attachment', {u'href': unicode(t)}) for t in targets)
)
))
return output.read(), ctype
#Notes on use of URI parameters - http://markmail.org/message/gw6xbbvx4st6bksw
elif ';attachment=' in page:
page, attachment = page.split(';attachment=', 1)
url = absolutize(page + '?action=AttachFile&do=get&target=' + attachment, base)
request = urllib2.Request(url, None, req_headers)
def upstream_handler():
with closing(opener.open(request)) as resp:
rbody = resp.read()
return rbody, dict(resp.info())['content-type']
#
elif ';history' in page:
cache_max_age = None
page, discard = page.split(';history', 1)
ctype = moin.XML_IMT
def upstream_handler():
revs = scrape_page_history(page, base, opener, req_headers)
output = structencoder(indent=u"yes")
output.feed(
ROOT(
E((u'history'),
(E(u'rev', {u'id': unicode(r['rev']), u'editor': unicode(r['editor']), u'date': unicode(r['date']).replace(' ', 'T')}) for r in revs)
)
))
return output.read(), ctype
elif imt:
params_for_moin.update({'mimetype': imt})
params = urllib.urlencode(params_for_moin)
url = absolutize(page, base) + '?' + params
request = urllib2.Request(url, None, req_headers)
ctype = moin.DOCBOOK_IMT
else:
params_for_moin.update({'action': 'raw'})
params = urllib.urlencode(params_for_moin)
url = absolutize(page, base) + '?' + params
request = urllib2.Request(url, None, req_headers)
ctype = moin.WIKITEXT_IMT
try:
if upstream_handler:
rbody, ctype = upstream_handler()
else:
with closing(opener.open(request)) as resp:
rbody = resp.read()
#headers = {moin.ORIG_BASE_HEADER: base}
#moin_base = absolutize(wiki_id, base)
moin_base_info = base + ' ' + wrapped_wiki_base + ' ' + original_page
response_headers = [("Content-Type", ctype),
("Vary", "Accept"),
(moin.ORIG_BASE_HEADER, moin_base_info)]
if cache_max_age:
response_headers.append(("Cache-Control","max-age="+cache_max_age))
start_response(status_response(status), response_headers)
return rbody
except urllib2.URLError, e:
if e.code == 401:
raise HTTPAuthorizationError(url=request.get_full_url())
if e.code == 403:
raise MoinMustAuthenticateError(url=request.get_full_url(),target=wiki_id)
if e.code == 404:
raise MoinNotFoundError(fronturl=request_uri(environ),backurl=url)
else:
raise UnexpectedResponseError(url=url,code=e.code,error=str(e))
# PUT handler
@dispatcher.method("PUT")
def _put_page(environ, start_response):
    '''
    Replace the Moin wiki page named by PATH_INFO with the request body,
    by driving Moin's HTML page-edit form on the back-end wiki.
    Responds 201 CREATED with a plain-text confirmation body.
    '''
    req_headers = copy_headers_to_dict(environ,exclude=['HTTP_ACCEPT_ENCODING'])
    wiki_id, base, opener, original_page, wrapped_wiki_base = target(environ)
    page = environ['PATH_INFO'].lstrip('/')
    check_auth(environ, start_response, base, opener, req_headers)
    ctype = environ.get('CONTENT_TYPE', 'application/unknown')
    # Buffer the incoming request body to a temp file, then paste its full
    # contents into Moin's edit form as the new page text
    temp_fpath = read_http_body_to_temp(environ, start_response)
    form_vars = fill_page_edit_form(page, wiki_id, base, opener, req_headers)
    form_vars["savetext"] = open(temp_fpath, "r").read()
    url = absolutize(page, base)
    data = urllib.urlencode(form_vars)
    request = urllib2.Request(url, data, req_headers)
    try:
        logger.debug('Prior to urllib2.opener')
        with closing(opener.open(request)) as resp:
            logger.debug('Return from urllib2.opener')
            doc = htmlparse(resp)
            # Moin reports many failures inside a 200 page; surface them
            raise_embedded_error(doc)
            logger.debug('HTML parse complete post urllib2.opener')
    except urllib2.URLError,e:
        raise UnexpectedResponseError(url=url,code=e.code,error=str(e))
    msg = 'Page updated OK: ' + url
    #response.add_header("Content-Length", str(len(msg)))
    moin_base_info = base + ' ' + wrapped_wiki_base + ' ' + original_page
    start_response(status_response(httplib.CREATED), [("Content-Type", "text/plain"), ("Content-Location", url), (moin.ORIG_BASE_HEADER, moin_base_info)])
    return [msg]
# POST handler
@dispatcher.method("POST")
def post_page(environ, start_response):
'''
Attachments use URI path params
(for a bit of discussion see http://groups.google.com/group/comp.lang.python/browse_thread/thread/4662d41aca276d99)
'''
#ctype = environ.get('CONTENT_TYPE', 'application/unknown')
req_headers = copy_headers_to_dict(environ,exclude=['HTTP_ACCEPT_ENCODING'])
wiki_id, base, opener, original_page, wrapped_wiki_base = target(environ)
logger.debug("wiki_id,base,opener,original_age,wrapped_wiki_base="+repr((wiki_id,base,opener,original_page,wrapped_wiki_base)))
check_auth(environ, start_response, base, opener, req_headers)
page = environ['PATH_INFO'].lstrip('/')
page, chaff, attachment = page.partition(';attachment=')
# print >> sys.stderr, page, attachment
#now = datetime.now().isoformat()
#Unfortunately because urllib2's data dicts don't give an option for limiting read length, must read into memory and wrap
#content = StringIO(environ['wsgi.input'].read(clen))
temp_fpath = read_http_body_to_temp(environ, start_response)
form_vars = fill_attachment_form(page, attachment, wiki_id, base, opener, req_headers)
form_vars["file"] = open(temp_fpath, "rb")
url = absolutize(page, base)
#print >> sys.stderr, url, temp_fpath
#data = urllib.urlencode(form_vars)
request = urllib2.Request(url, form_vars, req_headers)
try:
with closing(opener.open(request)) as resp:
doc = htmlparse(resp)
raise_embedded_error(doc)
#logger.debug('POST for attachment page response... ' + doc.xml_encode())
except urllib2.URLError,e:
if e.code == 404:
raise MoinNotFoundError(fronturl=request_uri(environ), backurl=url)
else:
raise UnexpectedResponseError(url=url,code=e.code,error=str(e))
form_vars["file"].close()
os.remove(temp_fpath)
msg = 'Attachment updated OK: %s\n'%(url)
#response.add_header("Content-Length", str(len(msg)))
moin_base_info = base + ' ' + wrapped_wiki_base + ' ' + original_page
start_response(status_response(httplib.CREATED), [("Content-Type", "text/plain"), ("Content-Location", url), (moin.ORIG_BASE_HEADER, moin_base_info)])
return msg
# DELETE handler
@dispatcher.method("DELETE")
def _delete_page(environ, start_response):
'''
Deletes a Wiki page, returning 200 if successful. Does not yet support
the deletion of attachments.
'''
#The Moin form asks that this be in multipart-form format, but the multipart handler
#fallsback to url-encoding unless you pass it a file. Luckily, the equivalent
#url-encoded request works... for now.
req_headers = copy_headers_to_dict(environ,exclude=['HTTP_ACCEPT_ENCODING'])
wiki_id, base, opener, original_page, wrapped_wiki_base = target(environ)
page = environ['PATH_INFO'].lstrip('/')
check_auth(environ, start_response, base, opener, req_headers)
form_vars = fill_page_delete_form(page, wiki_id, base, opener, req_headers)
url = absolutize(page, base)
request = urllib2.Request(url, form_vars, req_headers)
try:
with closing(opener.open(request)) as resp:
doc = htmlparse(resp)
raise_embedded_error(doc)
except urllib2.URLError,e:
if e.code == 404:
# Moin returns 404 on a succcessful DeletePage POST; recast as a 200
pass
else:
raise UnexpectedResponseError(url=url,code=e.code,error=str(e))
msg = 'Page deleted OK: ' + url
start_response(status_response(httplib.OK),[("Content-Type","text/plain")])
return msg
| {
"content_hash": "63eda265f886e8bbaa30debae2e1c66e",
"timestamp": "",
"source": "github",
"line_count": 814,
"max_line_length": 204,
"avg_line_length": 44.221130221130224,
"alnum_prop": 0.6473219246582954,
"repo_name": "uogbuji/akara",
"id": "106b5f2941aa141f9ce4925a0566a3d0b6c7cb2e",
"size": "35996",
"binary": false,
"copies": "1",
"ref": "refs/heads/pregithub",
"path": "lib/demo/moinrest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "723024"
}
],
"symlink_target": ""
} |
from django.urls import reverse
from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class UntappdAccount(ProviderAccount):
    """Wraps an Untappd social account and exposes its profile URL,
    avatar URL and display name from the stored extra_data payload."""

    def _extra(self, key, fallback=None):
        # Single point of access to the raw extra_data dict.
        return self.account.extra_data.get(key, fallback)

    def get_profile_url(self):
        """Return the user's Untappd profile URL, if present."""
        return self._extra('untappd_url')

    def get_avatar_url(self):
        """Return the user's avatar image URL, if present."""
        return self._extra('user_avatar')

    def to_str(self):
        """Prefer the Untappd user name; otherwise the base representation."""
        fallback = super(UntappdAccount, self).to_str()
        return self._extra('user_name', fallback)
class UntappdProvider(OAuth2Provider):
    """OAuth2 provider for Untappd."""

    id = 'untappd'
    name = 'Untappd'
    account_class = UntappdAccount

    def get_auth_params(self, request, action):
        """Augment the default auth params with Untappd's redirect_url."""
        params = super(UntappdProvider, self).get_auth_params(request, action)
        # Untappd uses redirect_url instead of redirect_uri
        callback = reverse(self.id + '_callback')
        params['redirect_url'] = request.build_absolute_uri(callback)
        return params

    def extract_uid(self, data):
        """The stable account identifier is Untappd's numeric uid."""
        return str(data['response']['user']['uid'])

    def extract_common_fields(self, data):
        """Map the Untappd profile onto allauth's common user fields."""
        profile = data['response']['user']
        full_name = profile['first_name'] + ' ' + profile['last_name']
        return {'username': profile['user_name'], 'name': full_name}

    def extract_email_addresses(self, data):
        """Untappd reports a single, verified, primary email address."""
        address = data['response']['user']['settings']['email_address']
        return [EmailAddress(email=address, verified=True, primary=True)]


provider_classes = [UntappdProvider]
| {
"content_hash": "48ad1b006e1d6b4e99eae42956363209",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 78,
"avg_line_length": 30.557692307692307,
"alnum_prop": 0.6431718061674009,
"repo_name": "pztrick/django-allauth",
"id": "260d0ce82820abc9dbfff24f3142c82ad32d885c",
"size": "1589",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "allauth/socialaccount/providers/untappd/provider.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Emacs Lisp",
"bytes": "104"
},
{
"name": "HTML",
"bytes": "42111"
},
{
"name": "JavaScript",
"bytes": "3260"
},
{
"name": "Makefile",
"bytes": "396"
},
{
"name": "Python",
"bytes": "671309"
}
],
"symlink_target": ""
} |
import re, sys, os, tempfile, subprocess, logging
# Matches one "key = value <trailing comment>" config line; groups are
# (key, numeric value text, remainder-of-line).  Keys are alphanumeric
# plus '_' and '-'; values are bare numbers (digits, '.', '-').
CONFIGLINERE=re.compile("[ \t]*([a-z0-9_-]+)[ \t]*[=][ \t]*([.0-9-]+)(.*)", re.IGNORECASE)
# Command line help text printed on any usage error.
USAGE='''USAGE:
configtool <FILE> set <key> <val>
configtool <FILE> get <key>
configtool <FILE> search <regexp>
configtool <FILE> list
configtool <FILE> print
'''
class ConfigFile:
    '''manages a config file with dict()-like semantics: cfg[key] reads and
    writes typed values; each value is stored as a (number, comment) pair and
    the comment decides whether the value is treated as int or float'''
    def __init__(self, src=None):
        # src may be another ConfigFile (copy), a filename (load), or None
        # (start empty).  The None default fixes setConfigVal(), which
        # constructs ConfigFile() with no arguments and previously always
        # died with a TypeError.
        if src is None:
            '''empty config'''
            self.values = dict()
        elif type(src) == type(self):
            '''copy constructor'''
            self.values = dict(src.values)
        elif type(src) == type(""):
            '''construct from file'''
            self.values = dict()
            self.load(src)
        else:
            raise Exception("invalid arg:"+src)
    def load(self, filename):
        '''merge key = value lines from filename; warns (but continues) on
        lines that do not parse'''
        fd = open(filename)
        for n, line in enumerate(fd):
            try:
                key,val,com = CONFIGLINERE.match(line).group(1,2,3)
                self.add(key, val, com.strip())
            except Exception:
                sys.stderr.write("WARNING: %s:%d -- failed to parse config line\n" % (filename,n))
        fd.close()
    def save(self, filename):
        '''write all key = value comment lines to filename, sorted by key'''
        fd = open(filename, "w")
        for k,valcom in sorted(self.values.iteritems()):
            val, com = valcom
            if type(val) is type(0.1):
                # full precision for floats so save/load round-trips are lossless
                fd.write("%s = %.25e %s\n" % (k, val, com))
            else:
                fd.write("%s = %d %s\n" % (k, val, com))
        fd.close()
    def __str__(self):
        return "\n".join(map(lambda x: "%s = %d"%(x[0],x[1][0]), sorted(self.values.items())))
    def __getitem__(self, k):
        return self.values[k][0]
    def __setitem__(self, k, v):
        # preserve the existing comment; it also decides int vs float typing
        com=self.values[k][1]
        if type(v) is type(0.1) or "double" in com or "float" in com:
            self.values[k] = (float(v), com)
        else:
            self.values[k] = (int(v), com)
    def __hash__(self):
        return hash(str(self))
    def __cmp__(a, b):
        # Python 2 rich-compare via the values dict
        return cmp(a.values, b.values)
    def add(self, k, v, com="# added in script"):
        '''insert key k with value v; the comment decides int vs float typing'''
        if type(v) is type(0.1) or "double" in com or "float" in com:
            self.values[k] = (float(v), com)
        else:
            self.values[k] = (int(v), com)
    def keys(self):
        return self.values.keys()
def defaultConfigFile(bin):
    '''run `bin --config=TMP --reset` and return the defaults the program
    writes into TMP as a ConfigFile; the temp file is removed afterwards'''
    fd, name = tempfile.mkstemp(suffix='.cfg')
    os.close(fd)
    cmd = [bin, '--config='+name, '--reset']
    NULL = open("/dev/null", "w")
    try:
        subprocess.check_call(cmd, stderr=NULL)
    finally:
        # BUGFIX: the original leaked this file handle
        NULL.close()
    cfg = ConfigFile(name)
    os.unlink(name)
    return cfg
def getConfigVal(filename, key):
    '''legacy entry point to this file: return the value of key in filename,
    or None when the file or the key is unavailable'''
    try:
        cfg = ConfigFile(filename)
        value = cfg[key]
    except:
        value = None
    return value
def setConfigVal(filename, key, val):
    '''legacy entry point to this file: set key=val in filename, adding the
    key if it is not already present, then write the file back'''
    # Start from the on-disk config when possible; fall back to an empty
    # config with a warning.  (The original called ConfigFile() with no
    # argument, which always raised TypeError since __init__ requires src.)
    try:
        cfg = ConfigFile(filename)
    except Exception:
        sys.stderr.write("WARNING: failed to load config file "+filename+"\n")
        cfg = ConfigFile.__new__(ConfigFile)
        cfg.values = dict()
    try:
        cfg[key]=val
    except Exception:
        # unknown key (or unconvertible value): add it as a new entry
        sys.stderr.write("WARNING: missing val %s in %s\n" % (key, filename))
        cfg.add(key,val)
    cfg.save(filename)
def main(argv):
#parse args
try:
IN=argv[1]
OUT=None
cfg = ConfigFile(IN)
i=2
while i<len(argv):
act=argv[i].lower()
if act=="set":
OUT=IN
cfg[argv[i+1]] = argv[i+2]
i+=3
elif act=="get":
print cfg[argv[i+1]]
i+=2
elif act=="search":
r = re.compile(argv[i+1], re.IGNORECASE)
for k in filter(lambda x: r.search(x), cfg.keys()):
print k
i+=2
elif act=="list":
for k in cfg.keys():
print k
i+=1
elif act=="print":
print str(cfg)
i+=1
else:
raise None
if OUT:
cfg.save(OUT)
except Exception, e:
print e
sys.stderr.write(USAGE)
# Script entry point: dispatch the raw argv to the command line driver.
if __name__ == "__main__":
    main(sys.argv)
| {
"content_hash": "c27f5fd6ce9eeb66b782913f286503e7",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 90,
"avg_line_length": 25.65277777777778,
"alnum_prop": 0.565511640498105,
"repo_name": "petabricks/petabricks",
"id": "a2ce4fd646a46a3cd2945121306f027c6d1c57ae",
"size": "3716",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/configtool.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "152389"
},
{
"name": "C++",
"bytes": "1973892"
},
{
"name": "Haskell",
"bytes": "142729"
},
{
"name": "Perl",
"bytes": "5073"
},
{
"name": "Python",
"bytes": "255234"
},
{
"name": "Shell",
"bytes": "57344"
},
{
"name": "TeX",
"bytes": "27247"
}
],
"symlink_target": ""
} |
__author__ = 'Xiaocheng Tang and Ted Ralphs'
__maintainer__ = 'Ted Ralphs'
__email__ = 'ted@lehigh.edu'
# BUGFIX: was `__version_` (missing trailing underscore), which hid the
# version from tools that look up the conventional __version__ attribute.
__version__ = '1.0.0'
__url__ = 'https://github.com/tkralphs/GoogleDriveScripts'
# Last modified 2/17/2016 Ted Ralphs
# Visit this URL to download client secret file
# https://console.developers.google.com/start/api?id=drive
import httplib2
import pprint
import sys, os
from os.path import expanduser, join
from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client.tools import run_flow, argparser
import argparse
class DriveSort:
def __init__(self):
HOME = expanduser("~")
parser = argparse.ArgumentParser(parents=[argparser])
parser.add_argument('--folder-name', dest='folder_name',
help='Name of folder on Google Drive',
required=True)
parser.add_argument('--credentials-file', dest='credentials_file',
help='Name of file to get/store credentials',
default=join(HOME, '.gdrive_credentials'))
parser.add_argument('--dry-run', dest='dry_run', action='store_true',
help='Do dry run')
parser.add_argument('--user_agent', dest='user_agent',
help='Name of app under which to run the script')
parser.add_argument('--client_secret', dest='client_secret',
help='File in which client secret is stored',
default=join(HOME, '.client_secret.json'))
parser.add_argument('--email-domain', dest='email_domain',
help='Domain for e-mail addresses',
default=None)
parser.add_argument('--create-subfolders', dest='create_subfolders',
action='store_true',
help='Create subfolders for each file owner')
parser.add_argument('--move-files', dest='move_files',
action='store_true',
help='Move files to subfolders')
parser.add_argument('--change-permissions', dest='change_permissions',
action='store_true',
help='Change permissions on subfolders')
parser.add_argument('--list', dest='list_contents', action='store_true',
help='List all files in folder')
self.flags = parser.parse_args()
self.authorize()
def authorize(self):
# Check https://developers.google.com/drive/scopes for all available
# scopes
OAUTH_SCOPE = 'https://www.googleapis.com/auth/drive'
# Run through the OAuth flow and retrieve credentials
# Create a Storage object. This object holds the credentials that your
# application needs to authorize access to the user's data. The name of
# the credentials file is provided. If the file does not exist, it is
# created. This object can only hold credentials for a single user, so
# as-written, this script can only handle a single user.
storage = Storage(self.flags.credentials_file)
# The get() function returns the credentials for the Storage object.
# If no credentials were found, None is returned.
credentials = storage.get()
# If no credentials are found or the credentials are invalid due to
# expiration, new credentials need to be obtained from the authorization
# server. The oauth2client.tools.run() function attempts to open an
# authorization server page in your default web browser. The server
# asks the user to grant your application access to the user's data.
# If the user grants access, the run() function returns new credentials.
# The new credentials are also stored in the supplied Storage object,
# which updates the credentials.dat file.
if credentials is None or credentials.invalid:
flow = flow_from_clientsecrets(client_secret, OAUTH_SCOPE)
flow.user_agent = self.flags.user_agent
credentials = run_flow(flow, storage, self.flags)
# Create an httplib2.Http object and authorize it with our credentials
http = httplib2.Http()
http = credentials.authorize(http)
self.drive_service = build('drive', 'v3', http=http)
#http://stackoverflow.com/questions/13558653/
def createRemoteFolder(self, folderName, parentID = None):
# Create a folder on Drive, returns the newly created folders ID
body = {
'name': folderName,
'mimeType': "application/vnd.google-apps.folder"
}
if parentID:
body['parents'] = [parentID]
root_folder = self.drive_service.files().create(body = body).execute()
return root_folder['id']
def getFilesInFolder(self, folderName = None):
if folderName == None:
folderName = self.flags.folder_name
q = r"mimeType = 'application/vnd.google-apps.folder'"
pageToken = None
while True:
fields = "nextPageToken, "
fields += "files(id, name)"
results = self.drive_service.files().list(q=q, pageSize=1000,
pageToken=pageToken,
fields=fields).execute()
folders = results['files']
try:
folder_id = filter(lambda x: x['name'] == folderName,
folders)[0]['id']
except IndexError:
pageToken = results.get('nextPageToken')
if not results.get('nextPageToken'):
print "ERROR: Specified folder does not exist."
sys.exit()
else:
break
# search for all files under that folder
q = r"'{}' in parents".format(folder_id)
fields = ("files(id, name, mimeType, permissions, sharingUser, " +
"owners, parents)")
return (folder_id, self.drive_service.files().list(q=q, pageSize=1000,
fields=fields).execute()['files'])
def createSubFolders(self, folderName = None):
if folderName == None:
folderName = self.flags.folder_name
folder_id, files = self.getFilesInFolder(folderName)
print folder_id
user_ids = []
for f in files:
if f['mimeType'] != 'application/vnd.google-apps.folder':
# Google Drive seems to not change ownership sometimes...
try:
user_id = f['sharingUser']['emailAddress'].split('@')[0]
except KeyError:
user_id = f['owners'][0]['emailAddress'].split('@')[0]
if user_id not in user_ids:
user_ids.append(user_id)
self.folderIds = {}
for user_id in user_ids:
print "Creating folder", user_id
# Check to see if it's a dry run or folder is already there
if (self.flags.dry_run == False or
filter(lambda x: x['name'] == user_id, files) != []):
self.folderIds['user_id'] = self.createRemoteFolder(user_id,
folder_id)
def moveFiles(self, folderName = None):
if folderName == None:
folderName = self.flags.folder_name
folder_id, files = self.getFilesInFolder(folderName)
for f in files:
if f['mimeType'] != 'application/vnd.google-apps.folder':
# Google Drive seems to not change ownership sometimes...
try:
user_id = f['sharingUser']['emailAddress'].split('@')[0]
except KeyError:
user_id = f['owners'][0]['emailAddress'].split('@')[0]
print "Moving", f['name'], 'to', user_id
parents = f['parents']
if not self.flags.dry_run:
try:
new_parent = filter(lambda x: x['name'] ==
user_id, files)[0]['id']
except KeyError:
print "Folder not found. Maybe",
print "run creatFolders() again?"
self.drive_service.files().update(fileId=f['id'],
removeParents=parents[0],
addParents=new_parent
).execute()
def changePermissions(self, domain = None, folderName = None):
if folderName == None:
folderName = self.flags.folder_name
if domain == None:
if self.flags.email_domain:
domain = self.flags.email_domain
else:
print "ERROR: Must specify e-mail domain to change permissions."
sys.exit()
folder_id, files = self.getFilesInFolder(folderName)
for f in files:
if f['mimeType'] == 'application/vnd.google-apps.folder':
print 'Sharing', f['name'], 'with', '%s@%s'% (f['name'],
domain)
emailAddress = f['name']+"@"+domain
permissionId = None
for perms in f['permissions']:
if perms['emailAddress'] == emailAddress:
permissionId = perms['id']
if not self.flags.dry_run:
if permissionId:
new_perm = {
'role' : 'commenter'
}
try:
self.drive_service.permissions().update(
fileId=f['id'],
permissionId=permissionId,
body = new_perm).execute()
except:
print "Could not change permissions on", f['name']
else:
new_perm = {
'emailAddress' : emailAddress,
'type' : 'user',
'role' : 'commenter'
}
self.drive_service.permissions().create(fileId=f['id'],
permissionId=permissionId,
body = new_perm).execute()
# Command line driver: each action runs only when its flag was given, so
# several actions may be combined in one invocation.
if __name__ == '__main__':
    # Parse arguments and authorize connection
    drive = DriveSort()
    # Print names of all files in folder
    if drive.flags.list_contents:
        print "Folder contents:"
        for f in drive.getFilesInFolder()[1]:
            print f['name']
    #Create subfolder with same name as e-mail user ID of last modifying user
    if drive.flags.create_subfolders:
        drive.createSubFolders()
    # Move files into folders
    if drive.flags.move_files:
        drive.moveFiles()
    # Grant permission to original owner
    if drive.flags.change_permissions:
        drive.changePermissions()
| {
"content_hash": "e0246b51a59ccb7476de301e0c8e1c0a",
"timestamp": "",
"source": "github",
"line_count": 253,
"max_line_length": 80,
"avg_line_length": 45.36363636363637,
"alnum_prop": 0.5253114925503181,
"repo_name": "tkralphs/GoogleDriveScripts",
"id": "88b5ba2de15eb1655297ace5fa03c976e453f870",
"size": "11500",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DriveSort.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9390"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, unicode_literals
import csv
import json
import os
from django.conf import settings
from django.contrib.gis.geos import GEOSGeometry, MultiPolygon
from django.core.management.base import BaseCommand
from scuole.states.models import Commissioner, State
class Command(BaseCommand):
    help = 'Bootstraps state models.'

    def handle(self, *args, **options):
        """Load the Texas shape and commissioner data, then persist both."""
        shape_path = os.path.join(
            settings.DATA_FOLDER, 'state', 'shape', 'tx.geojson')
        self.shape_data = self.load_geojson_file(shape_path)
        commissioner_path = os.path.join(
            settings.DATA_FOLDER, 'state', 'commissioner.csv')
        self.commissioner_data = self.load_commissioner_file(
            commissioner_path)
        self.create_state()

    def load_geojson_file(self, file):
        """Return the geometry dict of a feature in the GeoJSON file.

        NOTE(review): iterating keeps only the LAST feature's geometry (and
        an empty dict when there are none) -- presumably the files hold a
        single feature; confirm.
        """
        with open(file, 'rU') as f:
            data = json.load(f)
        payload = {}
        for feature in data['features']:
            payload = feature['geometry']
        return payload

    def load_commissioner_file(self, file):
        """Return the first data row of the commissioner CSV as a dict."""
        with open(file, 'rU') as f:
            return next(csv.DictReader(f))

    def create_state(self):
        """Create or refresh the TX State row, then its Commissioner."""
        self.stdout.write('Creating Texas...')
        geometry = GEOSGeometry(json.dumps(self.shape_data))
        # geom_typeid 3 is a bare Polygon; promote it to a MultiPolygon so
        # it fits the model's shape field
        if geometry.geom_typeid == 3:
            geometry = MultiPolygon(geometry)
        state, _ = State.objects.update_or_create(
            slug='tx',
            defaults={'name': 'TX', 'shape': geometry},
        )
        self.load_commissioner(state, self.commissioner_data)

    def load_commissioner(self, state, commissioner):
        """Create or refresh the Commissioner row attached to ``state``."""
        Commissioner.objects.update_or_create(
            state=state,
            defaults={
                'name': commissioner['Full Name'],
                'role': commissioner['Role'],
                'phone_number': commissioner['Phone'],
                'email': commissioner['Email'],
            },
        )
| {
"content_hash": "905a96798edf593bcd99e11ede63808a",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 62,
"avg_line_length": 27.02469135802469,
"alnum_prop": 0.5746916400182732,
"repo_name": "texastribune/scuole",
"id": "a1de8058c24d47b3d8da78a1e49ad8a5cabcd65a",
"size": "2213",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scuole/states/management/commands/bootstrapstates.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1596"
},
{
"name": "HTML",
"bytes": "122611"
},
{
"name": "JavaScript",
"bytes": "44740"
},
{
"name": "Jupyter Notebook",
"bytes": "18454"
},
{
"name": "Makefile",
"bytes": "7139"
},
{
"name": "Python",
"bytes": "611799"
},
{
"name": "SCSS",
"bytes": "32865"
},
{
"name": "Shell",
"bytes": "700"
}
],
"symlink_target": ""
} |
import sahara.plugins.mapr.base.base_cluster_context as bc
import sahara.plugins.mapr.services.yarn.yarn as yarn
class Context(bc.BaseClusterContext):
    """Cluster context for MapR 4.0.2 running YARN (MRv2)."""

    def __init__(self, cluster, version_handler, added=None, removed=None):
        super(Context, self).__init__(cluster, version_handler, added, removed)
        self._hadoop_version = yarn.YARNv251().version
        self._hadoop_lib = None
        self._hadoop_conf = None
        self._cluster_mode = yarn.YARNv251.cluster_mode
        self._node_aware = True
        self._resource_manager_uri = None
        self._mapr_version = '4.0.2'
        self._ubuntu_ecosystem_repo = (
            'http://package.mapr.com/releases/ecosystem-4.x/ubuntu binary/')
        self._centos_ecosystem_repo = (
            'http://package.mapr.com/releases/ecosystem-4.x/redhat')

    @property
    def hadoop_lib(self):
        """Path to the Hadoop common libraries (computed lazily)."""
        if not self._hadoop_lib:
            self._hadoop_lib = '%s/share/hadoop/common' % self.hadoop_home
        return self._hadoop_lib

    @property
    def hadoop_conf(self):
        """Path to the Hadoop configuration directory (computed lazily)."""
        if not self._hadoop_conf:
            self._hadoop_conf = '%s/etc/hadoop' % self.hadoop_home
        return self._hadoop_conf

    @property
    def resource_manager_uri(self):
        """host:port of the single ResourceManager instance."""
        # FIXME(aosadchyi): Wait for RM HA to work properly
        if not self._resource_manager_uri:
            rm_ip = self.get_instance(yarn.RESOURCE_MANAGER).internal_ip
            self._resource_manager_uri = '%s:8032' % rm_ip
        return self._resource_manager_uri

    @property
    def configure_sh(self):
        """configure.sh command line, extended with the RM and history
        server addresses (computed lazily)."""
        if not self._configure_sh:
            template = '%(base)s -RM %(resource_manager)s -HS %(history_server)s'
            self._configure_sh = template % {
                'base': super(Context, self).configure_sh,
                'resource_manager': self.get_resourcemanager_ip(),
                'history_server': self.get_historyserver_ip(),
            }
        return self._configure_sh
| {
"content_hash": "4517a6e9a05d1b33db37499f59420d43",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 79,
"avg_line_length": 39.08,
"alnum_prop": 0.6064483111566018,
"repo_name": "crobby/sahara",
"id": "4af1f37d678672cb21b95c39a877d028192187b0",
"size": "2544",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "sahara/plugins/mapr/versions/v4_0_2_mrv2/context.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "3609"
},
{
"name": "Mako",
"bytes": "33627"
},
{
"name": "PigLatin",
"bytes": "792"
},
{
"name": "Python",
"bytes": "3355980"
},
{
"name": "Shell",
"bytes": "61693"
}
],
"symlink_target": ""
} |
"""
Contains the user interface in the Universe class
"""
from galaxy.web.base.controller import *
from galaxy.model.orm import *
from galaxy import util
import logging, os, string
from random import choice
log = logging.getLogger( __name__ )
require_login_template = """
<h1>Welcome to Galaxy</h1>
<p>
This installation of Galaxy has been configured such that only users who are logged in may use it.%s
</p>
<p/>
"""
require_login_nocreation_template = require_login_template % ""
require_login_creation_template = require_login_template % " If you don't already have an account, <a href='%s'>you may create one</a>."
class User( BaseController ):
    """Controller exposing user account actions: account home, login/logout,
    registration, password and email changes, password reset, and the
    default-permissions editor."""

    @web.expose
    def index( self, trans, **kwd ):
        """Render the user's account home page."""
        return trans.fill_template( '/user/index.mako', user=trans.get_user() )

    @web.expose
    def change_password(self, trans, old_pass='', new_pass='', conf_pass='', **kwd):
        """Validate and apply a password change for the logged-in user."""
        old_pass_err = new_pass_err = conf_pass_err = ''
        user = trans.get_user()
        if not user:
            # BUGFIX: return after redirecting -- the original fell through
            # and dereferenced the None user on a POST request.
            return trans.response.send_redirect( web.url_for( action='login' ) )
        if trans.request.method == 'POST':
            if not user.check_password( old_pass ):
                old_pass_err = "Invalid password"
            elif len( new_pass ) < 6:
                new_pass_err = "Please use a password of at least 6 characters"
            elif new_pass != conf_pass:
                conf_pass_err = "New passwords do not match."
            else:
                user.set_password_cleartext( new_pass )
                user.flush()
                trans.log_event( "User change password" )
                return trans.show_ok_message( "Password has been changed for " + user.email)
        # Generate input form
        return trans.show_form(
            web.FormBuilder( web.url_for() , "Change Password", submit_text="Submit" )
                .add_password( "old_pass", "Old Password", value='', error=old_pass_err )
                .add_password( "new_pass", "New Password", value='', error=new_pass_err )
                .add_password( "conf_pass", "Confirm Password", value='', error=conf_pass_err ) )

    @web.expose
    def change_email(self, trans, email='', conf_email='', password='', **kwd):
        """Validate and apply an email-address change for the logged-in user."""
        email_err = conf_email_err = pass_err = ''
        user = trans.get_user()
        if not user:
            # BUGFIX: return after redirecting -- the original fell through
            # and dereferenced the None user on a POST request.
            return trans.response.send_redirect( web.url_for( action='login' ) )
        if trans.request.method == "POST":
            if not user.check_password( password ):
                pass_err = "Invalid password"
            elif len( email ) == 0 or "@" not in email or "." not in email:
                email_err = "Please enter a real email address"
            elif len( email) > 255:
                email_err = "Email address exceeds maximum allowable length"
            elif trans.app.model.User.filter_by( email=email ).first():
                email_err = "User with that email already exists"
            elif email != conf_email:
                conf_email_err = "Email addresses do not match."
            else:
                user.email = email
                user.flush()
                trans.log_event( "User change email" )
                return trans.show_ok_message( "Email has been changed to: " + user.email, refresh_frames=['masthead', 'history'] )
        return trans.show_form(
            web.FormBuilder( web.url_for(), "Change Email", submit_text="Submit" )
                .add_text( "email", "Email", value=email, error=email_err )
                .add_text( "conf_email", "Confirm Email", value='', error=conf_email_err )
                .add_password( "password", "Password", value='', error=pass_err ) )

    @web.expose
    def login( self, trans, email='', password='' ):
        """Validate credentials and log the user in; renders the login form."""
        email_error = password_error = None
        # Attempt login
        if trans.app.config.require_login:
            refresh_frames = [ 'masthead', 'history', 'tools' ]
        else:
            refresh_frames = [ 'masthead', 'history' ]
        if email or password:
            user = trans.app.model.User.filter( trans.app.model.User.table.c.email==email ).first()
            if not user:
                email_error = "No such user"
            elif user.deleted:
                email_error = "This account has been marked deleted, contact your Galaxy administrator to restore the account."
            elif user.external:
                email_error = "This account was created for use with an external authentication method, contact your local Galaxy administrator to activate it."
            elif not user.check_password( password ):
                password_error = "Invalid password"
            else:
                trans.handle_user_login( user )
                trans.log_event( "User logged in" )
                msg = "Now logged in as " + user.email + "."
                if trans.app.config.require_login:
                    msg += ' <a href="%s">Click here</a> to continue to the front page.' % web.url_for( '/static/welcome.html' )
                return trans.show_ok_message( msg, refresh_frames=refresh_frames )
        form = web.FormBuilder( web.url_for(), "Login", submit_text="Login" ) \
                .add_text( "email", "Email address", value=email, error=email_error ) \
                .add_password( "password", "Password", value='', error=password_error,
                                help="<a href='%s'>Forgot password? Reset here</a>" % web.url_for( action='reset_password' ) )
        if trans.app.config.require_login:
            if trans.app.config.allow_user_creation:
                return trans.show_form( form, header = require_login_creation_template % web.url_for( action = 'create' ) )
            else:
                return trans.show_form( form, header = require_login_nocreation_template )
        else:
            return trans.show_form( form )

    @web.expose
    def logout( self, trans ):
        """End the user's session."""
        if trans.app.config.require_login:
            refresh_frames = [ 'masthead', 'history', 'tools' ]
        else:
            refresh_frames = [ 'masthead', 'history' ]
        # Since logging an event requires a session, we'll log prior to ending the session
        trans.log_event( "User logged out" )
        trans.handle_user_logout()
        msg = "You are no longer logged in."
        if trans.app.config.require_login:
            msg += ' <a href="%s">Click here</a> to return to the login page.' % web.url_for( controller='user', action='login' )
        return trans.show_ok_message( msg, refresh_frames=refresh_frames )

    @web.expose
    def create( self, trans, email='', password='', confirm='', subscribe=False ):
        """Create a new account, log the new user in, and optionally
        subscribe them to the mailing list."""
        if trans.app.config.require_login:
            refresh_frames = [ 'masthead', 'history', 'tools' ]
        else:
            refresh_frames = [ 'masthead', 'history' ]
        if not trans.app.config.allow_user_creation and not trans.user_is_admin():
            return trans.show_error_message( 'User registration is disabled.  Please contact your Galaxy administrator for an account.' )
        email_error = password_error = confirm_error = None
        if email:
            if len( email ) == 0 or "@" not in email or "." not in email:
                email_error = "Please enter a real email address"
            elif len( email ) > 255:
                email_error = "Email address exceeds maximum allowable length"
            elif trans.app.model.User.filter( and_( trans.app.model.User.table.c.email==email,
                                                    trans.app.model.User.table.c.deleted==False ) ).first():
                email_error = "User with that email already exists"
            elif len( password ) < 6:
                password_error = "Please use a password of at least 6 characters"
            elif password != confirm:
                confirm_error = "Passwords do not match"
            else:
                user = trans.app.model.User( email=email )
                user.set_password_cleartext( password )
                user.flush()
                trans.app.security_agent.create_private_user_role( user )
                # We set default user permissions, before we log in and set the default history permissions
                trans.app.security_agent.user_set_default_permissions( user )
                # The handle_user_login() method has a call to the history_set_default_permissions() method
                # (needed when logging in with a history), user needs to have default permissions set before logging in
                trans.handle_user_login( user )
                trans.log_event( "User created a new account" )
                trans.log_event( "User logged in" )
                #subscribe user to email list
                if subscribe:
                    mail = os.popen("%s -t" % trans.app.config.sendmail_path, 'w')
                    mail.write("To: %s\nFrom: %s\nSubject: Join Mailing List\n\nJoin Mailing list." % (trans.app.config.mailing_join_addr,email) )
                    if mail.close():
                        return trans.show_warn_message( "Now logged in as " + user.email+". However, subscribing to the mailing list has failed.", refresh_frames=['masthead', 'history'] )
                return trans.show_ok_message( "Now logged in as " + user.email, refresh_frames=['masthead', 'history'] )
        return trans.show_form(
            web.FormBuilder( web.url_for(), "Create account", submit_text="Create" )
                .add_text( "email", "Email address", value=email, error=email_error )
                .add_password( "password", "Password", value='', error=password_error )
                .add_password( "confirm", "Confirm password", value='', error=confirm_error )
                .add_input( "checkbox","Subscribe To Mailing List","subscribe", value='subscribe' ) )

    @web.expose
    def reset_password( self, trans, email=None, **kwd ):
        """Reset a user's password to a random string and email it to them."""
        error = ''
        reset_user = trans.app.model.User.filter( trans.app.model.User.table.c.email==email ).first()
        user = trans.get_user()
        if reset_user:
            if user and user.id != reset_user.id:
                error = "You may only reset your own password"
            else:
                # generate a random 15-character replacement password
                chars = string.letters + string.digits
                new_pass = ""
                for i in range(15):
                    new_pass = new_pass + choice(chars)
                mail = os.popen("%s -t" % trans.app.config.sendmail_path, 'w')
                mail.write("To: %s\nFrom: no-reply@%s\nSubject: Galaxy Password Reset\n\nYour password has been reset to \"%s\" (no quotes)." % (email, trans.request.remote_addr, new_pass) )
                if mail.close():
                    return trans.show_error_message( 'Failed to reset password.  If this problem persists, please submit a bug report.' )
                reset_user.set_password_cleartext( new_pass )
                reset_user.flush()
                trans.log_event( "User reset password: %s" % email )
                return trans.show_ok_message( "Password has been reset and emailed to: %s.  <a href='%s'>Click here</a> to return to the login form." % ( email, web.url_for( action='login' ) ) )
        elif email != None:
            error = "The specified user does not exist"
        return trans.show_form(
            web.FormBuilder( web.url_for(), "Reset Password", submit_text="Submit" )
                .add_text( "email", "Email", value=email, error=error ) )

    @web.expose
    def set_default_permissions( self, trans, **kwd ):
        """Sets the user's default permissions for the new histories"""
        if trans.user:
            if 'update_roles_button' in kwd:
                p = util.Params( kwd )
                permissions = {}
                for k, v in trans.app.model.Dataset.permitted_actions.items():
                    in_roles = p.get( k + '_in', [] )
                    if not isinstance( in_roles, list ):
                        in_roles = [ in_roles ]
                    in_roles = [ trans.app.model.Role.get( x ) for x in in_roles ]
                    action = trans.app.security_agent.get_action( v.action ).action
                    permissions[ action ] = in_roles
                trans.app.security_agent.user_set_default_permissions( trans.user, permissions )
                return trans.show_ok_message( 'Default new history permissions have been changed.' )
            return trans.fill_template( 'user/permissions.mako' )
        else:
            # User not logged in, history group must be only public
            return trans.show_error_message( "You must be logged in to change your default permitted actions." )
| {
"content_hash": "ddf5bab412872dbe9fb77f9ec7ac25cb",
"timestamp": "",
"source": "github",
"line_count": 226,
"max_line_length": 194,
"avg_line_length": 56.14601769911504,
"alnum_prop": 0.5731736149420759,
"repo_name": "dbcls/dbcls-galaxy",
"id": "3b62573fb0d0dcffd047c7c4e296a4c9fdd8dadb",
"size": "12689",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/galaxy/web/controllers/user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "646729"
},
{
"name": "Perl",
"bytes": "40970"
},
{
"name": "Python",
"bytes": "2646651"
},
{
"name": "Ruby",
"bytes": "145028"
},
{
"name": "Shell",
"bytes": "21457"
}
],
"symlink_target": ""
} |
import webbrowser
from oauth2client.client import OAuth2WebServerFlow, FlowExchangeError
from oauth2client.keyring_storage import Storage
from cactus.deployment.gcs import CACTUS_CLIENT_ID, CACTUS_CLIENT_SECRET, CACTUS_REQUIRED_SCOPE, LOCAL_REDIRECT_URI
from cactus.exceptions import InvalidCredentials
class GCSCredentialsManager(object):
    """Obtain, cache and persist Google Cloud Storage OAuth2 credentials.

    Credentials are looked up in the keyring-backed store first; when absent,
    an OAuth2 web-server flow is run and the user is prompted for the
    authorization code.
    """
    def __init__(self, engine):
        # The deployment engine supplies the bucket name and the UI prompt.
        self.engine = engine  # TODO: Only pass those things that are needed?
        self.credentials = None

    def get_storage(self):
        # Keyed on the bucket name for lack of a better identifier.
        # TODO: Not a great key, but do we want to ask for email?
        return Storage("cactus/gcs", self.engine.bucket_name)

    def get_credentials(self):
        """Return credentials, loading from storage or running the OAuth2 flow."""
        if self.credentials is None:
            self.credentials = self.get_storage().get()
        if self.credentials is None:
            flow = OAuth2WebServerFlow(
                client_id=CACTUS_CLIENT_ID,
                client_secret=CACTUS_CLIENT_SECRET,
                scope=CACTUS_REQUIRED_SCOPE,
                redirect_uri=LOCAL_REDIRECT_URI
            )
            # Send the user to the consent page in their browser.
            webbrowser.open(flow.step1_get_authorize_url())  # TODO: Actually print the URL...
            code = self.engine.site.ui.prompt('Please enter the authorization code')
            try:
                self.credentials = flow.step2_exchange(code)  # TODO: Catch invalid grant
            except FlowExchangeError:
                raise InvalidCredentials("The authorization did not match.")
        return self.credentials

    def save_credentials(self):
        """Persist the credentials previously acquired by get_credentials()."""
        assert self.credentials is not None, "You did not set credentials before saving them"  # TODO: That's still bad
        self.get_storage().put(self.credentials)
| {
"content_hash": "5375857c84b02aa2b5f35b677218cb52",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 119,
"avg_line_length": 37.38297872340426,
"alnum_prop": 0.6585088218554354,
"repo_name": "dreadatour/Cactus",
"id": "833e7a0e7bb02c632a1d0f49c1d502a59e3a0214",
"size": "1772",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cactus/deployment/gcs/auth.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "261"
},
{
"name": "HTML",
"bytes": "8133"
},
{
"name": "JavaScript",
"bytes": "60"
},
{
"name": "Makefile",
"bytes": "448"
},
{
"name": "Python",
"bytes": "229376"
}
],
"symlink_target": ""
} |
import unittest
import sys
sys.path.append('LeetCode/_001_050')
sys.path.append('LeetCode.Test')
from _024_SwapNodesInPairs import Solution
import TestHelper, AssertHelper
class Test_024_SwapNodesInPairs(unittest.TestCase):
    """Unit tests for LeetCode 024: swap every two adjacent linked-list nodes."""

    def setUp(self):
        # One solver per test; removes the Solution() construction that was
        # duplicated in every test method.
        self.solution = Solution()

    def test_swapPairs(self):
        # Even-length list: every pair is swapped.
        link = TestHelper.generateLinkList([1, 2, 3, 4])
        result = self.solution.swapPairs(link)
        AssertHelper.assertLinkList([2, 1, 4, 3], result)

    def test_swapPairs_empty(self):
        # Empty list passes through unchanged.
        result = self.solution.swapPairs(None)
        self.assertEqual(None, result)

    def test_swapPairs_onlyOne(self):
        # A single node has no pair to swap.
        link = TestHelper.generateLinkList([1])
        result = self.solution.swapPairs(link)
        AssertHelper.assertLinkList([1], result)

    def test_swapPairs_onlyTwo(self):
        link = TestHelper.generateLinkList([1, 2])
        result = self.solution.swapPairs(link)
        AssertHelper.assertLinkList([2, 1], result)

    def test_swapPairs_Odd(self):
        # Odd-length list: the trailing node stays in place.
        link = TestHelper.generateLinkList([1, 2, 3])
        result = self.solution.swapPairs(link)
        AssertHelper.assertLinkList([2, 1, 3], result)
| {
"content_hash": "8727f5b449b3d72e2e87d9fc95a0e630",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 57,
"avg_line_length": 28.72093023255814,
"alnum_prop": 0.6591093117408907,
"repo_name": "BigEgg/LeetCode",
"id": "50f43ab8f031b2dd9e0f1fa5b54304f6b07c929f",
"size": "1235",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Python/LeetCode.Test/_001_050/Test_024_SwapNodesInPairs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "6405"
},
{
"name": "C#",
"bytes": "460435"
},
{
"name": "C++",
"bytes": "49261"
},
{
"name": "HTML",
"bytes": "1371"
},
{
"name": "Java",
"bytes": "22777"
},
{
"name": "JavaScript",
"bytes": "41935"
},
{
"name": "Python",
"bytes": "167427"
},
{
"name": "Smalltalk",
"bytes": "1174"
}
],
"symlink_target": ""
} |
from django.test import RequestFactory, TestCase
from feder.teryt.factories import JSTFactory
from teryt_tree.rest_framework_ext.viewsets import JednostkaAdministracyjnaViewSet
class TerytViewSetTestCase(TestCase):
    """Smoke tests for the JednostkaAdministracyjna REST viewset."""

    def setUp(self):
        self.factory = RequestFactory()
        self.jst = JSTFactory()

    def _dispatch_get(self, action, **view_kwargs):
        # Build a GET request and dispatch it to the requested viewset action.
        request = self.factory.get("/")
        view_func = JednostkaAdministracyjnaViewSet.as_view({"get": action})
        return view_func(request, **view_kwargs)

    def test_list_display(self):
        response = self._dispatch_get("list")
        self.assertEqual(response.status_code, 200)

    def test_details_display(self):
        response = self._dispatch_get("retrieve", pk=self.jst.pk)
        self.assertEqual(response.status_code, 200)
| {
"content_hash": "d3ed6cd72a9ff2bd0a14a534b2252225",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 82,
"avg_line_length": 37.09090909090909,
"alnum_prop": 0.696078431372549,
"repo_name": "watchdogpolska/feder",
"id": "79c3cf15c6d154918973d555c337a96f678dda72",
"size": "816",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "feder/teryt/tests/test_viewsets.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "624"
},
{
"name": "HTML",
"bytes": "183421"
},
{
"name": "JavaScript",
"bytes": "6245"
},
{
"name": "Makefile",
"bytes": "2086"
},
{
"name": "Python",
"bytes": "574027"
},
{
"name": "SCSS",
"bytes": "40546"
},
{
"name": "Shell",
"bytes": "214"
}
],
"symlink_target": ""
} |
"""
API v1.0 Command and Control (C2) direct access routes:
Enter direct access mode: /c2/instrument/<string:reference_designator>/direct_access/start
Execute direct access command /c2/instrument/<string:reference_designator>/direct_access/execute
Exit direct access mode: /c2/instrument/<string:reference_designator>/direct_access/exit
Get sniffer data from instrument /c2/instrument/<string:reference_designator>/direct_access/sniffer
"""
__author__ = 'Edna Donoughe'
from flask import jsonify, current_app, request
from ooiservices.app.decorators import scope_required
from ooiservices.app.main import api
from ooiservices.app.main.errors import bad_request
from ooiservices.app.main.authentication import auth
from ooiservices.app.main.c2 import _c2_get_instrument_driver_status, uframe_post_instrument_driver_command, \
_eval_POST_response_data
import requests
from requests.exceptions import ConnectionError, Timeout
#from ooiservices.app.uframe.config import get_c2_uframe_info, get_uframe_timeout_info
import socket as sock
import ast
import json
import base64
# Direct Access start.
# todo deprecate 'GET'?
@api.route('/c2/instrument/<string:reference_designator>/direct_access/start', methods=['POST', 'GET'])
@auth.login_required
@scope_required(u'command_control')
def c2_direct_access_start(reference_designator):
    """ Start direct access (when button 'Start Direct' is selected).

    Transitions the instrument driver to 'DRIVER_STATE_DIRECT_ACCESS' by
    executing the 'DRIVER_EVENT_START_DIRECT' driver command.
    POST Sample:
        http://uft21.ooi.rutgers.edu:12572/instrument/api/RS10ENGC-XX00X-00-FLORDD001/start
        Command: "DRIVER_EVENT_START_DIRECT"
    Returns the instrument status (json); on failure returns a bad request response.
    """
    rd = reference_designator
    NOT_NONE = 'NOT_NONE'
    capability_DRIVER_EVENT_START_DIRECT = 'DRIVER_EVENT_START_DIRECT'
    target_state = 'DRIVER_STATE_DIRECT_ACCESS'
    try:
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Prepare to execute - direct access start command
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Get state/capabilities; short-circuit if already in direct access.
        _state, _capabilities, result = direct_access_get_state_and_capabilities(rd)
        if _state == target_state:
            return jsonify(result)
        # Verify a state is reported and the start capability is advertised.
        verify_state_and_capabilities(rd, _state, _capabilities,
                                      expected_state=NOT_NONE,
                                      expected_capability=capability_DRIVER_EVENT_START_DIRECT)
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Execute driver command 'DRIVER_EVENT_START_DIRECT' on upstream server
        # (suffix is the URL-encoded command plus a 60s driver timeout).
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        suffix = 'command=%22DRIVER_EVENT_START_DIRECT%22&timeout=60000'
        response = uframe_post_instrument_driver_command(reference_designator, 'execute', suffix)
        if response.status_code != 200:
            message = '(%s) execute %s failed.' % (str(response.status_code), capability_DRIVER_EVENT_START_DIRECT)
            raise Exception(message)
        # Verify command execution status by reviewing error information
        # returned from the instrument driver.
        if response.content:
            try:
                response_data = json.loads(response.content)
            except Exception:
                raise Exception('Direct access start command - malformed response data; invalid json format.')
            if response_data:
                status_code, status_type, status_message = _eval_POST_response_data(response_data, "")
                if status_code != 200:
                    raise Exception(status_message)
        # Re-query state; verify the driver reached the direct access state.
        _state, _capabilities, result = direct_access_get_state_and_capabilities(rd)
        verify_state_and_capabilities(rd, _state, _capabilities,
                                      expected_state=target_state,
                                      expected_capability=NOT_NONE)
        return jsonify(result)
    except Exception as err:
        message = '(%s) direct access start exception: %s' % (rd, err.message)
        current_app.logger.info(message)
        return bad_request(err.message)
def _headers():
""" Headers for uframe POST.
"""
return {"Content-Type": "application/json"}
def new_send_command(rd, command):
    """ POST a direct access command string to the instrument driver via uframe.

    The raw command is base64 encoded (prefixed with a '_base64:' marker) and
    wrapped in an 'EXECUTE_DIRECT' driver command. Raises on connection errors,
    timeouts, non-200 responses, or an error reported by the instrument driver.
    """
    # NOTE(review): the module-level import of get_c2_uframe_info is commented
    # out at the top of this file, which left this function raising NameError
    # at runtime; import it locally so the call resolves.
    from ooiservices.app.uframe.config import get_c2_uframe_info
    try:
        # uframe expects the command bytes base64-encoded with a marker prefix.
        b64_command = '_base64:' + base64.b64encode(command)
        json_data = {'command': 'EXECUTE_DIRECT', 'kwargs': {'data': b64_command}}
        uframe_url, timeout, timeout_read = get_c2_uframe_info()
        url = "/".join([uframe_url, rd, 'execute'])
        response = requests.post(url, json=json_data, timeout=(timeout, timeout_read), headers=_headers())
        # NOTE(review): previously a non-200 response was silently ignored;
        # raise so callers see the failure (they wrap and re-raise).
        if response.status_code != 200:
            message = '(%s) execute EXECUTE_DIRECT failed.' % str(response.status_code)
            raise Exception(message)
        # Evaluate response content for error info returned by the driver
        # (review 'value' list info in response_data).
        if response.content:
            response_data = json.loads(response.content)
            if response_data:
                status_code, status_type, status_message = _eval_POST_response_data(response_data, "")
                if status_code != 200:
                    raise Exception(status_message)
        return
    except ConnectionError:
        message = 'ConnectionError for instrument/api configuration values.'
        current_app.logger.info(message)
        raise Exception(message)
    except Timeout:
        message = 'Timeout for instrument/api configuration values.'
        current_app.logger.info(message)
        raise Exception(message)
    except Exception:
        raise
# Direct Access execute command.
@api.route('/c2/instrument/<string:reference_designator>/direct_access/execute', methods=['POST'])
@auth.login_required
@scope_required(u'command_control')
def c2_direct_access_execute(reference_designator):
    """ Execute direct access command.
    While in 'DRIVER_STATE_DIRECT_ACCESS', execute commands sent from direct access terminal window.
    Process direct access terminal commands:
        Receive content, send to instrument driver.
        [Upon receipt of response from instrument, forward response to UI.] Use sniffer.
    Note valid commands in direct_access_buttons list:
                "direct_access_buttons": [
                  "Interrupt", "Print Menu", "Print Metadata", "Read Data",
                  "Restore Factory Defaults", "Restore Settings", "Run Settings",
                  "Run Wiper", "Save Settings", "Set Clock>", "Set Date>", "Set>"
                ],
                "input_dict": {
                  "Interrupt": "!!!!!",
                  "Print Menu": "$mnu\r\n",
                  "Print Metadata": "$met\r\n",
                  "Read Data": "$get\r\n",
                  "Restore Factory Defaults": "$rfd\r\n",
                  "Restore Settings": "$rls\r\n",
                  "Run Settings": "$run\r\n",
                  "Run Wiper": "$mvs\r\n",
                  "Save Settings": "$sto\r\n",
                  "Set Clock>": "$clk ",
                  "Set Date>": "$date \r\n",
                  "Set>": "set "
                },
    POST request.data shall provide attribute 'command' or 'command_text':
        { "command": "Print Metadata", "title": "FLOR" }
        where valid command value is one of items in direct_access_buttons (key for input_dict).
        OR
        { "command_text": "$mnu\r\n", "title": "FLOR" }
    """
    rd = reference_designator
    TRIPS = '"""'
    NOT_NONE = 'NOT_NONE'
    state_DRIVER_STATE_DIRECT_ACCESS = 'DRIVER_STATE_DIRECT_ACCESS'
    target_state = state_DRIVER_STATE_DIRECT_ACCESS
    try:
        command_request = None
        command_text = None
        command_request_value = None
        using_command_request = True
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Get request data, process required items.
        # Sample request_data: {u'command': u'Nano On', u'title': u'instrument'}
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if not request.data:
            message = 'Direct access execute command requires request.data for POST.'
            raise Exception(message)
        request_data = json.loads(request.data)
        if request_data is None:
            message = 'Direct access execute command did not receive request data (%s).' % rd
            raise Exception(message)
        if 'title' not in request_data:
            message = 'Malformed direct access execute command, missing title (%s).' % rd
            raise Exception(message)
        if ('command' not in request_data) and ('command_text' not in request_data):
            message = 'Malformed direct access execute command, missing command or command text (%s).' % rd
            raise Exception(message)
        # Get title, and command_request (button name) or command_text (raw text).
        title = request_data['title']
        if 'command' in request_data:
            command_request = request_data['command']
            command_text = None
        elif 'command_text' in request_data:
            command_text = request_data['command_text']
            command_request = None
            using_command_request = False
        # Verify required fields are not None.
        if title is None:
            message = 'No direct access title data provided for instrument %s.' % rd
            raise Exception(message)
        if using_command_request:
            if command_request is None:
                message = 'No direct access command data provided for instrument %s.' % rd
                raise Exception(message)
        else:
            if command_text is None:
                message = 'No direct access command_text data provided for instrument %s.' % rd
                raise Exception(message)
        # Prepare to execute - get state, capabilities and status; the driver
        # must currently be in the direct access state.
        _state, _capabilities, result = direct_access_get_state_and_capabilities(rd)
        verify_state_and_capabilities(rd, _state, _capabilities,
                                      expected_state=state_DRIVER_STATE_DIRECT_ACCESS,
                                      expected_capability=NOT_NONE)
        if using_command_request:
            # Get valid direct access commands from direct_access_buttons.
            valid_commands = []
            if result:
                if 'direct_access_buttons' in result:
                    valid_commands = result['direct_access_buttons']
                else:
                    message = 'Instrument %s missing direct_access_buttons dictionary.' % rd
                    raise Exception(message)
            if not valid_commands:
                message = 'Instrument %s direct_access_buttons list is empty.' % rd
                raise Exception(message)
            # Verify command_request from request data is a valid command.
            if command_request not in valid_commands:
                message = 'Instrument %s command received \'%s\' not in list of available commands.' % \
                          (rd, command_request)
                raise Exception(message)
        # Verify direct_config is available and not empty; otherwise error.
        if 'direct_config' not in result['value']:
            message = 'Instrument %s has missing direct access direct_config list.' % rd
            raise Exception(message)
        if not result['value']['direct_config']:
            message = 'Instrument %s has empty direct access direct_config list.' % rd
            raise Exception(message)
        # Process direct_config (list of dicts); locate the entry matching title.
        eol = None
        located_requested_item = False
        for direct_config in result['value']['direct_config']:
            # If title in dictionary does not match requested title, go to next item.
            _title = None
            if 'title' in direct_config:
                _title = direct_config['title']
            if _title != title:
                continue
            # Identified item in direct_config; process item.
            located_requested_item = True
            # Get and check eol from direct_config dictionary.
            eol = None
            if 'eol' in direct_config:
                eol = direct_config['eol']
                if eol is None or not eol:
                    message = 'Instrument %s has invalid or empty eol: \'%r\'.' % (rd, eol)
                    raise Exception(message)
            # If processing a command_request, map the button name to its value.
            if using_command_request:
                if 'input_dict' not in direct_config:
                    message = 'Instrument %s has missing direct access input_dict dictionary.' % rd
                    raise Exception(message)
                command_request_values = direct_config['input_dict']
                if command_request not in command_request_values:
                    # Fixed: original format string had two %s placeholders but
                    # supplied only one argument, raising TypeError at runtime.
                    message = 'Instrument %s direct access command %s not found in direct_config.' % \
                              (rd, command_request)
                    raise Exception(message)
                # Get command_request_value from input_dict provided.
                command_request_value = command_request_values[command_request]
        # Was the requested title located in the direct_config? If not, error.
        if not located_requested_item:
            message = 'Instrument %s did not have a matching title \'%s\' in direct access direct_config.' % (rd, title)
            raise Exception(message)
        # Prepare command value to send to instrument.
        if using_command_request:
            # Using command button value.
            command_value = command_request_value
        else:
            try:
                # literal_eval of the triple-quoted text converts typed escape
                # sequences (e.g. '\r\n') into actual control characters.
                command_value = ast.literal_eval(TRIPS + command_text + TRIPS)
                if eol:
                    command_value += eol
            except Exception as err:
                message = 'Exception processing command value (literal_eval): %s' % str(err)
                raise Exception(message)
        # Execute - direct access command.
        try:
            new_send_command(rd, command_value)
        except Exception as err:
            message = 'Exception processing direct access command: %s' % str(err)
            raise Exception(message)
        # Final - verify _state and _capabilities match expected state and capabilities.
        verify_state_and_capabilities(rd, _state, _capabilities,
                                      expected_state=target_state,
                                      expected_capability=NOT_NONE)
        return jsonify(result)
    except Exception as err:
        message = '(%s) direct access execute exception: %s' % (rd, err.message)
        current_app.logger.info(message)
        return bad_request(err.message)
# Direct Access exit
@api.route('/c2/instrument/<string:reference_designator>/direct_access/exit', methods=['POST', 'GET'])
@auth.login_required
@scope_required(u'command_control')
def c2_direct_access_exit(reference_designator):
    """ Exit direct access, transition to instrument driver state. If error, raise exception.

    Exits 'DRIVER_STATE_DIRECT_ACCESS' by executing 'DRIVER_EVENT_STOP_DIRECT'.
    Returns the instrument status (json); on failure returns a bad request response.
    """
    rd = reference_designator
    state_DRIVER_STATE_DIRECT_ACCESS = 'DRIVER_STATE_DIRECT_ACCESS'
    capability_DRIVER_EVENT_STOP_DIRECT = 'DRIVER_EVENT_STOP_DIRECT'
    try:
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Prepare to execute - direct access exit command
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Validate reference_designator, get status and capabilities.
        _state, _capabilities, result = direct_access_get_state_and_capabilities(rd)
        # If not in direct access, log the request and return current status.
        if _state != state_DRIVER_STATE_DIRECT_ACCESS:
            message = 'Request to exit direct access for instrument %s, when in driver state %s' % (rd, _state)
            current_app.logger.info(message)
            return jsonify(result)
        # Verify current _state and _capabilities match expected state and capabilities.
        verify_state_and_capabilities(rd, _state, _capabilities,
                                      expected_state=state_DRIVER_STATE_DIRECT_ACCESS,
                                      expected_capability=capability_DRIVER_EVENT_STOP_DIRECT)
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Execute driver command 'DRIVER_EVENT_STOP_DIRECT' on upstream server
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        suffix = 'command=%22DRIVER_EVENT_STOP_DIRECT%22&timeout=60000'
        response = uframe_post_instrument_driver_command(reference_designator, 'execute', suffix)
        if response.status_code != 200:
            message = '(%s) execute %s failed.' % (str(response.status_code), capability_DRIVER_EVENT_STOP_DIRECT)
            raise Exception(message)
        # Verify command execution status by reviewing error information
        # returned from the instrument driver.
        if response.content:
            try:
                response_data = json.loads(response.content)
            except Exception:
                raise Exception('Direct access exit command - malformed response data; invalid json format.')
            if response_data:
                status_code, status_type, status_message = _eval_POST_response_data(response_data, "")
                if status_code != 200:
                    raise Exception(status_message)
        # Final - verify _state has changed from DRIVER_STATE_DIRECT_ACCESS; if not, error.
        _state, _capabilities, result = direct_access_get_state_and_capabilities(rd)
        if _state == state_DRIVER_STATE_DIRECT_ACCESS:
            message = 'The current state is \'DRIVER_STATE_DIRECT_ACCESS\'; failed to exit direct access.'
            raise Exception(message)
        return jsonify(result)
    except Exception as err:
        message = '(%s) direct access exit exception: %s' % (rd, err.message)
        current_app.logger.info(message)
        return bad_request(err.message)
# Direct Access sniffer
@api.route('/c2/instrument/<string:reference_designator>/direct_access/sniffer', methods=['POST', 'GET'])
@auth.login_required
@scope_required(u'command_control')
def c2_direct_access_sniffer(reference_designator):
    """ Sniff ip/port for direct access data; return one chunk as json {'msg': ...}.

    POST data must supply 'ip', 'port' and 'title', e.g.:
        {"ip": "128.6.240.37", "port": 54366, "title": "FLOR"}
    Sample request:
        http://localhost:4000/c2/instrument/RS10ENGC-XX00X-00-FLORDD001/direct_access/sniffer
    Sample response:
        { "msg": "3671820966.2507 : PA_HEARTBEAT : CRC OK : 'HB'" }
    The socket read is best effort: connection/read failures are reported in
    the 'msg' text rather than raised.
    """
    _data = None
    rd = reference_designator
    required_variables = ['ip', 'port', 'title']
    try:
        # Get request data, process required items.
        if not request.data:
            message = 'Direct access sniffer requires request data for POST.'
            raise Exception(message)
        request_data = json.loads(request.data)
        if request_data is None:
            message = 'Direct access sniffer request data is None. (%s).' % rd
            raise Exception(message)
        # Verify required items are provided in request.data and not empty.
        for item in required_variables:
            if item not in request_data:
                message = 'Malformed direct access sniffer request, missing %s (%s).' % (item, rd)
                raise Exception(message)
            # A None value is also falsy, so this single test covers both the
            # empty and the None case (the original checked None redundantly).
            if not request_data[item]:
                message = 'Malformed direct access sniffer request, %s is empty (%s).' % (item, rd)
                raise Exception(message)
        # Get ip and port ('title' is validated above but not used here).
        ip = request_data['ip']
        port = request_data['port']
        # Issue request to sniffer process; read a single chunk, best effort.
        s = None
        _data = "Sniffer Connection Failed\r\n"
        try:
            s = sock.socket(sock.AF_INET, sock.SOCK_STREAM)
            s.connect((ip, port))
            try:
                _data = s.recv(4096)
            except Exception:
                _data = "Error Receiving Data\r\n"
        except Exception:
            pass
        finally:
            # finally guarantees the socket is closed on every path.
            if s is not None:
                s.close()
        return jsonify(msg=_data)
    except Exception as err:
        message = '(%s) direct access exception: %s' % (reference_designator, err.message)
        current_app.logger.info(message)
        return bad_request(err.message)
#==================================================================
# SUPPORTING FUNCTIONS...
#==================================================================
def direct_access_get_state_and_capabilities(reference_designator):
    """ Get current state, capabilities and status information for an instrument.

    Overview:
        Get instrument status from the driver.
        Extract the driver state and capabilities from the status.
        Add a 'direct_access_buttons' dictionary to the status (best effort).
    Returns (state, capabilities, status); raises when status cannot be obtained.
    """
    state = None
    capabilities = []
    try:
        # Get instrument status.
        try:
            _status = _c2_get_instrument_driver_status(reference_designator)
        except Exception as err:
            raise Exception(err.message)
        if _status is None:
            # Fixed: the original message was truncated ('status failed to .').
            message = 'Instrument (%s) status request failed.' % reference_designator
            raise Exception(message)
        # Extract driver state and capabilities, when provided.
        if 'value' in _status:
            if 'state' in _status['value']:
                state = _status['value']['state']
            if 'capabilities' in _status['value']:
                # NOTE(review): capabilities arrives as a list of lists; only
                # the first element (the capability names) is used here.
                capabilities = _status['value']['capabilities'][0]
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Get 'direct_access_buttons' (list of button names for direct access);
        # best effort - an empty dict is used when direct_config is unavailable.
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        try:
            direct_config = None
            if _status['value']['direct_config']:
                direct_config = _status['value']['direct_config']
            temp = {}
            if direct_config:
                temp = get_direct_access_buttons(direct_config)
            _status['direct_access_buttons'] = temp
        except Exception:
            _status['direct_access_buttons'] = {}
        return state, capabilities, _status
    except Exception as err:
        raise Exception(err.message)
def verify_state_and_capabilities(reference_designator, state, capabilities, expected_state, expected_capability):
    """ Verify current state and capabilities match expected state and capability. Raise if not.

    :param reference_designator: instrument reference designator (used in error messages)
    :param state: current driver state value
    :param capabilities: capabilities reported for the current state
    :param expected_state: required state, or NOT_NONE to skip the exact-state comparison
    :param expected_capability: required capability, or NOT_NONE to require only that
        some capabilities exist
    :raises Exception: when any expectation is not satisfied
    """
    # Sentinel meaning "any value is acceptable as long as one is present".
    NOT_NONE = 'NOT_NONE'
    try:
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Verify state
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # If expected_state is None, raise exception
        if expected_state is None:
            message = 'Instrument (%s) not in %s, current state is %s.' % \
                                (reference_designator, expected_state, state)
            raise Exception(message)

        # When NOT_NONE, any current state is acceptable; otherwise it must match.
        if expected_state != NOT_NONE:
            # If current state is not the state expected, raise exception
            if state != expected_state:
                message = 'Instrument (%s) not in %s state, current state is %s.' % \
                                    (reference_designator, expected_state, state)
                raise Exception(message)

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Verify capability
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # If no capabilities are provided for the current state, raise exception
        if not capabilities:
            message = 'Instrument (%s) did not return capabilities; current state %s.' % \
                                (reference_designator, state)
            raise Exception(message)

        # NOT_NONE means "any capability suffices"; the inner emptiness check is
        # redundant (already raised above) but preserved as written.
        if expected_capability == NOT_NONE:
            if not capabilities:
                message = 'Instrument (%s) does not have any capabilities; current state %s.' % \
                                    (reference_designator, state)
                raise Exception(message)
        # Otherwise the specific expected capability must be present.
        elif expected_capability not in capabilities:
            message = 'Instrument (%s) does not have %s capability; current state %s.' % \
                                (reference_designator, expected_capability, state)
            raise Exception(message)

    except Exception:
        raise
def send_command(rd, command, ip, data):
    """ Send command to rd using ip and data [port]. Sample command: '$met\r\n'

    :param rd: reference designator (used only in error messages)
    :param command: raw command string to transmit
    :param ip: host to connect to
    :param data: TCP port number (historically named 'data' by callers)
    :raises Exception: on connection error, timeout or send failure
    """
    c = None
    try:
        c = sock.socket(sock.AF_INET, sock.SOCK_STREAM)
        c.connect((ip, data))
        c.sendall(command)
        c.shutdown(sock.SHUT_WR)
        return
    except ConnectionError:
        message = 'ConnectionError for direct access during send_command.'
        current_app.logger.info(message)
        raise Exception(message)
    except Timeout:
        message = 'Timeout for direct access during send_command.'
        current_app.logger.info(message)
        raise Exception(message)
    except Exception as err:
        message = 'Instrument %s exception during send command %s. Error: %s' % (rd, command, str(err))
        current_app.logger.info(message)
        raise
    finally:
        # Always release the socket; the previous version leaked it whenever
        # connect/sendall/shutdown raised before reaching close().
        if c is not None:
            c.close()
def get_direct_access_buttons(direct_config):
    """ Get sorted direct access button names from instrument 'direct_config'.

    Sample Input:
        "direct_config": [
            {
              "character_delay": 0.0,
              "data": 40291,
              "eol": "\r\n",
              "input_dict": {
                "Interrupt": "!!!!!",
                "Print Menu": "$mnu\r\n",
                "Print Metadata": "$met\r\n",
                "Read Data": "$get\r\n",
                ...
              },
              "ip": "uft20",
              "sniffer": 60641,
              "title": "FLOR"
            }
        ],
        . . .
    Sample Output:
        ['Interrupt', 'Print Menu', 'Print Metadata', 'Read Data', ...]

    :param direct_config: list whose first element may hold an 'input_dict' mapping
        button label -> instrument command
    :returns: sorted list of button labels; empty list when unavailable
    :raises Exception: only on unexpected errors (logged before re-raise)
    """
    result = []
    try:
        # If no direct_config, return empty list.
        if not direct_config:
            return result
        # If direct_config does not have attribute 'input_dict', return empty list.
        if 'input_dict' not in direct_config[0]:
            return result
        # If direct_config attribute 'input_dict' is empty, return empty list.
        if not direct_config[0]['input_dict']:
            return result
        # Create sorted list of direct access button names. sorted() works for
        # both Python 2 lists and Python 3 dict views, unlike the previous
        # keys()/.sort() combination (dict views have no sort()).
        result = sorted(direct_config[0]['input_dict'].keys())
        return result
    except Exception as err:
        current_app.logger.info(err.message)
        raise
#========================================================================================
"""
GET http://host:12572/instrument/api/RS10ENGC-XX00X-00-PARADA001/lock
{ "locked-by": null }
Set Lock
POST http://host:12572/instrument/api/RS10ENGC-XX00X-00-PARADA001/lock {'key': 'jdoe@oceans.org'}
{ "locked-by": "jdoe@oceans.org" }
Release Lock
POST http://host:12572/instrument/api/RS10ENGC-XX00X-00-PARADA001/unlock
{ "locked-by": null}
"""
# Direct Access Instrument get lock
# todo enable auth and scope
@api.route('/c2/instrument/<string:reference_designator>/lock', methods=['GET'])
@auth.login_required
@scope_required(u'command_control')
def c2_instrument_get_lock(reference_designator):
    """ Get instrument lock status.

    Returns json of the uframe lock response, e.g. {"locked-by": null},
    or a bad_request response on failure.
    """
    status = {}
    debug = False
    rd = reference_designator
    try:
        if debug: print '\n debug -- c2_instrument_get_lock entered...'
        status = get_lock_status(rd)
        return jsonify(status)
    except Exception as err:
        message = '(%s) exception: %s' % (reference_designator, err.message)
        if debug: print '\n exception: ', message
        current_app.logger.info(message)
        return bad_request(err.message)
# todo enable auth and scope
@api.route('/c2/instrument/<string:reference_designator>/lock', methods=['POST'])
#@auth.login_required
#@scope_required(u'command_control')
def c2_instrument_lock(reference_designator):
    """ Lock instrument.

    POSTs a lock request to uframe for this instrument, then re-reads and
    returns the resulting lock status as json.

    NOTE(review): the lock owner below is hard-coded rather than derived from
    the authenticated user -- confirm before enabling auth/scope decorators.
    """
    debug = True
    rd = reference_designator
    #status = {}
    current_user_data = {"locked-by": "admin@ooi.rutgers.edu", "key": "admin@ooi.rutgers.edu"}
    try:
        if debug: print '\n debug -- c2_instrument_lock entered...'
        # Fetch current lock status first (raises on uframe errors).
        try:
            result = get_lock_status(rd)
            if debug: print '\n result: ', result
        except:
            raise
        payload = json.dumps(current_user_data)
        print '\n payload: ', payload
        response = uframe_post_instrument_lock(rd,'lock', payload)
        if response.status_code != 201:
            if debug: print '\n (%d) failed to post lock...' % response.status_code
            message = 'Failed to lock instrument, status code: %d' % response.status_code
            raise Exception(message)

        # Parse the POST response body (informational only; status is re-read below).
        answer = None
        if response.content:
            try:
                answer = json.loads(response.content)
            except:
                message = 'Failed to parse malformed json.'
                if debug: print '\n exception: ', message
                raise Exception(message)
        if debug: print '\n answer: ', answer

        # Re-read lock status so the caller sees the post-lock state.
        try:
            status = get_lock_status(rd)
            if debug: print '\n status: ', status
        except:
            raise
        if debug: print '\n check status: ', status
        return jsonify(status)
    except Exception as err:
        message = '(%s) exception: %s' % (reference_designator, err.message)
        if debug: print '\n exception: ', message
        current_app.logger.info(message)
        return bad_request(err.message)
# todo enable auth and scope
@api.route('/c2/instrument/<string:reference_designator>/unlock', methods=['POST'])
#@auth.login_required
#@scope_required(u'command_control')
def c2_instrument_unlock(reference_designator):
    """ Unlock instrument.

    POSTs an unlock request ({"locked-by": null}) to uframe and returns the
    parsed POST response as json.
    """
    debug = False
    rd = reference_designator
    status = {}
    current_user_data = {"locked-by": None}
    try:
        if debug: print '\n debug -- c2_instrument_lock entered...'
        # Fetch current lock status first (raises on uframe errors).
        try:
            result = get_lock_status(rd)
            print '\n result: ', result
        except:
            raise
        payload = json.dumps(current_user_data)
        print '\n payload: ', payload
        response = uframe_post_instrument_lock(rd,'unlock', payload)
        if response.status_code != 201:
            print '\n (%d) failed to post lock...' % response.status_code
        # NOTE(review): a non-201 status is only printed (not raised), and a
        # json parse failure below sets 'message' without raising -- in both
        # cases jsonify(None) may be returned. Confirm this is intended.
        status = None
        if response.content:
            try:
                status = json.loads(response.content)
            except:
                message = 'Failed to parse malformed json.'
                print '\n exception: ', message
        print '\n answer: ', status
        return jsonify(status)
    except Exception as err:
        message = '(%s) exception: %s' % (reference_designator, err.message)
        if debug: print '\n exception: ', message
        current_app.logger.info(message)
        return bad_request(err.message)
def _post_headers():
""" urlencoded values for uframe POST.
"""
return {"Content-Type": "application/x-www-form-urlencoded"}
def uframe_post_instrument_lock(reference_designator, command, data):
    """ Return the uframe response of instrument driver command and suffix provided for POST.
    Example of suffix = '?command=%22DRIVER_EVENT_STOP_AUTOSAMPLE%22&timeout=60000'

    :param reference_designator: instrument reference designator
    :param command: uframe endpoint suffix ('lock' or 'unlock')
    :param data: payload string for the POST body
    :returns: requests.Response from uframe
    :raises Exception: on connection error or timeout
    """
    # NOTE(review): 'debug' is currently unused and the url print below is
    # unconditional.
    debug = False
    try:
        uframe_url, timeout, timeout_read = get_c2_uframe_info()
        url = "/".join([uframe_url, reference_designator, command])
        #url = "?".join([url, suffix])
        print '\n debug -- url: ', url
        response = requests.post(url, data=data, timeout=(timeout, timeout_read), headers=_post_headers())
        return response
    except ConnectionError:
        message = 'ConnectionError for post instrument driver command.'
        current_app.logger.info(message)
        raise Exception(message)
    except Timeout:
        message = 'Timeout for post instrument driver command.'
        current_app.logger.info(message)
        raise Exception(message)
    except Exception:
        raise
def uframe_get_instrument_lock(reference_designator):
    """ Return the uframe response of /instrument/api/reference_designator/lock.

    :param reference_designator: instrument reference designator
    :returns: requests.Response from uframe
    :raises Exception: on connection error or timeout
    """
    try:
        uframe_url, timeout, timeout_read = get_c2_uframe_info()
        url = "/".join([uframe_url, reference_designator, 'lock'])
        print '\n debug -- url: ', url
        response = requests.get(url, timeout=(timeout, timeout_read))
        return response
    except ConnectionError:
        message = 'ConnectionError for get instrument lock.'
        current_app.logger.info(message)
        raise Exception(message)
    except Timeout:
        message = 'Timeout for get instrument lock.'
        current_app.logger.info(message)
        raise Exception(message)
    except Exception:
        raise
def get_lock_status(rd):
    """ Get instrument lock response, if error raise exception. Return dict of response.
    Sample output:
        {
          "locked-by": null
        }

    :param rd: instrument reference designator
    :returns: dict parsed from the uframe lock response (contains 'locked-by')
    :raises Exception: on uframe error status, empty response content,
        malformed json, or missing 'locked-by' attribute
    """
    status = {}
    try:
        # Get instrument lock status from uframe
        response = uframe_get_instrument_lock(rd)
        if response.status_code != 200:
            message = '(%d) Failed to get instrument %s lock status.' % (response.status_code, rd)
            raise Exception(message)

        # If response.content is empty or None, raise exception.
        # (a single falsy check covers both the empty and the None cases)
        if not response.content:
            message = 'Get instrument %s lock response has empty or None response content.' % rd
            raise Exception(message)

        # Parse response.content into json; if error raise exception.
        try:
            status = json.loads(response.content)
        except:
            message = 'Get instrument %s lock response has malformed data; not in valid json format.' % rd
            raise Exception(message)

        # Verify required attribute 'locked-by' is provided in json
        if status:
            if 'locked-by' not in status:
                message = 'required attribute \'locked-by\' missing from get instrument %s lock response.' % rd
                raise Exception(message)
        return status
    except Exception as err:
        message = err.message
        current_app.logger.info(message)
        raise Exception(message)
def get_c2_uframe_info(type='instrument'):
    """ Returns uframe instrument/api specific configuration information. (port 12572)

    :param type: 'instrument' selects UFRAME_INST_BASE, anything else selects
        UFRAME_PLAT_BASE. (Parameter name shadows the builtin 'type'; kept
        unchanged for caller compatibility.)
    :returns: tuple of (uframe_url, connect_timeout, read_timeout)
    :raises Exception: on connection error or timeout
    """
    try:
        timeout, timeout_read = get_uframe_timeout_info()
        if type == 'instrument':
            uframe_url = "".join([current_app.config['UFRAME_INST_URL'], current_app.config['UFRAME_INST_BASE']])
        else:
            uframe_url = "".join([current_app.config['UFRAME_INST_URL'], current_app.config['UFRAME_PLAT_BASE']])
        return uframe_url, timeout, timeout_read
    except ConnectionError:
        message = 'ConnectionError for instrument/api configuration values.'
        current_app.logger.info(message)
        raise Exception(message)
    except Timeout:
        message = 'Timeout for instrument/api configuration values.'
        current_app.logger.info(message)
        raise Exception(message)
    except Exception:
        raise
# uframe timeout information
def get_uframe_timeout_info():
    """ Get uframe timeout configuration information.

    :returns: tuple (connect_timeout, read_timeout) from the app config
    :raises Exception: when either config key is missing (re-raised below)
    """
    try:
        timeout = current_app.config['UFRAME_TIMEOUT_CONNECT']
        timeout_read = current_app.config['UFRAME_TIMEOUT_READ']
        return timeout, timeout_read
    except:
        message = 'Unable to locate UFRAME_TIMEOUT_CONNECT or UFRAME_TIMEOUT_READ in config file.'
        current_app.logger.info(message)
raise Exception(message) | {
"content_hash": "8815827e96cb8e81632c6afb78e8367f",
"timestamp": "",
"source": "github",
"line_count": 1068,
"max_line_length": 175,
"avg_line_length": 42.52996254681648,
"alnum_prop": 0.532825503060191,
"repo_name": "Bobfrat/ooi-ui-services",
"id": "9f5a207ac092e8a9c4269d0855fc9dfca502c082",
"size": "45444",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ooiservices/app/main/c2_direct_access.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1503504"
}
],
"symlink_target": ""
} |
"""General audio exception class for exceptions thrown in the audio module."""
# ---------------------------------------------------------------------------
# AudioException class
# ---------------------------------------------------------------------------
class AudioException(Exception):
    """Raised by the audio module when an audio-related error occurs."""
| {
"content_hash": "a27510fe8cea1ee49b63c69736497c3e",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 78,
"avg_line_length": 43.125,
"alnum_prop": 0.40869565217391307,
"repo_name": "missionpinball/mpf-mc",
"id": "aee908513877bb39e0f9375de1e418c82ce908ab",
"size": "345",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "mpfmc/core/audio/audio_exception.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3434"
},
{
"name": "Cython",
"bytes": "44814"
},
{
"name": "Dockerfile",
"bytes": "1441"
},
{
"name": "Makefile",
"bytes": "262"
},
{
"name": "Python",
"bytes": "1198826"
},
{
"name": "Shell",
"bytes": "829"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from __future__ import absolute_import
from requests.exceptions import ConnectionError, SSLError
import logging
import os
import re
import yaml
import six
from ..project import Project
from ..service import ConfigError
from .docopt_command import DocoptCommand
from .utils import call_silently, is_mac, is_ubuntu
from .docker_client import docker_client
from . import verbose_proxy
from . import errors
from .. import __version__
# Module-level logger, named after this module for hierarchical filtering.
log = logging.getLogger(__name__)
class Command(DocoptCommand):
    """Base class for compose CLI commands.

    Resolves the compose config file, project name and Docker client, and
    translates low-level connection failures into user-friendly errors
    before dispatching to the concrete command handler.
    """
    base_dir = '.'

    def dispatch(self, *args, **kwargs):
        """Dispatch to the docopt handler, mapping SSL/connection failures
        onto friendly error types."""
        try:
            super(Command, self).dispatch(*args, **kwargs)
        except SSLError as e:
            # Fixed the Python-2-only 'except SSLError, e' syntax; the 'as'
            # form is used everywhere else in this class and works on py2/py3.
            raise errors.UserError('SSL error: %s' % e)
        except ConnectionError:
            if call_silently(['which', 'docker']) != 0:
                if is_mac():
                    raise errors.DockerNotFoundMac()
                elif is_ubuntu():
                    raise errors.DockerNotFoundUbuntu()
                else:
                    raise errors.DockerNotFoundGeneric()
            elif call_silently(['which', 'boot2docker']) == 0:
                raise errors.ConnectionErrorBoot2Docker()
            else:
                raise errors.ConnectionErrorGeneric(self.get_client().base_url)

    def perform_command(self, options, handler, command_options):
        """Build the Project for the parsed options and invoke the handler."""
        if options['COMMAND'] == 'help':
            # Skip looking up the compose file.
            handler(None, command_options)
            return

        if 'FIG_FILE' in os.environ:
            # log.warn is a deprecated alias; use warning (consistent with below)
            log.warning('The FIG_FILE environment variable is deprecated.')
            log.warning('Please use COMPOSE_FILE instead.')

        explicit_config_path = options.get('--file') or os.environ.get('COMPOSE_FILE') or os.environ.get('FIG_FILE')
        project = self.get_project(
            self.get_config_path(explicit_config_path),
            project_name=options.get('--project-name'),
            verbose=options.get('--verbose'))

        handler(project, command_options)

    def get_client(self, verbose=False):
        """Return a Docker client; wrap it in a logging proxy when verbose."""
        client = docker_client()
        if verbose:
            version_info = six.iteritems(client.version())
            log.info("Compose version %s", __version__)
            log.info("Docker base_url: %s", client.base_url)
            log.info("Docker version: %s",
                     ", ".join("%s=%s" % item for item in version_info))
            return verbose_proxy.VerboseProxy('docker', client)
        return client

    def get_config(self, config_path):
        """Load and parse the YAML compose file at config_path."""
        try:
            with open(config_path, 'r') as fh:
                return yaml.safe_load(fh)
        except IOError as e:
            raise errors.UserError(six.text_type(e))

    def get_project(self, config_path, project_name=None, verbose=False):
        """Construct a Project from the config file and a Docker client."""
        try:
            return Project.from_config(
                self.get_project_name(config_path, project_name),
                self.get_config(config_path),
                self.get_client(verbose=verbose))
        except ConfigError as e:
            raise errors.UserError(six.text_type(e))

    def get_project_name(self, config_path, project_name=None):
        """Derive the project name: explicit arg, env var, or config dir name."""
        def normalize_name(name):
            # Project names are restricted to lowercase alphanumerics.
            return re.sub(r'[^a-z0-9]', '', name.lower())

        if 'FIG_PROJECT_NAME' in os.environ:
            log.warning('The FIG_PROJECT_NAME environment variable is deprecated.')
            log.warning('Please use COMPOSE_PROJECT_NAME instead.')

        project_name = project_name or os.environ.get('COMPOSE_PROJECT_NAME') or os.environ.get('FIG_PROJECT_NAME')
        if project_name is not None:
            return normalize_name(project_name)

        project = os.path.basename(os.path.dirname(os.path.abspath(config_path)))
        if project:
            return normalize_name(project)

        return 'default'

    def get_config_path(self, file_path=None):
        """Locate the compose file, warning about deprecated/ambiguous names."""
        if file_path:
            return os.path.join(self.base_dir, file_path)

        supported_filenames = [
            'docker-compose.yml',
            'docker-compose.yaml',
            'fig.yml',
            'fig.yaml',
        ]

        def expand(filename):
            return os.path.join(self.base_dir, filename)

        candidates = [filename for filename in supported_filenames if os.path.exists(expand(filename))]

        if len(candidates) == 0:
            raise errors.ComposeFileNotFound(supported_filenames)

        winner = candidates[0]

        if len(candidates) > 1:
            log.warning("Found multiple config files with supported names: %s", ", ".join(candidates))
            log.warning("Using %s\n", winner)

        if winner == 'docker-compose.yaml':
            log.warning("Please be aware that .yml is the expected extension "
                        "in most cases, and using .yaml can cause compatibility "
                        "issues in future.\n")

        if winner.startswith("fig."):
            log.warning("%s is deprecated and will not be supported in future. "
                        "Please rename your config file to docker-compose.yml\n" % winner)

        return expand(winner)
| {
"content_hash": "8985d0c9dbe92060c3789ce3864473b6",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 116,
"avg_line_length": 36.707142857142856,
"alnum_prop": 0.5958357657131738,
"repo_name": "czchen/debian-docker-compose",
"id": "67b77f31b57026b2a2c22800f139b59ddfbf9bb0",
"size": "5139",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "compose/cli/command.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "163160"
},
{
"name": "Shell",
"bytes": "11377"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import argparse
import numpy as np
import neuroglancer
import neuroglancer.cli
from example import add_example_layers
if __name__ == '__main__':
    # Parse the standard neuroglancer server options (bind address, port, ...).
    ap = argparse.ArgumentParser()
    neuroglancer.cli.add_server_arguments(ap)
    args = ap.parse_args()
    neuroglancer.cli.handle_server_arguments(args)
    # Specifying a token disables credentials by default.  Specify
    # `allow_credentials=True` to allow credentials, but in that case you must
    # specify a secure/unguessable token to avoid exposing the credentials.
    viewer = neuroglancer.Viewer(token='mytoken')
    with viewer.txn() as s:
        a, b = add_example_layers(s)
    # Printing the viewer shows its URL, which embeds the fixed token.
    print(viewer)
| {
"content_hash": "e03f19fce9973f36af6f1fb7d2155998",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 78,
"avg_line_length": 28.12,
"alnum_prop": 0.7155049786628734,
"repo_name": "google/neuroglancer",
"id": "c536036b156d40542542db4399ef4d00ec665d6d",
"size": "703",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python/examples/example_fixed_token.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2086"
},
{
"name": "C++",
"bytes": "112497"
},
{
"name": "CMake",
"bytes": "3180"
},
{
"name": "CSS",
"bytes": "87569"
},
{
"name": "Dockerfile",
"bytes": "2376"
},
{
"name": "Go",
"bytes": "20299"
},
{
"name": "HTML",
"bytes": "727"
},
{
"name": "JavaScript",
"bytes": "52505"
},
{
"name": "Jupyter Notebook",
"bytes": "10455"
},
{
"name": "Python",
"bytes": "456817"
},
{
"name": "Shell",
"bytes": "3014"
},
{
"name": "TypeScript",
"bytes": "3424679"
}
],
"symlink_target": ""
} |
'''
Analysis class unit test
'''
import sys
sys.path.append('../src/')
import unittest
from data import Data
from account import Account
class AnalysisTest(unittest.TestCase):
    """Unit tests for the analysis module's suggestion logic."""

    def test_get_suggestion(self):
        # suggestion function test here
        # A def with a comment-only body is a SyntaxError; 'pass' keeps this
        # placeholder valid until a real assertion is written.
        pass
if __name__ == '__main__':
    # Run the full test suite when executed directly.
    unittest.main()
| {
"content_hash": "85ea7f3a197bea5910c296e6f0f357e4",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 39,
"avg_line_length": 20,
"alnum_prop": 0.69,
"repo_name": "ryanshim/cpsc362coinbaseTradingBot",
"id": "fdc6e4edbe8d5bc3d9b4710bf8b3cbac4257c6fe",
"size": "300",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_analysis.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "49338"
}
],
"symlink_target": ""
} |
"""Certbot main entry point."""
# pylint: disable=too-many-lines
from contextlib import contextmanager
import functools
import logging.handlers
import sys
from typing import Generator
from typing import IO
from typing import Iterable
from typing import List
from typing import Optional
from typing import Tuple
from typing import Union
import configobj
import josepy as jose
import zope.component
import zope.interface
from acme import errors as acme_errors
import certbot
from certbot import configuration
from certbot import crypto_util
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot._internal import account
from certbot._internal import cert_manager
from certbot._internal import cli
from certbot._internal import client
from certbot._internal import constants
from certbot._internal import eff
from certbot._internal import hooks
from certbot._internal import log
from certbot._internal import renewal
from certbot._internal import reporter
from certbot._internal import snap_config
from certbot._internal import storage
from certbot._internal import updater
from certbot._internal.display import obj as display_obj
from certbot._internal.display import util as internal_display_util
from certbot._internal.plugins import disco as plugins_disco
from certbot._internal.plugins import selection as plug_sel
from certbot.compat import filesystem
from certbot.compat import misc
from certbot.compat import os
from certbot.display import ops as display_ops
from certbot.display import util as display_util
from certbot.plugins import enhancements
# Message raised when the user aborts an interactive prompt.
USER_CANCELLED = ("User chose to cancel the operation and may "
                  "reinvoke the client.")
# Module-level logger for certbot's main entry point.
logger = logging.getLogger(__name__)
def _suggest_donation_if_appropriate(config):
    """Potentially suggest a donation to support Certbot.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :returns: `None`
    :rtype: None

    """
    assert config.verb != "renew"
    # Staging runs (--dry-run implies --staging) issue no real certificate,
    # so skip the suggestion entirely.
    if config.staging:
        return
    donation_msg = (
        "If you like Certbot, please consider supporting our work by:\n"
        " * Donating to ISRG / Let's Encrypt: https://letsencrypt.org/donate\n"
        " * Donating to EFF: https://eff.org/donate-le"
    )
    # Defer the notification until process exit so it appears last.
    util.atexit_register(display_util.notification, donation_msg, pause=False)
def _get_and_save_cert(le_client, config, domains=None, certname=None, lineage=None):
    """Authenticate and enroll certificate.

    This method finds the relevant lineage, figures out what to do with it,
    then performs that action. Includes calls to hooks, various reports,
    checks, and requests for user input.

    :param le_client: client used to obtain or renew the certificate
    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param domains: List of domain names to get a certificate. Defaults to `None`
    :type domains: `list` of `str`

    :param certname: Name of new certificate. Defaults to `None`
    :type certname: str

    :param lineage: Certificate lineage object. Defaults to `None`
    :type lineage: storage.RenewableCert

    :returns: the issued certificate or `None` if doing a dry run
    :rtype: storage.RenewableCert or None

    :raises errors.Error: if certificate could not be obtained

    """
    hooks.pre_hook(config)
    try:
        if lineage is not None:
            # Renewal, where we already know the specific lineage we're
            # interested in
            display_util.notify(
                "{action} for {domains}".format(
                    action="Simulating renewal of an existing certificate"
                    if config.dry_run else "Renewing an existing certificate",
                    domains=internal_display_util.summarize_domain_list(domains or lineage.names())
                )
            )
            renewal.renew_cert(config, domains, le_client, lineage)
        else:
            # TREAT AS NEW REQUEST
            assert domains is not None
            display_util.notify(
                "{action} for {domains}".format(
                    action="Simulating a certificate request" if config.dry_run else
                    "Requesting a certificate",
                    domains=internal_display_util.summarize_domain_list(domains)
                )
            )
            lineage = le_client.obtain_and_enroll_certificate(domains, certname)
            if lineage is False:
                raise errors.Error("Certificate could not be obtained")
            if lineage is not None:
                hooks.deploy_hook(config, lineage.names(), lineage.live_dir)
    finally:
        # The post hook runs even when issuance/renewal failed, mirroring
        # the unconditional pre_hook above.
        hooks.post_hook(config)

    return lineage
def _handle_unexpected_key_type_migration(config: configuration.NamespaceConfig,
                                          cert: storage.RenewableCert) -> None:
    """
    Ensure the user does not implicitly migrate an existing lineage's key from
    one type to another: when a certificate already exists for this lineage,
    a key-type change is only honored if both --key-type and --cert-name were
    given explicitly on the command line.

    :param config: Current configuration provided by the client
    :param cert: Matching certificate that could be renewed
    :raises errors.Error: when the key type would change implicitly
    """
    explicitly_requested = (cli.set_by_cli("key_type")
                            and cli.set_by_cli("certname"))
    if explicitly_requested:
        return
    requested_type = config.key_type.upper()
    existing_type = cert.private_key_type.upper()
    if requested_type == existing_type:
        return
    msg = ('Are you trying to change the key type of the certificate named {0} '
           'from {1} to {2}? Please provide both --cert-name and --key-type on '
           'the command line confirm the change you are trying to make.')
    raise errors.Error(msg.format(cert.lineagename, existing_type, requested_type))
def _handle_subset_cert_request(config: configuration.NamespaceConfig,
                                domains: List[str],
                                cert: storage.RenewableCert
                                ) -> Tuple[str, Optional[storage.RenewableCert]]:
    """Figure out what to do if a previous cert had a subset of the names now requested

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param domains: List of domain names
    :type domains: `list` of `str`

    :param cert: Certificate object
    :type cert: storage.RenewableCert

    :returns: Tuple of (str action, cert_or_None) as per _find_lineage_for_domains_and_certname
              action can be: "newcert" | "renew" | "reinstall"
    :rtype: `tuple` of `str`

    :raises errors.Error: if the user declines to expand the certificate

    """
    # Refuse implicit key-type changes before prompting about expansion.
    _handle_unexpected_key_type_migration(config, cert)

    existing = ", ".join(cert.names())
    question = (
        "You have an existing certificate that contains a portion of "
        "the domains you requested (ref: {0}){br}{br}It contains these "
        "names: {1}{br}{br}You requested these names for the new "
        "certificate: {2}.{br}{br}Do you want to expand and replace this existing "
        "certificate with the new certificate?"
    ).format(cert.configfile.filename,
             existing,
             ", ".join(domains),
             br=os.linesep)
    # --expand and --renew-by-default both skip the interactive confirmation.
    if config.expand or config.renew_by_default or display_util.yesno(
        question, "Expand", "Cancel", cli_flag="--expand", force_interactive=True):
        return "renew", cert
    display_util.notify(
        "To obtain a new certificate that contains these names without "
        "replacing your existing certificate for {0}, you must use the "
        "--duplicate option.{br}{br}"
        "For example:{br}{br}{1} --duplicate {2}".format(
            existing,
            cli.cli_command, " ".join(sys.argv[1:]),
            br=os.linesep
        ))
    raise errors.Error(USER_CANCELLED)
def _handle_identical_cert_request(config: configuration.NamespaceConfig,
                                   lineage: storage.RenewableCert,
                                   ) -> Tuple[str, Optional[storage.RenewableCert]]:
    """Figure out what to do if a lineage has the same names as a previously obtained one

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param lineage: Certificate lineage object
    :type lineage: storage.RenewableCert

    :returns: Tuple of (str action, cert_or_None) as per _find_lineage_for_domains_and_certname
              action can be: "newcert" | "renew" | "reinstall"
    :rtype: `tuple` of `str`

    :raises errors.Error: if the user cancels the operation

    """
    # Refuse implicit key-type changes before considering reuse/renewal.
    _handle_unexpected_key_type_migration(config, lineage)

    # A lineage that was never deployed just needs (re)installation.
    if not lineage.ensure_deployed():
        return "reinstall", lineage
    if renewal.should_renew(config, lineage):
        return "renew", lineage
    if config.reinstall:
        # Set with --reinstall, force an identical certificate to be
        # reinstalled without further prompting.
        return "reinstall", lineage
    question = (
        "You have an existing certificate that has exactly the same "
        "domains or certificate name you requested and isn't close to expiry."
        "{br}(ref: {0}){br}{br}What would you like to do?"
    ).format(lineage.configfile.filename, br=os.linesep)

    # NOTE(review): keep_opt is only assigned for the "run"/"certonly" verbs;
    # any other verb would NameError below -- presumably unreachable, confirm.
    if config.verb == "run":
        keep_opt = "Attempt to reinstall this existing certificate"
    elif config.verb == "certonly":
        keep_opt = "Keep the existing certificate for now"
    choices = [keep_opt,
               "Renew & replace the certificate (may be subject to CA rate limits)"]

    response = display_util.menu(question, choices,
                                 default=0, force_interactive=True)
    if response[0] == display_util.CANCEL:
        # TODO: Add notification related to command-line options for
        # skipping the menu for this case.
        raise errors.Error(
            "Operation canceled. You may re-run the client.")
    if response[1] == 0:
        return "reinstall", lineage
    elif response[1] == 1:
        return "renew", lineage
    raise AssertionError('This is impossible')
def _find_lineage_for_domains(config, domains):
    """Determine whether there are duplicated names and how to handle
    them (renew, reinstall, newcert, or raising an error to stop
    the client run if the user chooses to cancel the operation when
    prompted).

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param domains: List of domain names
    :type domains: `list` of `str`

    :returns: Two-element tuple containing desired new-certificate behavior as
              a string token ("reinstall", "renew", or "newcert"), plus either
              a RenewableCert instance or `None` if renewal shouldn't occur.
    :rtype: `tuple` of `str` and :class:`storage.RenewableCert` or `None`

    :raises errors.Error: If the user would like to rerun the client again.

    """
    # Considering the possibility that the requested certificate is
    # related to an existing certificate.  (config.duplicate, which
    # is set with --duplicate, skips all of this logic and forces any
    # kind of certificate to be obtained with renewal = False.)
    if config.duplicate:
        return "newcert", None
    # TODO: Also address superset case
    ident_names_cert, subset_names_cert = cert_manager.find_duplicative_certs(config, domains)
    # XXX ^ schoen is not sure whether that correctly reads the systemwide
    # configuration file.
    if ident_names_cert is None and subset_names_cert is None:
        return "newcert", None

    if ident_names_cert is not None:
        return _handle_identical_cert_request(config, ident_names_cert)
    elif subset_names_cert is not None:
        return _handle_subset_cert_request(config, domains, subset_names_cert)
    # NOTE(review): unreachable -- one of the branches above always returns.
    return None, None
def _find_cert(config, domains, certname):
    """Finds an existing certificate object given domains and/or a certificate name.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param domains: List of domain names
    :type domains: `list` of `str`

    :param certname: Name of certificate
    :type certname: str

    :returns: Two-element tuple of a boolean that indicates if this function should be
              followed by a call to fetch a certificate from the server, and either a
              RenewableCert instance or None.
    :rtype: `tuple` of `bool` and :class:`storage.RenewableCert` or `None`

    """
    action, lineage = _find_lineage_for_domains_and_certname(
        config, domains, certname)
    reinstalling = (action == "reinstall")
    if reinstalling:
        logger.info("Keeping the existing certificate")
    # A fetch from the server is only needed when we are not reinstalling.
    return not reinstalling, lineage
def _find_lineage_for_domains_and_certname(config: configuration.NamespaceConfig,
                                           domains: List[str],
                                           certname: str
                                           ) -> Tuple[str, Optional[storage.RenewableCert]]:
    """Find appropriate lineage based on given domains and/or certname.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param domains: List of domain names
    :type domains: `list` of `str`

    :param certname: Name of certificate
    :type certname: str

    :returns: Two-element tuple containing desired new-certificate behavior as
        a string token ("reinstall", "renew", or "newcert"), plus either
        a RenewableCert instance or None if renewal should not occur.
    :rtype: `tuple` of `str` and :class:`storage.RenewableCert` or `None`

    :raises errors.Error: If the user would like to rerun the client again.

    """
    # Without an explicit --cert-name, match purely on the domain set.
    if not certname:
        return _find_lineage_for_domains(config, domains)
    lineage = cert_manager.lineage_for_certname(config, certname)
    if lineage is None:
        # No lineage by that name yet: issue a new cert if we know the domains.
        if domains:
            return "newcert", None
        raise errors.ConfigurationError("No certificate with name {0} found. "
            "Use -d to specify domains, or run certbot certificates to see "
            "possible certificate names.".format(certname))
    if domains and set(cert_manager.domains_for_certname(config, certname)) != set(domains):
        # Same name, different domain set: confirm the change and renew.
        _handle_unexpected_key_type_migration(config, lineage)
        _ask_user_to_confirm_new_names(config, domains, certname,
                                       lineage.names())  # raises if no
        return "renew", lineage
    # Domains were either omitted or match the lineage exactly.
    return _handle_identical_cert_request(config, lineage)
def _get_added_removed(after, before):
"""Get lists of items removed from `before`
and a lists of items added to `after`
"""
added = list(set(after) - set(before))
removed = list(set(before) - set(after))
added.sort()
removed.sort()
return added, removed
def _format_list(character, strings):
"""Format list with given character
"""
if not strings:
formatted = "{br}(None)"
else:
formatted = "{br}{ch} " + "{br}{ch} ".join(strings)
return formatted.format(
ch=character,
br=os.linesep
)
def _ask_user_to_confirm_new_names(config, new_domains, certname, old_domains):
    """Ask user to confirm update cert certname to contain new_domains.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param new_domains: List of new domain names
    :type new_domains: `list` of `str`

    :param certname: Name of certificate
    :type certname: str

    :param old_domains: List of old domain names
    :type old_domains: `list` of `str`

    :returns: None
    :rtype: None

    :raises errors.ConfigurationError: if cert name and domains mismatch

    """
    # --renew-with-new-domains skips the interactive confirmation entirely.
    if config.renew_with_new_domains:
        return
    added, removed = _get_added_removed(new_domains, old_domains)
    msg = ("You are updating certificate {0} to include new domain(s): {1}{br}{br}"
           "You are also removing previously included domain(s): {2}{br}{br}"
           "Did you intend to make this change?".format(
               certname,
               _format_list("+", added),
               _format_list("-", removed),
               br=os.linesep))
    confirmed = display_util.yesno(msg, "Update certificate", "Cancel", default=True)
    if not confirmed:
        raise errors.ConfigurationError("Specified mismatched certificate name and domains.")
def _find_domains_or_certname(config, installer, question=None):
    """Retrieve domains and certname from config or user input.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param installer: Installer object
    :type installer: interfaces.Installer

    :param `str` question: Overriding default question to ask the user if asked
        to choose from domain names.

    :returns: Two-part tuple of domains and certname
    :rtype: `tuple` of list of `str` and `str`

    :raises errors.Error: Usage message, if parameters are not used correctly

    """
    certname = config.certname
    # Prefer domains given explicitly on the command line; otherwise derive
    # them from the lineage named by --cert-name.
    if config.domains:
        domains = config.domains
    elif certname:
        # The named lineage might not exist, in which case this is falsy.
        domains = cert_manager.domains_for_certname(config, certname)
    else:
        domains = None
    # Still nothing: fall back to interactive choice / installer discovery.
    if not domains:
        domains = display_ops.choose_names(installer, question)
    if not domains and not certname:
        raise errors.Error("Please specify --domains, or --installer that "
                           "will help in domain names autodiscovery, or "
                           "--cert-name for an existing certificate name.")
    return domains, certname
def _report_next_steps(config: configuration.NamespaceConfig, installer_err: Optional[errors.Error],
                       lineage: Optional[storage.RenewableCert],
                       new_or_renewed_cert: bool = True) -> None:
    """Displays post-run/certonly advice to the user about renewal and installation.

    The output varies by runtime configuration and any errors encountered during installation.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param installer_err: The installer/enhancement error encountered, if any.
    :type installer_err: Optional[errors.Error]

    :param lineage: The resulting certificate lineage from the issuance, if any.
    :type lineage: Optional[storage.RenewableCert]

    :param bool new_or_renewed_cert: Whether the verb execution resulted in a certificate
        being saved (created or renewed).

    """
    steps: List[str] = []
    # If the installation or enhancement raised an error, show advice on trying again
    if installer_err:
        steps.append(
            "The certificate was saved, but could not be installed (installer: "
            f"{config.installer}). After fixing the error shown below, try installing it again "
            f"by running:\n {cli.cli_command} install --cert-name "
            f"{_cert_name_from_config_or_lineage(config, lineage)}"
        )
    # If a certificate was obtained or renewed, show applicable renewal advice
    if new_or_renewed_cert:
        if config.csr:
            # --csr certs have no lineage, so Certbot cannot autorenew them.
            steps.append(
                "Certificates created using --csr will not be renewed automatically by Certbot. "
                "You will need to renew the certificate before it expires, by running the same "
                "Certbot command again.")
        elif _is_interactive_only_auth(config):
            # --manual without --manual-auth-hook requires a human for renewal.
            steps.append(
                "This certificate will not be renewed automatically. Autorenewal of "
                "--manual certificates requires the use of an authentication hook script "
                "(--manual-auth-hook) but one was not provided. To renew this certificate, repeat "
                f"this same {cli.cli_command} command before the certificate's expiry date."
            )
        elif not config.preconfigured_renewal:
            steps.append(
                "The certificate will need to be renewed before it expires. Certbot can "
                "automatically renew the certificate in the background, but you may need "
                "to take steps to enable that functionality. "
                "See https://certbot.org/renewal-setup for instructions.")
    if not steps:
        return
    # TODO: refactor ANSI escapes during https://github.com/certbot/certbot/issues/8848
    # Bold the heading only on interactive terminals and when not --quiet.
    (bold_on, bold_off) = [c if sys.stdout.isatty() and not config.quiet else '' \
                           for c in (util.ANSI_SGR_BOLD, util.ANSI_SGR_RESET)]
    print(bold_on, '\n', 'NEXT STEPS:', bold_off, sep='')
    for step in steps:
        display_util.notify(f"- {step}")
    # If there was an installer error, segregate the error output with a trailing newline
    if installer_err:
        print()
def _report_new_cert(config: configuration.NamespaceConfig, cert_path: Optional[str],
                     fullchain_path: Optional[str], key_path: Optional[str] = None) -> None:
    """Reports the creation of a new certificate to the user.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param cert_path: path to certificate (used here to compute the expiry date)
    :type cert_path: str

    :param fullchain_path: path to full chain (reported to the user as the
        certificate location)
    :type fullchain_path: str

    :param key_path: path to private key, if available
    :type key_path: str

    :returns: `None`
    :rtype: None

    """
    if config.dry_run:
        # A dry run saves nothing, so there are no paths to report.
        display_util.notify("The dry run was successful.")
        return
    assert cert_path and fullchain_path, "No certificates saved to report."
    renewal_msg = ""
    if config.preconfigured_renewal and not _is_interactive_only_auth(config):
        renewal_msg = ("\nCertbot has set up a scheduled task to automatically renew this "
                       "certificate in the background.")
    display_util.notify(
        ("\nSuccessfully received certificate.\n"
         "Certificate is saved at: {cert_path}\n{key_msg}"
         "This certificate expires on {expiry}.\n"
         "These files will be updated when the certificate renews.{renewal_msg}{nl}").format(
             # NOTE(review): the "{cert_path}" placeholder is deliberately filled
             # with fullchain_path (the file servers should be pointed at), while
             # cert_path only supplies the expiry date -- confirm before changing.
             cert_path=fullchain_path,
             expiry=crypto_util.notAfter(cert_path).date(),
             key_msg="Key is saved at: {}\n".format(key_path) if key_path else "",
             renewal_msg=renewal_msg,
             nl="\n" if config.verb == "run" else ""  # Normalize spacing across verbs
         )
    )
def _is_interactive_only_auth(config: configuration.NamespaceConfig) -> bool:
    """Return True when the configured authenticator can only renew interactively.

    A --manual certificate with no --manual-auth-hook needs a human in the
    loop for every renewal, so autorenewal is impossible.
    """
    return config.authenticator == "manual" and config.manual_auth_hook is None
def _csr_report_new_cert(config: configuration.NamespaceConfig, cert_path: Optional[str],
                         chain_path: Optional[str], fullchain_path: Optional[str]):
    """ --csr variant of _report_new_cert.

    Until --csr is overhauled (#8332) this is transitional function to report the creation
    of a new certificate using --csr.

    TODO: remove this function and just call _report_new_cert when --csr is overhauled.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param str cert_path: path to cert.pem
    :param str chain_path: path to chain.pem
    :param str fullchain_path: path to fullchain.pem
    """
    if config.dry_run:
        # A dry run saves nothing, so there are no files to report.
        display_util.notify("The dry run was successful.")
        return
    assert cert_path and fullchain_path, "No certificates saved to report."
    expiry = crypto_util.notAfter(cert_path).date()
    display_util.notify(
        ("\nSuccessfully received certificate.\n"
         "Certificate is saved at: {cert_path}\n"
         "Intermediate CA chain is saved at: {chain_path}\n"
         "Full certificate chain is saved at: {fullchain_path}\n"
         "This certificate expires on {expiry}.").format(
             cert_path=cert_path, chain_path=chain_path,
             fullchain_path=fullchain_path, expiry=expiry,
         )
    )
def _determine_account(config):
    """Determine which account to use.

    If ``config.account`` is ``None``, it will be updated based on the
    user input. Same for ``config.email``.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :returns: Account and optionally ACME client API (biproduct of new
        registration).
    :rtype: tuple of :class:`certbot._internal.account.Account` and :class:`acme.client.Client`

    :raises errors.Error: If unable to register an account with ACME server

    """
    def _tos_cb(terms_of_service):
        # Terms-of-service callback used during new registration;
        # --agree-tos (config.tos) short-circuits the interactive prompt.
        if config.tos:
            return True
        msg = ("Please read the Terms of Service at {0}. You "
               "must agree in order to register with the ACME "
               "server. Do you agree?".format(terms_of_service))
        result = display_util.yesno(msg, cli_flag="--agree-tos", force_interactive=True)
        if not result:
            raise errors.Error(
                "Registration cannot proceed without accepting "
                "Terms of Service.")
        return None
    account_storage = account.AccountFileStorage(config)
    # acme stays None unless a brand-new registration happens below.
    acme = None
    if config.account is not None:
        # An account id was given explicitly; load it from disk.
        acc = account_storage.load(config.account)
    else:
        accounts = account_storage.find_all()
        if len(accounts) > 1:
            acc = display_ops.choose_account(accounts)
        elif len(accounts) == 1:
            acc = accounts[0]
        else:  # no account registered yet
            if config.email is None and not config.register_unsafely_without_email:
                config.email = display_ops.get_email()
            try:
                acc, acme = client.register(
                    config, account_storage, tos_cb=_tos_cb)
                display_util.notify("Account registered.")
            except errors.MissingCommandlineFlag:
                # Missing-flag errors already carry usable advice; re-raise as-is.
                raise
            except errors.Error:
                logger.debug("", exc_info=True)
                raise errors.Error(
                    "Unable to register an account with ACME server")
    # Record the chosen account id so later steps reuse the same account.
    config.account = acc.id
    return acc, acme
def _delete_if_appropriate(config):
    """Does the user want to delete their now-revoked certs? If run in non-interactive mode,
    deleting happens automatically.

    :param config: parsed command line arguments
    :type config: configuration.NamespaceConfig

    :returns: `None`
    :rtype: None

    :raises errors.Error: If anything goes wrong, including bad user input, if an overlapping
        archive dir is found for the specified lineage, etc ...

    """
    attempt_deletion = config.delete_after_revoke
    if attempt_deletion is None:
        # --delete-after-revoke was not given; ask interactively (default yes).
        msg = ("Would you like to delete the certificate(s) you just revoked, "
               "along with all earlier and later versions of the certificate?")
        attempt_deletion = display_util.yesno(msg, yes_label="Yes (recommended)", no_label="No",
                                              force_interactive=True, default=True)
    if not attempt_deletion:
        return
    # config.cert_path must have been set
    # config.certname may have been set
    assert config.cert_path
    if not config.certname:
        # Derive the lineage name from --cert-path when --cert-name is absent.
        config.certname = cert_manager.cert_path_to_lineage(config)
    # don't delete if the archive_dir is used by some other lineage
    archive_dir = storage.full_archive_path(
        configobj.ConfigObj(
            storage.renewal_file_for_certname(config, config.certname),
            encoding='utf-8', default_encoding='utf-8'),
        config, config.certname)
    try:
        cert_manager.match_and_check_overlaps(config, [lambda x: archive_dir],
                                              lambda x: x.archive_dir, lambda x: x)
    except errors.OverlappingMatchFound:
        # Shared archive dir: deleting would break the other lineage, so skip.
        logger.warning("Not deleting revoked certificates due to overlapping archive dirs. "
                       "More than one certificate is using %s", archive_dir)
        return
    except Exception as e:
        # Wrap unexpected failures with enough path context to debug them.
        msg = ('config.default_archive_dir: {0}, config.live_dir: {1}, archive_dir: {2},'
               'original exception: {3}')
        msg = msg.format(config.default_archive_dir, config.live_dir, archive_dir, e)
        raise errors.Error(msg)
    cert_manager.delete(config)
def _init_le_client(config, authenticator, installer):
    """Initialize Let's Encrypt Client

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param authenticator: Acme authentication handler
    :type authenticator: Optional[interfaces.Authenticator]

    :param installer: Installer object
    :type installer: interfaces.Installer

    :returns: client: Client object
    :rtype: client.Client

    """
    if authenticator is None:
        # Install-only flows never contact the ACME server, so no account.
        acc, acme = None, None
    else:
        # An authenticator means we will talk to the ACME server, which
        # requires an account.
        acc, acme = _determine_account(config)
        logger.debug("Picked account: %r", acc)
        # XXX
        #crypto_util.validate_key_csr(acc.key)
    return client.Client(config, acc, authenticator, installer, acme=acme)
def unregister(config, unused_plugins):
    """Deactivate account on server

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param unused_plugins: List of plugins (deprecated)
    :type unused_plugins: plugins_disco.PluginsRegistry

    :returns: `None` on success, or a message string when there is no account
        to deactivate or the user aborts.
    :rtype: None or str

    """
    account_storage = account.AccountFileStorage(config)
    accounts = account_storage.find_all()
    if not accounts:
        return "Could not find existing account to deactivate."
    prompt = ("Are you sure you would like to irrevocably deactivate "
              "your account?")
    wants_deactivate = display_util.yesno(prompt, yes_label='Deactivate', no_label='Abort',
                                          default=True)
    if not wants_deactivate:
        return "Deactivation aborted."
    acc, acme = _determine_account(config)
    cb_client = client.Client(config, acc, None, None, acme=acme)
    # delete on boulder
    cb_client.acme.deactivate_registration(acc.regr)
    account_files = account.AccountFileStorage(config)
    # delete local account files
    account_files.delete(config.account)
    display_util.notify("Account deactivated.")
    return None
def register(config, unused_plugins):
    """Create accounts on the server.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param unused_plugins: List of plugins (deprecated)
    :type unused_plugins: plugins_disco.PluginsRegistry

    :returns: `None` or a string indicating an error
    :rtype: None or str

    """
    # Portion of _determine_account logic to see whether accounts already
    # exist or not.
    account_storage = account.AccountFileStorage(config)
    accounts = account_storage.find_all()
    if accounts:
        # TODO: add a flag to register a duplicate account (this will
        #       also require extending _determine_account's behavior
        #       or else extracting the registration code from there)
        return ("There is an existing account; registration of a "
                "duplicate account with this command is currently "
                "unsupported.")
    # _determine_account will register an account
    _determine_account(config)
    return None
def update_account(config, unused_plugins):
    """Modify accounts on the server.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param unused_plugins: List of plugins (deprecated)
    :type unused_plugins: plugins_disco.PluginsRegistry

    :returns: `None` or a string indicating an error
    :rtype: None or str

    """
    # Portion of _determine_account logic to see whether accounts already
    # exist or not.
    account_storage = account.AccountFileStorage(config)
    accounts = account_storage.find_all()
    if not accounts:
        return "Could not find an existing account to update."
    if config.email is None and not config.register_unsafely_without_email:
        config.email = display_ops.get_email(optional=False)
    acc, acme = _determine_account(config)
    cb_client = client.Client(config, acc, None, None, acme=acme)
    # Empty list of contacts in case the user is removing all emails
    acc_contacts: Iterable[str] = ()
    if config.email:
        # --email may carry several comma-separated addresses.
        acc_contacts = ['mailto:' + email for email in config.email.split(',')]
    # We rely on an exception to interrupt this process if it didn't work.
    prev_regr_uri = acc.regr.uri
    acc.regr = cb_client.acme.update_registration(acc.regr.update(
        body=acc.regr.body.update(contact=acc_contacts)))
    # A v1 account being used as a v2 account will result in changing the uri to
    # the v2 uri. Since it's the same object on disk, put it back to the v1 uri
    # so that we can also continue to use the account object with acmev1.
    acc.regr = acc.regr.update(uri=prev_regr_uri)
    account_storage.update_regr(acc, cb_client.acme)
    if not config.email:
        display_util.notify("Any contact information associated "
                            "with this account has been removed.")
    else:
        eff.prepare_subscription(config, acc)
        display_util.notify("Your e-mail address was updated to {0}.".format(config.email))
    return None
def _cert_name_from_config_or_lineage(config: configuration.NamespaceConfig,
                                      lineage: Optional[storage.RenewableCert]) -> Optional[str]:
    """Best-effort lookup of the certificate name for the current operation.

    Prefers the lineage's own name, then --cert-name, then a name derived
    from --cert-path; returns None when none of those are available.
    """
    if lineage:
        return lineage.lineagename
    if config.certname:
        return config.certname
    try:
        return cert_manager.cert_path_to_lineage(config)
    except errors.Error:
        # No renewal configuration matches --cert-path; nothing to report.
        return None
def _install_cert(config, le_client, domains, lineage=None):
    """Install a cert

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param le_client: Client object
    :type le_client: client.Client

    :param domains: List of domains
    :type domains: `list` of `str`

    :param lineage: Certificate lineage object. Defaults to `None`
    :type lineage: storage.RenewableCert

    :returns: `None`
    :rtype: None

    """
    # Paths come from the lineage when one exists; otherwise the user must
    # have supplied them on the command line (held directly on config).
    source = lineage if lineage else config
    assert source.cert_path is not None
    le_client.deploy_certificate(domains, source.key_path, source.cert_path,
                                 source.chain_path, source.fullchain_path)
    le_client.enhance_config(domains, source.chain_path)
def install(config, plugins):
    """Install a previously obtained cert in a server.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param plugins: List of plugins
    :type plugins: plugins_disco.PluginsRegistry

    :returns: `None` on success, or a string describing a plugin selection error
    :rtype: None or str

    :raises errors.ConfigurationError: if required paths/names are missing or
        requested enhancements need --cert-name
    :raises errors.NotSupportedError: if the installer cannot apply a requested
        enhancement
    """
    # XXX: Update for renewer/RenewableCert
    # FIXME: be consistent about whether errors are raised or returned from
    # this function ...
    try:
        installer, _ = plug_sel.choose_configurator_plugins(config, plugins, "install")
    except errors.PluginSelectionError as e:
        return str(e)
    custom_cert = (config.key_path and config.cert_path)
    if not config.certname and not custom_cert:
        # Neither --cert-name nor explicit paths given; ask which lineage to use.
        certname_question = "Which certificate would you like to install?"
        config.certname = cert_manager.get_certnames(
            config, "install", allow_multiple=False,
            custom_prompt=certname_question)[0]
    if not enhancements.are_supported(config, installer):
        # Fixed typo in user-facing message: "One ore more" -> "One or more".
        raise errors.NotSupportedError("One or more of the requested enhancements "
                                       "are not supported by the selected installer")
    # If cert-path is defined, populate missing (ie. not overridden) values.
    # Unfortunately this can't be done in argument parser, as certificate
    # manager needs the access to renewal directory paths
    if config.certname:
        config = _populate_from_certname(config)
    elif enhancements.are_requested(config):
        # Preflight config check
        raise errors.ConfigurationError("One or more of the requested enhancements "
                                        "require --cert-name to be provided")
    if config.key_path and config.cert_path:
        _check_certificate_and_key(config)
        domains, _ = _find_domains_or_certname(config, installer)
        le_client = _init_le_client(config, authenticator=None, installer=installer)
        _install_cert(config, le_client, domains)
    else:
        raise errors.ConfigurationError("Path to certificate or key was not defined. "
            "If your certificate is managed by Certbot, please use --cert-name "
            "to define which certificate you would like to install.")
    if enhancements.are_requested(config):
        # In the case where we don't have certname, we have errored out already
        lineage = cert_manager.lineage_for_certname(config, config.certname)
        enhancements.enable(lineage, domains, installer, config)
    return None
def _populate_from_certname(config):
    """Helper function for install to populate missing config values from lineage
    defined by --cert-name."""
    lineage = cert_manager.lineage_for_certname(config, config.certname)
    if not lineage:
        return config
    # Only fill in path values the user did not explicitly override.
    for path_attr in ("key_path", "cert_path", "chain_path", "fullchain_path"):
        if not getattr(config, path_attr):
            setattr(config.namespace, path_attr, getattr(lineage, path_attr))
    return config
def _check_certificate_and_key(config):
    """Verify that --cert-path and --key-path both resolve to existing files.

    :param config: Configuration object with ``cert_path`` and ``key_path`` set
    :type config: configuration.NamespaceConfig

    :raises errors.ConfigurationError: if either path does not resolve to an
        existing regular file
    """
    if not os.path.isfile(filesystem.realpath(config.cert_path)):
        raise errors.ConfigurationError("Error while reading certificate from path "
                                        "{0}".format(config.cert_path))
    if not os.path.isfile(filesystem.realpath(config.key_path)):
        raise errors.ConfigurationError("Error while reading private key from path "
                                        "{0}".format(config.key_path))
def plugins_cmd(config, plugins):
    """List server software plugins.

    Progressively narrows the plugin set (visible -> initialized/verified ->
    prepared), stopping at the stage selected by --init/--prepare and
    reporting the result at that stage.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param plugins: List of plugins
    :type plugins: plugins_disco.PluginsRegistry

    :returns: `None`
    :rtype: None

    """
    logger.debug("Expected interfaces: %s", config.ifaces)
    requested_ifaces = config.ifaces if config.ifaces is not None else []
    visible = plugins.visible().ifaces(requested_ifaces)
    logger.debug("Filtered plugins: %r", visible)
    notify = functools.partial(display_util.notification, pause=False)
    # Without --init or --prepare, just report what is visible and stop.
    if not config.init and not config.prepare:
        notify(str(visible))
        return
    visible.init(config)
    verified = visible.verify(requested_ifaces)
    logger.debug("Verified plugins: %r", verified)
    if not config.prepare:
        notify(str(verified))
        return
    verified.prepare()
    available = verified.available()
    logger.debug("Prepared plugins: %s", available)
    notify(str(available))
def enhance(config, plugins):
    """Add security enhancements to existing configuration

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param plugins: List of plugins
    :type plugins: plugins_disco.PluginsRegistry

    :returns: `None` on success, or a string describing a plugin selection error
    :rtype: None or str

    :raises errors.MisconfigurationError: if no enhancements were requested
    :raises errors.NotSupportedError: if the installer cannot apply a requested
        enhancement
    :raises errors.Error: if the user cancels the domain selection
    """
    supported_enhancements = ["hsts", "redirect", "uir", "staple"]
    # Check that at least one enhancement was requested on command line
    oldstyle_enh = any(getattr(config, enh) for enh in supported_enhancements)
    if not enhancements.are_requested(config) and not oldstyle_enh:
        msg = ("Please specify one or more enhancement types to configure. To list "
               "the available enhancement types, run:\n\n%s --help enhance\n")
        logger.error(msg, cli.cli_command)
        raise errors.MisconfigurationError("No enhancements requested, exiting.")
    try:
        installer, _ = plug_sel.choose_configurator_plugins(config, plugins, "enhance")
    except errors.PluginSelectionError as e:
        return str(e)
    if not enhancements.are_supported(config, installer):
        # Fixed typo in user-facing message: "One ore more" -> "One or more".
        raise errors.NotSupportedError("One or more of the requested enhancements "
                                       "are not supported by the selected installer")
    certname_question = ("Which certificate would you like to use to enhance "
                         "your configuration?")
    config.certname = cert_manager.get_certnames(
        config, "enhance", allow_multiple=False,
        custom_prompt=certname_question)[0]
    cert_domains = cert_manager.domains_for_certname(config, config.certname)
    if config.noninteractive_mode:
        domains = cert_domains
    else:
        domain_question = ("Which domain names would you like to enable the "
                           "selected enhancements for?")
        domains = display_ops.choose_values(cert_domains, domain_question)
        if not domains:
            raise errors.Error("User cancelled the domain selection. No domains "
                               "defined, exiting.")
    lineage = cert_manager.lineage_for_certname(config, config.certname)
    if not config.chain_path:
        config.chain_path = lineage.chain_path
    if oldstyle_enh:
        # Old-style flags (--hsts etc.) go through the client's enhance_config.
        le_client = _init_le_client(config, authenticator=None, installer=installer)
        le_client.enhance_config(domains, config.chain_path, redirect_default=False)
    if enhancements.are_requested(config):
        enhancements.enable(lineage, domains, installer, config)
    return None
def rollback(config, plugins):
    """Rollback server configuration changes made during install.

    Delegates to :func:`client.rollback`, passing the configured installer
    name and the number of checkpoints to revert.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param plugins: List of plugins
    :type plugins: plugins_disco.PluginsRegistry

    :returns: `None`
    :rtype: None

    """
    client.rollback(config.installer, config.checkpoints, config, plugins)
def update_symlinks(config, unused_plugins):
    """Update the certificate file family symlinks

    Use the information in the config file to make symlinks point to
    the correct archive directory.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param unused_plugins: List of plugins (deprecated)
    :type unused_plugins: plugins_disco.PluginsRegistry

    :returns: `None`
    :rtype: None

    """
    cert_manager.update_live_symlinks(config)
def rename(config, unused_plugins):
    """Rename a certificate

    Use the information in the config file to rename an existing
    lineage.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param unused_plugins: List of plugins (deprecated)
    :type unused_plugins: plugins_disco.PluginsRegistry

    :returns: `None`
    :rtype: None

    """
    cert_manager.rename_lineage(config)
def delete(config, unused_plugins):
    """Delete a certificate

    Use the information in the config file to delete an existing
    lineage.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param unused_plugins: List of plugins (deprecated)
    :type unused_plugins: plugins_disco.PluginsRegistry

    :returns: `None`
    :rtype: None

    """
    cert_manager.delete(config)
def certificates(config, unused_plugins):
    """Display information about certs configured with Certbot

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param unused_plugins: List of plugins (deprecated)
    :type unused_plugins: plugins_disco.PluginsRegistry

    :returns: `None`
    :rtype: None

    """
    cert_manager.certificates(config)
def revoke(config, unused_plugins: plugins_disco.PluginsRegistry) -> Optional[str]:
    """Revoke a previously obtained certificate.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param unused_plugins: List of plugins (deprecated)
    :type unused_plugins: plugins_disco.PluginsRegistry

    :returns: `None` or string indicating error in case of error
    :rtype: None or str

    """
    # For user-agent construction
    config.installer = config.authenticator = None
    if config.cert_path is None and config.certname:
        # When revoking via --cert-name, take the cert path and server from renewalparams
        lineage = storage.RenewableCert(
            storage.renewal_file_for_certname(config, config.certname), config)
        config.cert_path = lineage.cert_path
        # --server takes priority over lineage.server
        if lineage.server and not cli.set_by_cli("server"):
            config.server = lineage.server
    elif not config.cert_path or (config.cert_path and config.certname):
        # intentionally not supporting --cert-path & --cert-name together,
        # to avoid dealing with mismatched values
        raise errors.Error("Error! Exactly one of --cert-path or --cert-name must be specified!")
    if config.key_path is not None:  # revocation by cert key
        logger.debug("Revoking %s using certificate key %s",
                     config.cert_path, config.key_path)
        # Guard against revoking with a key that does not match the cert.
        crypto_util.verify_cert_matches_priv_key(config.cert_path, config.key_path)
        with open(config.key_path, 'rb') as f:
            key = jose.JWK.load(f.read())
        acme = client.acme_from_config_key(config, key)
    else:  # revocation by account key
        logger.debug("Revoking %s using Account Key", config.cert_path)
        acc, _ = _determine_account(config)
        acme = client.acme_from_config_key(config, acc.key, acc.regr)
    with open(config.cert_path, 'rb') as f:
        cert = crypto_util.pyopenssl_load_certificate(f.read())[0]
    logger.debug("Reason code for revocation: %s", config.reason)
    try:
        acme.revoke(jose.ComparableX509(cert), config.reason)
        # Optionally clean up the now-revoked lineage on disk.
        _delete_if_appropriate(config)
    except acme_errors.ClientError as e:
        return str(e)
    display_ops.success_revocation(config.cert_path)
    return None
def run(config, plugins):
    """Obtain a certificate and install.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param plugins: List of plugins
    :type plugins: plugins_disco.PluginsRegistry

    :returns: `None` on success, or a string describing a plugin selection error
    :rtype: None or str

    :raises errors.NotSupportedError: if the installer cannot apply a requested
        enhancement
    :raises errors.Error: re-raised installer/enhancement error, after the
        next-steps advice has been printed
    """
    # TODO: Make run as close to auth + install as possible
    # Possible difficulties: config.csr was hacked into auth
    try:
        installer, authenticator = plug_sel.choose_configurator_plugins(config, plugins, "run")
    except errors.PluginSelectionError as e:
        return str(e)
    # Preflight check for enhancement support by the selected installer
    if not enhancements.are_supported(config, installer):
        # Fixed typo in user-facing message: "One ore more" -> "One or more".
        raise errors.NotSupportedError("One or more of the requested enhancements "
                                       "are not supported by the selected installer")
    # TODO: Handle errors from _init_le_client?
    le_client = _init_le_client(config, authenticator, installer)
    domains, certname = _find_domains_or_certname(config, installer)
    should_get_cert, lineage = _find_cert(config, domains, certname)
    new_lineage = lineage
    if should_get_cert:
        new_lineage = _get_and_save_cert(le_client, config, domains,
                                         certname, lineage)
    cert_path = new_lineage.cert_path if new_lineage else None
    fullchain_path = new_lineage.fullchain_path if new_lineage else None
    key_path = new_lineage.key_path if new_lineage else None
    if should_get_cert:
        _report_new_cert(config, cert_path, fullchain_path, key_path)
    # The installer error, if any, is being stored as a value here, in order to first print
    # relevant advice in a nice way, before re-raising the error for normal processing.
    installer_err: Optional[errors.Error] = None
    try:
        _install_cert(config, le_client, domains, new_lineage)
        if enhancements.are_requested(config) and new_lineage:
            enhancements.enable(new_lineage, domains, installer, config)
        if lineage is None or not should_get_cert:
            display_ops.success_installation(domains)
        else:
            display_ops.success_renewal(domains)
    except errors.Error as e:
        installer_err = e
    finally:
        # Always show next-steps advice, even when installation failed.
        _report_next_steps(config, installer_err, new_lineage,
                           new_or_renewed_cert=should_get_cert)
    # If the installer did fail, re-raise the error to bail out
    if installer_err:
        raise installer_err
    _suggest_donation_if_appropriate(config)
    eff.handle_subscription(config, le_client.account)
    return None
def _csr_get_and_save_cert(config: configuration.NamespaceConfig,
                           le_client: client.Client) -> Tuple[
                           Optional[str], Optional[str], Optional[str]]:
    """Obtain a cert using a user-supplied CSR

    This works differently in the CSR case (for now) because we don't
    have the privkey, and therefore can't construct the files for a lineage.
    So we just save the cert & chain to disk :/

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param le_client: Client object
    :type le_client: client.Client

    :returns: `cert_path`, `chain_path` and `fullchain_path` as absolute
        paths to the actual files, or None for each if it's a dry-run.
    :rtype: `tuple` of `str`

    """
    csr, _ = config.actual_csr
    # Names are extracted from the CSR itself for the progress message.
    csr_names = crypto_util.get_names_from_req(csr.data)
    display_util.notify(
        "{action} for {domains}".format(
            action="Simulating a certificate request" if config.dry_run else
                   "Requesting a certificate",
            domains=internal_display_util.summarize_domain_list(csr_names)
        )
    )
    cert, chain = le_client.obtain_certificate_from_csr(csr)
    if config.dry_run:
        logger.debug(
            "Dry run: skipping saving certificate to %s", config.cert_path)
        return None, None, None
    cert_path, chain_path, fullchain_path = le_client.save_certificate(
        cert, chain, os.path.normpath(config.cert_path),
        os.path.normpath(config.chain_path), os.path.normpath(config.fullchain_path))
    return cert_path, chain_path, fullchain_path
def renew_cert(config, plugins, lineage):
    """Renew & save an existing cert. Do not install it.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param plugins: List of plugins
    :type plugins: plugins_disco.PluginsRegistry

    :param lineage: Certificate lineage object
    :type lineage: storage.RenewableCert

    :returns: `None`
    :rtype: None

    :raises errors.PluginSelectionError: if the plugins required by the
        supplied parameters cannot be selected (e.g. ``MissingCommandlineFlag``
        when a needed command line flag was not provided)
    """
    # installers are used in auth mode to determine domain names
    installer, auth = plug_sel.choose_configurator_plugins(config, plugins, "certonly")
    le_client = _init_le_client(config, auth, installer)
    renewed_lineage = _get_and_save_cert(le_client, config, lineage=lineage)
    if installer and not config.dry_run:
        # In case of a renewal, reload server to pick up new certificate.
        updater.run_renewal_deployer(config, renewed_lineage, installer)
        display_util.notify(f"Reloading {config.installer} server after certificate renewal")
        installer.restart()
def certonly(config, plugins):
    """Authenticate & obtain cert, but do not install it.

    This implements the 'certonly' subcommand.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param plugins: List of plugins
    :type plugins: plugins_disco.PluginsRegistry

    :returns: `None`
    :rtype: None

    :raises errors.Error: If specified plugin could not be used
    """
    # SETUP: Select plugins and construct a client instance
    # installers are used in auth mode to determine domain names
    installer, auth = plug_sel.choose_configurator_plugins(config, plugins, "certonly")
    le_client = _init_le_client(config, auth, installer)
    if config.csr:
        # CSR mode: the user supplied their own CSR so there is no lineage
        # to manage; the cert & chain are only saved to disk.
        cert_path, chain_path, fullchain_path = _csr_get_and_save_cert(config, le_client)
        _csr_report_new_cert(config, cert_path, chain_path, fullchain_path)
        _report_next_steps(config, None, None, new_or_renewed_cert=not config.dry_run)
        _suggest_donation_if_appropriate(config)
        eff.handle_subscription(config, le_client.account)
        return
    domains, certname = _find_domains_or_certname(config, installer)
    should_get_cert, lineage = _find_cert(config, domains, certname)
    if not should_get_cert:
        # Nothing to do: an existing cert covers the request and is not due.
        display_util.notification("Certificate not yet due for renewal; no action taken.",
                                  pause=False)
        return
    lineage = _get_and_save_cert(le_client, config, domains, certname, lineage)
    # lineage may be falsy here (no cert object returned); guard every
    # attribute access below.
    cert_path = lineage.cert_path if lineage else None
    fullchain_path = lineage.fullchain_path if lineage else None
    key_path = lineage.key_path if lineage else None
    _report_new_cert(config, cert_path, fullchain_path, key_path)
    _report_next_steps(config, None, lineage,
                       new_or_renewed_cert=should_get_cert and not config.dry_run)
    _suggest_donation_if_appropriate(config)
    eff.handle_subscription(config, le_client.account)
def renew(config, unused_plugins):
    """Renew previously-obtained certificates.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :param unused_plugins: List of plugins (deprecated)
    :type unused_plugins: plugins_disco.PluginsRegistry

    :returns: `None`
    :rtype: None
    """
    try:
        renewal.handle_renewal_request(config)
    finally:
        # Post hooks are saved up during renewal; they must run even if the
        # renewal request itself raised.
        hooks.run_saved_post_hooks()
def make_or_verify_needed_dirs(config):
    """Create or verify existence of config, work, and hook directories.

    :param config: Configuration object
    :type config: configuration.NamespaceConfig

    :returns: `None`
    :rtype: None
    """
    # The two core directories (config first, then work) share the same
    # mode and strict-permission policy.
    for core_dir in (config.config_dir, config.work_dir):
        util.set_up_core_dir(core_dir, constants.CONFIG_DIRS_MODE,
                             config.strict_permissions)
    # Renewal hook directories: pre, deploy, post -- in that order.
    for hook_dir in (config.renewal_pre_hooks_dir,
                     config.renewal_deploy_hooks_dir,
                     config.renewal_post_hooks_dir):
        util.make_or_verify_dir(hook_dir, strict=config.strict_permissions)
@contextmanager
def make_displayer(config: configuration.NamespaceConfig
                   ) -> Generator[Union[display_util.NoninteractiveDisplay,
                                        display_util.FileDisplay], None, None]:
    """Creates a display object appropriate to the flags in the supplied config.

    :param config: Configuration object

    :returns: Display object
    """
    null_stream: Optional[IO] = None
    if config.quiet:
        # Quiet mode implies non-interactive mode; all output is discarded.
        config.noninteractive_mode = True
        null_stream = open(os.devnull, "w")  # pylint: disable=consider-using-with
        displayer: Union[display_util.NoninteractiveDisplay,
                         display_util.FileDisplay] = \
            display_util.NoninteractiveDisplay(null_stream)
    elif config.noninteractive_mode:
        displayer = display_util.NoninteractiveDisplay(sys.stdout)
    else:
        displayer = display_util.FileDisplay(
            sys.stdout, config.force_interactive)
    try:
        yield displayer
    finally:
        # Only the quiet branch owns a file handle that needs closing.
        if null_stream:
            null_stream.close()
def main(cli_args=None):
    """Run Certbot.

    :param cli_args: command line to Certbot, defaults to ``sys.argv[1:]``
    :type cli_args: `list` of `str`

    :returns: value for `sys.exit` about the exit status of Certbot
    :rtype: `str` or `int` or `None`
    """
    # NOTE(review): an explicitly-passed empty list also falls back to
    # sys.argv[1:] because of the truthiness test -- confirm intended.
    if not cli_args:
        cli_args = sys.argv[1:]
    # Logging must be partially configured before argument parsing so that
    # parse errors can be reported.
    log.pre_arg_parse_setup()
    if os.environ.get('CERTBOT_SNAPPED') == 'True':
        cli_args = snap_config.prepare_env(cli_args)
    plugins = plugins_disco.PluginsRegistry.find_all()
    logger.debug("certbot version: %s", certbot.__version__)
    logger.debug("Location of certbot entry point: %s", sys.argv[0])
    # do not log `config`, as it contains sensitive data (e.g. revoke --key)!
    logger.debug("Arguments: %r", cli_args)
    logger.debug("Discovered plugins: %r", plugins)
    # Some releases of Windows require escape sequences to be enable explicitly
    misc.prepare_virtual_console()
    # note: arg parser internally handles --help (and exits afterwards)
    args = cli.prepare_and_parse_args(plugins, cli_args)
    config = configuration.NamespaceConfig(args)
    # This call is done only for retro-compatibility purposes.
    # TODO: Remove this call once zope dependencies are removed from Certbot.
    zope.component.provideUtility(config, interfaces.IConfig)
    # On windows, shell without administrative right cannot create symlinks required by certbot.
    # So we check the rights before continuing.
    misc.raise_for_non_administrative_windows_rights()
    try:
        log.post_arg_parse_setup(config)
        make_or_verify_needed_dirs(config)
    except errors.Error:
        # Let plugins_cmd be run as un-privileged user.
        if config.func != plugins_cmd:  # pylint: disable=comparison-with-callable
            raise
    # These calls are done only for retro-compatibility purposes.
    # TODO: Remove these calls once zope dependencies are removed from Certbot.
    report = reporter.Reporter(config)
    zope.component.provideUtility(report, interfaces.IReporter)
    util.atexit_register(report.print_messages)
    # Dispatch to the subcommand handler selected during argument parsing.
    with make_displayer(config) as displayer:
        display_obj.set_display(displayer)
        return config.func(config, plugins)
| {
"content_hash": "3a98b4efafee9f811631e2a3616ab8cd",
"timestamp": "",
"source": "github",
"line_count": 1569,
"max_line_length": 100,
"avg_line_length": 37.26449968132569,
"alnum_prop": 0.6641410686187316,
"repo_name": "stweil/letsencrypt",
"id": "cda08e1b7f01d0a288108a2e0feda12c6b16cb61",
"size": "58468",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "certbot/certbot/_internal/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "14147"
},
{
"name": "Augeas",
"bytes": "4997"
},
{
"name": "Batchfile",
"bytes": "35037"
},
{
"name": "DIGITAL Command Language",
"bytes": "133"
},
{
"name": "Groff",
"bytes": "222"
},
{
"name": "Makefile",
"bytes": "37309"
},
{
"name": "Nginx",
"bytes": "4270"
},
{
"name": "Python",
"bytes": "1355274"
},
{
"name": "Shell",
"bytes": "120566"
},
{
"name": "Standard ML",
"bytes": "256"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the nullable ``token_up_time`` timestamp field to the account
    # model, following the removal of ``token_exp_time`` in 0005.
    dependencies = [
        ('account', '0005_remove_account_token_exp_time'),
    ]
    operations = [
        migrations.AddField(
            model_name='account',
            name='token_up_time',
            # verbose_name is deliberately non-English user-facing text;
            # it must not be translated or altered here.
            field=models.DateTimeField(null=True, verbose_name='token-有效'),
        ),
    ]
| {
"content_hash": "f56863e6dbfc02e92a49f0051af683ce",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 75,
"avg_line_length": 23.333333333333332,
"alnum_prop": 0.6071428571428571,
"repo_name": "hyperwd/hwcram",
"id": "e174702cf1614f5d553e849aad308160d0ccd38f",
"size": "497",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "account/migrations/0006_account_token_up_time.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "149073"
},
{
"name": "Shell",
"bytes": "126"
}
],
"symlink_target": ""
} |
__version__=''' $Id: renderPM.py 3959 2012-09-27 14:39:39Z robin $ '''
__doc__="""Render drawing objects in common bitmap formats
Usage::
from reportlab.graphics import renderPM
renderPM.drawToFile(drawing,filename,fmt='GIF',configPIL={....})
Other functions let you create a PM drawing as string or into a PM buffer.
Execute the script to see some test drawings."""
from reportlab.graphics.shapes import *
from reportlab.graphics.renderbase import StateTracker, getStateDelta, renderScaledDrawing
from reportlab.pdfbase.pdfmetrics import getFont, unicode2T1
from math import sin, cos, pi, ceil
from reportlab.lib.utils import getStringIO, open_and_read
from reportlab import rl_config
class RenderPMError(Exception):
    # Raised for renderPM-specific failures, e.g. fonts whose T1 files
    # cannot be loaded (_setFont) or unknown output formats (saveToFile).
    pass
import string, os, sys
try:
import _renderPM
except ImportError, errMsg:
raise ImportError, "No module named _renderPM\n" + \
(str(errMsg)!='No module named _renderPM' and "it may be the wrong version or badly installed!" or
"see https://www.reportlab.com/software/opensource/rl-addons/")
def _getImage():
    """Return the PIL ``Image`` module.

    Prefers the packaged layout (``from PIL import Image``) and falls back
    to the legacy top-level ``Image`` module for very old installs.
    """
    try:
        from PIL import Image as image_module
    except ImportError:
        import Image as image_module
    return image_module
def Color2Hex(c):
    """Pack a color object with float ``red``/``green``/``blue`` channels
    (each in 0..1) into a single 0xRRGGBB integer.

    Falsy inputs (e.g. ``None``) are returned unchanged.
    """
    #assert isinstance(colorobj, colors.Color) #these checks don't work well RGB
    if not c:
        return c
    channels = []
    for component in (c.red, c.green, c.blue):
        channels.append(0xFF & int(255 * component))
    r, g, b = channels
    return (r << 16) | (g << 8) | b
# the main entry point for users...
def draw(drawing, canvas, x, y, showBoundary=rl_config._unset_):
    """Render ``drawing`` onto ``canvas`` at position (x, y) using the
    pixmap renderer."""
    renderer = _PMRenderer()
    scaled = renderScaledDrawing(drawing)
    renderer.draw(scaled, canvas, x, y, showBoundary=showBoundary)
from reportlab.graphics.renderbase import Renderer
class _PMRenderer(Renderer):
    """This draws onto a pix map image. It needs to be a class
    rather than a function, as some image-specific state tracking is
    needed outside of the state info in the SVG model."""
    def __init__(self):
        # Tracks the stack of graphics-state deltas pushed per node.
        self._tracker = StateTracker()
    def pop(self):
        # Discard the most recent state delta and re-sync the canvas.
        self._tracker.pop()
        self.applyState()
    def push(self,node):
        # Push the node's state changes and re-sync the canvas.
        deltas = getStateDelta(node)
        self._tracker.push(deltas)
        self.applyState()
    def applyState(self):
        # Copy the tracker's merged state onto the canvas attributes.
        s = self._tracker.getState()
        self._canvas.ctm = s['ctm']
        self._canvas.strokeWidth = s['strokeWidth']
        alpha = s['strokeOpacity']
        if alpha is not None:
            self._canvas.strokeOpacity = alpha
        self._canvas.setStrokeColor(s['strokeColor'])
        self._canvas.lineCap = s['strokeLineCap']
        self._canvas.lineJoin = s['strokeLineJoin']
        # Normalize the dash array to the (phase, pattern) form the
        # canvas expects.
        da = s['strokeDashArray']
        if not da:
            da = None
        else:
            if not isinstance(da,(list,tuple)):
                da = da,
            if len(da)!=2 or not isinstance(da[1],(list,tuple)):
                da = 0, da #assume phase of 0
        self._canvas.dashArray = da
        alpha = s['fillOpacity']
        if alpha is not None:
            self._canvas.fillOpacity = alpha
        self._canvas.setFillColor(s['fillColor'])
        self._canvas.setFont(s['fontName'], s['fontSize'])
    def initState(self,x,y):
        # Seed the tracker with the defaults plus the base transform,
        # offset to (x, y).
        deltas = STATE_DEFAULTS.copy()
        deltas['transform'] = self._canvas._baseCTM[0:4]+(x,y)
        self._tracker.push(deltas)
        self.applyState()
    def drawNode(self, node):
        """This is the recursive method called for each node
        in the tree"""
        #apply state changes
        self.push(node)
        #draw the object, or recurse
        self.drawNodeDispatcher(node)
        # restore the state
        self.pop()
    def drawRect(self, rect):
        c = self._canvas
        if rect.rx == rect.ry == 0:
            #plain old rectangle, draw clockwise (x-axis to y-axis) direction
            c.rect(rect.x,rect.y, rect.width, rect.height)
        else:
            c.roundRect(rect.x,rect.y, rect.width, rect.height, rect.rx, rect.ry)
    def drawLine(self, line):
        self._canvas.line(line.x1,line.y1,line.x2,line.y2)
    def drawImage(self, image):
        # image.path may be a filename or an already-open PIL image
        # (anything with a .convert method); anything else is ignored.
        path = image.path
        if isinstance(path,basestring):
            if not (path and os.path.isfile(path)): return
            im = _getImage().open(path).convert('RGB')
        elif hasattr(path,'convert'):
            im = path.convert('RGB')
        else:
            return
        srcW, srcH = im.size
        dstW, dstH = image.width, image.height
        if dstW is None: dstW = srcW
        if dstH is None: dstH = srcH
        # Blit the raw RGB (3 bytes/pixel) buffer onto the canvas.
        self._canvas._aapixbuf(
            image.x, image.y, dstW, dstH,
            im.tostring(), srcW, srcH, 3,
            )
    def drawCircle(self, circle):
        c = self._canvas
        c.circle(circle.cx,circle.cy, circle.r)
        c.fillstrokepath()
    def drawPolyLine(self, polyline, _doClose=0):
        # P is a flat [x0, y0, x1, y1, ...] coordinate list.
        P = polyline.points
        assert len(P) >= 2, 'Polyline must have 1 or more points'
        c = self._canvas
        c.pathBegin()
        c.moveTo(P[0], P[1])
        for i in range(2, len(P), 2):
            c.lineTo(P[i], P[i+1])
        if _doClose:
            c.pathClose()
            c.pathFill()
        c.pathStroke()
    def drawEllipse(self, ellipse):
        c=self._canvas
        c.ellipse(ellipse.cx, ellipse.cy, ellipse.rx,ellipse.ry)
        c.fillstrokepath()
    def drawPolygon(self, polygon):
        # A polygon is a closed (and therefore fillable) polyline.
        self.drawPolyLine(polygon,_doClose=1)
    def drawString(self, stringObj):
        # Nothing is drawn when there is no fill color.
        canv = self._canvas
        fill = canv.fillColor
        if fill is not None:
            S = self._tracker.getState()
            text_anchor = S['textAnchor']
            fontName = S['fontName']
            fontSize = S['fontSize']
            text = stringObj.text
            x = stringObj.x
            y = stringObj.y
            # Shift x left according to the anchor so the anchor point
            # stays at stringObj.x.
            if not text_anchor in ['start','inherited']:
                textLen = stringWidth(text, fontName,fontSize)
                if text_anchor=='end':
                    x -= textLen
                elif text_anchor=='middle':
                    x -= textLen/2
                elif text_anchor=='numeric':
                    x -= numericXShift(text_anchor,text,textLen,fontName,fontSize,stringObj.encoding)
                else:
                    raise ValueError, 'bad value for textAnchor '+str(text_anchor)
            canv.drawString(x,y,text,_fontInfo=(fontName,fontSize))
    def drawPath(self, path):
        c = self._canvas
        # EmptyClipPath is a sentinel: pop the innermost clip path and
        # either re-apply the next one down or clear clipping entirely.
        if path is EmptyClipPath:
            del c._clipPaths[-1]
            if c._clipPaths:
                P = c._clipPaths[-1]
                icp = P.isClipPath
                P.isClipPath = 1
                self.drawPath(P)
                P.isClipPath = icp
            else:
                c.clipPathClear()
            return
        c.pathBegin()
        drawFuncs = (c.moveTo, c.lineTo, c.curveTo, c.pathClose)
        from reportlab.graphics.shapes import _renderPath
        isClosed = _renderPath(path, drawFuncs)
        if path.isClipPath:
            c.clipPathSet()
            c._clipPaths.append(path)
        else:
            # Only closed paths can be filled.
            if isClosed: c.pathFill()
            c.pathStroke()
def _setFont(gs,fontName,fontSize):
    # Set the font on graphics state `gs`; if the extension module does not
    # know the font yet, try to register its Type 1 data and retry once.
    try:
        gs.setFont(fontName,fontSize)
    except _renderPM.Error, errMsg:
        # Any error other than "font not found" is a genuine failure.
        if errMsg.args[0]!="Can't find font!": raise
        #here's where we try to add a font to the canvas
        try:
            f = getFont(fontName)
            if _renderPM._version<='0.98':    #added reader arg in 0.99
                _renderPM.makeT1Font(fontName,f.face.findT1File(),f.encoding.vector)
            else:
                _renderPM.makeT1Font(fontName,f.face.findT1File(),f.encoding.vector,open_and_read)
        except:
            # Re-raise with the original exception type/message embedded,
            # since py2 has no exception chaining.
            s1, s2 = map(str,sys.exc_info()[:2])
            raise RenderPMError, "Can't setFont(%s) missing the T1 files?\nOriginally %s: %s" % (fontName,s1,s2)
        gs.setFont(fontName,fontSize)
def _convert2pilp(im):
    # Convert to an 8-bit palettised ("P") image with an adaptive palette
    # and no dithering -- required by palette formats such as GIF/PICT.
    Image = _getImage()
    return im.convert("P", dither=Image.NONE, palette=Image.ADAPTIVE)
def _convert2pilL(im):
    # Convert to 8-bit greyscale ("L" mode).
    return im.convert("L")
def _convert2pil1(im):
    # Convert to 1-bit bilevel ("1" mode).
    return im.convert("1")
def _saveAsPICT(im,fn,fmt,transparent=None):
    # Serialize `im` as a Mac PICT via the C extension and write it either
    # to a file-like object or to a path derived from `fn` plus `fmt`.
    # NOTE(review): `transparent` is currently unused (see the commented
    # call below) -- confirm whether transparency support was dropped
    # deliberately.
    im = _convert2pilp(im)
    cols, rows = im.size
    #s = _renderPM.pil2pict(cols,rows,im.tostring(),im.im.getpalette(),transparent is not None and Color2Hex(transparent) or -1)
    s = _renderPM.pil2pict(cols,rows,im.tostring(),im.im.getpalette())
    if not hasattr(fn,'write'):
        # NOTE(review): the file handle is never explicitly closed; this
        # relies on CPython refcounting.
        open(os.path.splitext(fn)[0]+'.'+string.lower(fmt),'wb').write(s)
        if os.name=='mac':
            from reportlab.lib.utils import markfilename
            markfilename(fn,ext='PICT')
    else:
        fn.write(s)
BEZIER_ARC_MAGIC = 0.5522847498 #constant for drawing circular arcs w/ Beziers
class PMCanvas:
    # Thin Python wrapper around a _renderPM.gstate pixmap.  Unknown
    # attribute reads/writes are delegated to the gstate via
    # __getattr__/__setattr__, which is why __init__ stores its own
    # attributes through self.__dict__ directly.
    def __init__(self,w,h,dpi=72,bg=0xffffff,configPIL=None):
        '''configPIL dict is passed to image save method'''
        scale = dpi/72.0
        w = int(w*scale+0.5)
        h = int(h*scale+0.5)
        # __dict__ assignment bypasses __setattr__ (which forwards to _gs).
        self.__dict__['_gs'] = _renderPM.gstate(w,h,bg=bg)
        self.__dict__['_bg'] = bg
        self.__dict__['_baseCTM'] = (scale,0,0,scale,0,0)
        self.__dict__['_clipPaths'] = []
        self.__dict__['configPIL'] = configPIL
        self.__dict__['_dpi'] = dpi
        self.ctm = self._baseCTM
    def _drawTimeResize(self,w,h,bg=None):
        # Rebuild the underlying gstate at a new size, carrying over the
        # drawing state (A) and the current font.
        if bg is None: bg = self._bg
        self._drawing.width, self._drawing.height = w, h
        A = {'ctm':None, 'strokeWidth':None, 'strokeColor':None, 'lineCap':None, 'lineJoin':None, 'dashArray':None, 'fillColor':None}
        gs = self._gs
        fN,fS = gs.fontName, gs.fontSize
        for k in A.keys():
            A[k] = getattr(gs,k)
        del gs, self._gs
        gs = self.__dict__['_gs'] = _renderPM.gstate(w,h,bg=bg)
        for k in A.keys():
            setattr(self,k,A[k])
        gs.setFont(fN,fS)
    def toPIL(self):
        # Wrap the raw pixel buffer in a PIL RGB image.
        im = _getImage().new('RGB', size=(self._gs.width, self._gs.height))
        im.fromstring(self._gs.pixBuf)
        return im
    def saveToFile(self,fn,fmt=None):
        # Save the canvas to `fn` (path or file-like).  When fmt is None
        # it is derived from the filename extension.
        im = self.toPIL()
        if fmt is None:
            if type(fn) is not StringType:
                raise ValueError, "Invalid type '%s' for fn when fmt is None" % type(fn)
            fmt = os.path.splitext(fn)[1]
            if fmt.startswith('.'): fmt = fmt[1:]
        configPIL = self.configPIL or {}
        configPIL.setdefault('preConvertCB',None)
        preConvertCB=configPIL.pop('preConvertCB')
        if preConvertCB:
            im = preConvertCB(im)
        fmt = string.upper(fmt)
        # Per-format mode conversion / plugin loading.
        if fmt in ('GIF',):
            im = _convert2pilp(im)
        elif fmt in ('TIFF','TIFFP','TIFFL','TIF','TIFF1'):
            # Trailing P/L/1 selects palettised, greyscale or bilevel TIFF.
            if fmt.endswith('P'):
                im = _convert2pilp(im)
            elif fmt.endswith('L'):
                im = _convert2pilL(im)
            elif fmt.endswith('1'):
                im = _convert2pil1(im)
            fmt='TIFF'
        elif fmt in ('PCT','PICT'):
            return _saveAsPICT(im,fn,fmt,transparent=configPIL.get('transparent',None))
        elif fmt in ('PNG','BMP', 'PPM'):
            # Explicitly import the PIL plugins so frozen apps include them.
            if fmt=='PNG':
                try:
                    from PIL import PngImagePlugin
                except ImportError:
                    import PngImagePlugin
            elif fmt=='BMP':
                try:
                    from PIL import BmpImagePlugin
                except ImportError:
                    import BmpImagePlugin
        elif fmt in ('JPG','JPEG'):
            fmt = 'JPEG'
        elif fmt in ('GIF',):
            pass
        else:
            raise RenderPMError,"Unknown image kind %s" % fmt
        if fmt=='TIFF':
            # Optional transparency: build an alpha channel from the
            # requested transparent color.
            tc = configPIL.get('transparent',None)
            if tc:
                from PIL import ImageChops, Image
                T = 768*[0]
                for o, c in zip((0,256,512), tc.bitmap_rgb()):
                    T[o+c] = 255
                #if type(fn) is type(''): ImageChops.invert(im.point(T).convert('L').point(255*[0]+[255])).save(fn+'_mask.gif','GIF')
                im = Image.merge('RGBA', im.split()+(ImageChops.invert(im.point(T).convert('L').point(255*[0]+[255])),))
                #if type(fn) is type(''): im.save(fn+'_masked.gif','GIF')
            for a,d in ('resolution',self._dpi),('resolution unit','inch'):
                configPIL[a] = configPIL.get(a,d)
        configPIL.setdefault('chops_invert',0)
        if configPIL.pop('chops_invert'):
            from PIL import ImageChops
            im = ImageChops.invert(im)
        configPIL.setdefault('preSaveCB',None)
        preSaveCB=configPIL.pop('preSaveCB')
        if preSaveCB:
            im = preSaveCB(im)
        im.save(fn,fmt,**configPIL)
        if not hasattr(fn,'write') and os.name=='mac':
            from reportlab.lib.utils import markfilename
            markfilename(fn,ext=fmt)
    def saveToString(self,fmt='GIF'):
        # Render into an in-memory buffer and return the raw bytes.
        s = getStringIO()
        self.saveToFile(s,fmt=fmt)
        return s.getvalue()
    def _saveToBMP(self,f):
        '''
        Niki Spahiev, <niki@vintech.bg>, asserts that this is a respectable way to get BMP without PIL
        f is a file like object to which the BMP is written
        '''
        import struct
        gs = self._gs
        pix, width, height = gs.pixBuf, gs.width, gs.height
        # 54-byte BMP header (file header + BITMAPINFOHEADER), 24bpp.
        f.write(struct.pack('=2sLLLLLLhh24x','BM',len(pix)+54,0,54,40,width,height,1,24))
        rowb = width * 3
        # BMP stores rows bottom-up, hence the reversed iteration.
        for o in range(len(pix),0,-rowb):
            f.write(pix[o-rowb:o])
        f.write( '\0' * 14 )
    def setFont(self,fontName,fontSize,leading=None):
        # `leading` is accepted for canvas API compatibility but unused.
        _setFont(self._gs,fontName,fontSize)
    def __setattr__(self,name,value):
        # Delegate all attribute writes to the underlying gstate.
        setattr(self._gs,name,value)
    def __getattr__(self,name):
        # Delegate unknown attribute reads to the underlying gstate.
        return getattr(self._gs,name)
    def fillstrokepath(self,stroke=1,fill=1):
        if fill: self.pathFill()
        if stroke: self.pathStroke()
    def _bezierArcSegmentCCW(self, cx,cy, rx,ry, theta0, theta1):
        """compute the control points for a bezier arc with theta1-theta0 <= 90.
        Points are computed for an arc with angle theta increasing in the
        counter-clockwise (CCW) direction.  returns a tuple with starting point
        and 3 control points of a cubic bezier curve for the curvto opertator"""
        # Requires theta1 - theta0 <= 90 for a good approximation
        assert abs(theta1 - theta0) <= 90
        cos0 = cos(pi*theta0/180.0)
        sin0 = sin(pi*theta0/180.0)
        x0 = cx + rx*cos0
        y0 = cy + ry*sin0
        cos1 = cos(pi*theta1/180.0)
        sin1 = sin(pi*theta1/180.0)
        x3 = cx + rx*cos1
        y3 = cy + ry*sin1
        dx1 = -rx * sin0
        dy1 = ry * cos0
        #from pdfgeom
        halfAng = pi*(theta1-theta0)/(2.0 * 180.0)
        k = abs(4.0 / 3.0 * (1.0 - cos(halfAng) ) /(sin(halfAng)) )
        x1 = x0 + dx1 * k
        y1 = y0 + dy1 * k
        dx2 = -rx * sin1
        dy2 = ry * cos1
        x2 = x3 - dx2 * k
        y2 = y3 - dy2 * k
        return ((x0,y0), ((x1,y1), (x2,y2), (x3,y3)) )
    def bezierArcCCW(self, cx,cy, rx,ry, theta0, theta1):
        """return a set of control points for Bezier approximation to an arc
        with angle increasing counter clockwise. No requirement on (theta1-theta0) <= 90
        However, it must be true that theta1-theta0 > 0."""
        # I believe this is also clockwise
        # pretty much just like Robert Kern's pdfgeom.BezierArc
        angularExtent = theta1 - theta0
        # break down the arc into fragments of <=90 degrees
        if abs(angularExtent) <= 90.0:  # we just need one fragment
            angleList = [(theta0,theta1)]
        else:
            Nfrag = int( ceil( abs(angularExtent)/90.) )
            fragAngle = float(angularExtent)/ Nfrag  # this could be negative
            angleList = []
            for ii in range(Nfrag):
                a = theta0 + ii * fragAngle
                b = a + fragAngle  # hmm.. is I wonder if this is precise enought
                angleList.append((a,b))
        ctrlpts = []
        for (a,b) in angleList:
            if not ctrlpts:  # first time
                [(x0,y0), pts] = self._bezierArcSegmentCCW(cx,cy, rx,ry, a,b)
                ctrlpts.append(pts)
            else:
                [(tmpx,tmpy), pts] = self._bezierArcSegmentCCW(cx,cy, rx,ry, a,b)
                ctrlpts.append(pts)
        return ((x0,y0), ctrlpts)
    def addEllipsoidalArc(self, cx,cy, rx, ry, ang1, ang2):
        """adds an ellisesoidal arc segment to a path, with an ellipse centered
        on cx,cy and with radii (major & minor axes) rx and ry.  The arc is
        drawn in the CCW direction.  Requires: (ang2-ang1) > 0"""
        ((x0,y0), ctrlpts) = self.bezierArcCCW(cx,cy, rx,ry,ang1,ang2)
        self.lineTo(x0,y0)
        for ((x1,y1), (x2,y2),(x3,y3)) in ctrlpts:
            self.curveTo(x1,y1,x2,y2,x3,y3)
    def drawCentredString(self, x, y, text, text_anchor='middle'):
        # Nothing is drawn without a fill color.
        if self.fillColor is not None:
            textLen = stringWidth(text, self.fontName,self.fontSize)
            if text_anchor=='end':
                x -= textLen
            elif text_anchor=='middle':
                x -= textLen/2.
            elif text_anchor=='numeric':
                x -= numericXShift(text_anchor,text,textLen,self.fontName,self.fontSize)
            self.drawString(x,y,text)
    def drawRightString(self, text, x, y):
        # NOTE(review): argument order here is (text, x, y) while
        # drawCentredString takes (x, y, text) -- preserved for
        # backward compatibility; verify callers.
        self.drawCentredString(text,x,y,text_anchor='end')
    def drawString(self, x, y, text, _fontInfo=None):
        gs = self._gs
        if _fontInfo:
            fontName, fontSize = _fontInfo
        else:
            fontSize = gs.fontSize
            fontName = gs.fontName
        try:
            gfont=getFont(gs.fontName)
        except:
            gfont = None
        font = getFont(fontName)
        if font._dynamicFont:
            # Dynamic (TTF) fonts handle UTF-8 bytes directly.
            if isinstance(text,unicode): text = text.encode('utf8')
            gs.drawString(x,y,text)
        else:
            # Type 1 path: split the text into (font, bytes) runs via the
            # substitution fonts and draw each run, advancing x manually.
            fc = font
            if not isinstance(text,unicode):
                try:
                    text = text.decode('utf8')
                except UnicodeDecodeError,e:
                    # Enrich the decode error with surrounding context.
                    i,j = e.args[2:4]
                    raise UnicodeDecodeError(*(e.args[:4]+('%s\n%s-->%s<--%s' % (e.args[4],text[i-10:i],text[i:j],text[j:j+10]),)))
            FT = unicode2T1(text,[font]+font.substitutionFonts)
            n = len(FT)
            nm1 = n-1
            wscale = 0.001*fontSize
            for i in xrange(n):
                f, t = FT[i]
                if f!=fc:
                    _setFont(gs,f.fontName,fontSize)
                    fc = f
                gs.drawString(x,y,t)
                if i!=nm1:
                    x += wscale*sum(map(f.widths.__getitem__,map(ord,t)))
            # Restore the requested font if a substitution was active.
            if font!=fc:
                _setFont(gs,fontName,fontSize)
    def line(self,x1,y1,x2,y2):
        if self.strokeColor is not None:
            self.pathBegin()
            self.moveTo(x1,y1)
            self.lineTo(x2,y2)
            self.pathStroke()
    def rect(self,x,y,width,height,stroke=1,fill=1):
        self.pathBegin()
        self.moveTo(x, y)
        self.lineTo(x+width, y)
        self.lineTo(x+width, y + height)
        self.lineTo(x, y + height)
        self.pathClose()
        self.fillstrokepath(stroke=stroke,fill=fill)
    def roundRect(self, x, y, width, height, rx,ry):
        """rect(self, x, y, width, height, rx,ry):
        Draw a rectangle if rx or rx and ry are specified the corners are
        rounded with ellipsoidal arcs determined by rx and ry
        (drawn in the counter-clockwise direction)"""
        if rx==0: rx = ry
        if ry==0: ry = rx
        x2 = x + width
        y2 = y + height
        self.pathBegin()
        self.moveTo(x+rx,y)
        # Four quarter-arcs, one per corner, joined by implicit lineTo's.
        self.addEllipsoidalArc(x2-rx, y+ry, rx, ry, 270, 360 )
        self.addEllipsoidalArc(x2-rx, y2-ry, rx, ry, 0, 90)
        self.addEllipsoidalArc(x+rx, y2-ry, rx, ry, 90, 180)
        self.addEllipsoidalArc(x+rx, y+ry, rx, ry, 180, 270)
        self.pathClose()
        self.fillstrokepath()
    def circle(self, cx, cy, r):
        "add closed path circle with center cx,cy and axes r: counter-clockwise orientation"
        self.ellipse(cx,cy,r,r)
    def ellipse(self, cx,cy,rx,ry):
        """add closed path ellipse with center cx,cy and axes rx,ry: counter-clockwise orientation
        (remember y-axis increases downward) """
        # Four cubic Bezier segments using the standard circular-arc
        # approximation constant BEZIER_ARC_MAGIC.
        self.pathBegin()
        # first segment
        x0 = cx + rx  # (x0,y0) start pt
        y0 = cy
        x3 = cx  # (x3,y3) end pt of arc
        y3 = cy-ry
        x1 = cx+rx
        y1 = cy-ry*BEZIER_ARC_MAGIC
        x2 = x3 + rx*BEZIER_ARC_MAGIC
        y2 = y3
        self.moveTo(x0, y0)
        self.curveTo(x1,y1,x2,y2,x3,y3)
        # next segment
        x0 = x3
        y0 = y3
        x3 = cx-rx
        y3 = cy
        x1 = cx-rx*BEZIER_ARC_MAGIC
        y1 = cy-ry
        x2 = x3
        y2 = cy- ry*BEZIER_ARC_MAGIC
        self.curveTo(x1,y1,x2,y2,x3,y3)
        # next segment
        x0 = x3
        y0 = y3
        x3 = cx
        y3 = cy+ry
        x1 = cx-rx
        y1 = cy+ry*BEZIER_ARC_MAGIC
        x2 = cx -rx*BEZIER_ARC_MAGIC
        y2 = cy+ry
        self.curveTo(x1,y1,x2,y2,x3,y3)
        #last segment
        x0 = x3
        y0 = y3
        x3 = cx+rx
        y3 = cy
        x1 = cx+rx*BEZIER_ARC_MAGIC
        y1 = cy+ry
        x2 = cx+rx
        y2 = cy+ry*BEZIER_ARC_MAGIC
        self.curveTo(x1,y1,x2,y2,x3,y3)
        self.pathClose()
    def saveState(self):
        '''do nothing for compatibility'''
        pass
    def setFillColor(self,aColor):
        self.fillColor = Color2Hex(aColor)
        # Colors may optionally carry an alpha attribute.
        alpha = getattr(aColor,'alpha',None)
        if alpha is not None:
            self.fillOpacity = alpha
    def setStrokeColor(self,aColor):
        self.strokeColor = Color2Hex(aColor)
        alpha = getattr(aColor,'alpha',None)
        if alpha is not None:
            self.strokeOpacity = alpha
    restoreState = saveState
    # compatibility routines
    def setLineCap(self,cap):
        self.lineCap = cap
    def setLineJoin(self,join):
        self.lineJoin = join
    def setLineWidth(self,width):
        self.strokeWidth = width
def drawToPMCanvas(d, dpi=72, bg=0xffffff, configPIL=None, showBoundary=rl_config._unset_):
    # Render drawing `d` into a fresh PMCanvas at the requested dpi/background
    # and return the canvas.
    d = renderScaledDrawing(d)
    c = PMCanvas(d.width, d.height, dpi=dpi, bg=bg, configPIL=configPIL)
    draw(d, c, 0, 0, showBoundary=showBoundary)
    return c
def drawToPIL(d, dpi=72, bg=0xffffff, configPIL=None, showBoundary=rl_config._unset_):
    # Render drawing `d` and return the result as a PIL RGB image.
    return drawToPMCanvas(d, dpi=dpi, bg=bg, configPIL=configPIL, showBoundary=showBoundary).toPIL()
def drawToPILP(d, dpi=72, bg=0xffffff, configPIL=None, showBoundary=rl_config._unset_):
    # Render drawing `d` and return an 8-bit palettised ("P") PIL image.
    Image = _getImage()
    im = drawToPIL(d, dpi=dpi, bg=bg, configPIL=configPIL, showBoundary=showBoundary)
    return im.convert("P", dither=Image.NONE, palette=Image.ADAPTIVE)
def drawToFile(d,fn,fmt='GIF', dpi=72, bg=0xffffff, configPIL=None, showBoundary=rl_config._unset_):
    '''create a pixmap and draw drawing, d to it then save as a file
    configPIL dict is passed to image save method'''
    # `fn` may be a filename or a file-like object (see PMCanvas.saveToFile).
    c = drawToPMCanvas(d, dpi=dpi, bg=bg, configPIL=configPIL, showBoundary=showBoundary)
    c.saveToFile(fn,fmt)
def drawToString(d,fmt='GIF', dpi=72, bg=0xffffff, configPIL=None, showBoundary=rl_config._unset_):
    '''create a pixmap, draw drawing d to it and return the image as a string
    configPIL dict is passed to image save method'''
    s = getStringIO()
    # Fix: showBoundary was previously accepted but silently dropped; it is
    # now forwarded so string output matches drawToFile output.
    drawToFile(d,s,fmt=fmt, dpi=dpi, bg=bg, configPIL=configPIL, showBoundary=showBoundary)
    return s.getvalue()
save = drawToFile
def test(verbose=True):
    '''Render every test drawing from reportlab.graphics.testshapes into
    ./pmout in several formats and build an index.html linking the results.
    Command line args may name specific drawings and/or pass --ext=... to
    restrict the output formats.'''
    def ext(x):
        # Map the 'tiff' format name to the conventional .tif extension.
        if x=='tiff': x='tif'
        return x
    #grab all drawings from the test module and write out.
    #make a page of links in HTML to assist viewing.
    import os
    from reportlab.graphics import testshapes
    getAllTestDrawings = testshapes.getAllTestDrawings
    drawings = []
    if not os.path.isdir('pmout'):
        os.mkdir('pmout')
    htmlTop = """<html><head><title>renderPM output results</title></head>
<body>
<h1>renderPM results of output</h1>
"""
    htmlBottom = """</body>
</html>
"""
    html = [htmlTop]
    names = {}
    argv = sys.argv[1:]
    # --ext=gif,png style args select formats; remaining args select drawings.
    E = [a for a in argv if a.startswith('--ext=')]
    if not E:
        E = ['gif','tiff', 'png', 'jpg', 'pct', 'py', 'svg']
    else:
        for a in E:
            argv.remove(a)
        E = (','.join([a[6:] for a in E])).split(',')
    #print in a loop, with their doc strings
    for (drawing, docstring, name) in getAllTestDrawings(doTTF=hasattr(_renderPM,'ft_get_face')):
        # Disambiguate repeated drawing names with a .NN suffix.
        i = names[name] = names.setdefault(name,0)+1
        if i>1: name += '.%02d' % (i-1)
        if argv and name not in argv: continue
        fnRoot = name
        w = int(drawing.width)
        h = int(drawing.height)
        html.append('<hr><h2>Drawing %s</h2>\n<pre>%s</pre>' % (name, docstring))
        for k in E:
            if k in ['gif','png','jpg','pct']:
                html.append('<p>%s format</p>\n' % string.upper(k))
            try:
                filename = '%s.%s' % (fnRoot, ext(k))
                fullpath = os.path.join('pmout', filename)
                if os.path.isfile(fullpath):
                    os.remove(fullpath)
                if k=='pct':
                    from reportlab.lib.colors import white
                    drawToFile(drawing,fullpath,fmt=k,configPIL={'transparent':white})
                elif k in ['py','svg']:
                    drawing.save(formats=['py','svg'],outDir='pmout',fnRoot=fnRoot)
                else:
                    drawToFile(drawing,fullpath,fmt=k)
                if k in ['gif','png','jpg']:
                    html.append('<img src="%s" border="1"><br>\n' % filename)
                elif k=='py':
                    html.append('<a href="%s">python source</a><br>\n' % filename)
                elif k=='svg':
                    html.append('<a href="%s">SVG</a><br>\n' % filename)
                if verbose: print 'wrote',fullpath
            except AttributeError:
                print 'Problem drawing %s file'%k
                raise
        if os.environ.get('RL_NOEPSPREVIEW','0')=='1': drawing.__dict__['preview'] = 0
        drawing.save(formats=['eps','pdf'],outDir='pmout',fnRoot=fnRoot)
    html.append(htmlBottom)
    htmlFileName = os.path.join('pmout', 'index.html')
    open(htmlFileName, 'w').writelines(html)
    if sys.platform=='mac':
        from reportlab.lib.utils import markfilename
        markfilename(htmlFileName,ext='HTML')
    if verbose: print 'wrote %s' % htmlFileName
if __name__=='__main__':
test()
| {
"content_hash": "ea36fda010ad22a60d2ae2e5652caef3",
"timestamp": "",
"source": "github",
"line_count": 743,
"max_line_length": 133,
"avg_line_length": 36.738896366083445,
"alnum_prop": 0.5439059237278822,
"repo_name": "nickpack/reportlab",
"id": "b82779953dded5989079269e666d0afa28f5e511",
"size": "27470",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/reportlab/graphics/renderPM.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "782870"
},
{
"name": "C++",
"bytes": "1390"
},
{
"name": "Java",
"bytes": "6333"
},
{
"name": "Python",
"bytes": "3275159"
},
{
"name": "Shell",
"bytes": "1736"
}
],
"symlink_target": ""
} |
from .vscl import McAfeeVSCL
from ..interface import AntivirusPluginInterface
from irma.common.plugins import PluginMetaClass, PlatformDependency
from irma.common.base.utils import IrmaProbeType
class McAfeeVSCLPlugin(AntivirusPluginInterface, metaclass=PluginMetaClass):
    """Probe plugin declaration for the McAfee VirusScan Command Line
    (VSCL) antivirus on Linux.

    Scanning behaviour lives in :class:`McAfeeVSCL`; this class only
    declares the metadata fields (presumably consumed by
    ``PluginMetaClass`` at class-creation time -- confirm in the plugin
    framework) and binds the concrete module class.
    """
    # =================
    # plugin metadata
    # =================
    _plugin_name_ = "McAfeeVSCL"
    _plugin_display_name_ = McAfeeVSCL.name
    _plugin_author_ = "IRMA (c) Quarkslab"
    _plugin_version_ = "1.0.0"
    _plugin_category_ = IrmaProbeType.antivirus
    _plugin_description_ = "Plugin for McAfee VirusScan Command Line " \
                           "(VSCL) scanner on Linux"
    # Only usable on Linux hosts.
    _plugin_dependencies_ = [
        PlatformDependency('linux')
    ]
    # ================
    # interface data
    # ================
    module_cls = McAfeeVSCL
| {
"content_hash": "2ede695d50207813e68f336241ddcaa4",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 76,
"avg_line_length": 29.06896551724138,
"alnum_prop": 0.6215895610913404,
"repo_name": "quarkslab/irma",
"id": "55bbbccaa544c55a10a9a8ac77ca0a2483ea53fe",
"size": "1368",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "probe/modules/antivirus/mcafee/plugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "79"
},
{
"name": "CSS",
"bytes": "86535"
},
{
"name": "DIGITAL Command Language",
"bytes": "68"
},
{
"name": "Gherkin",
"bytes": "2366"
},
{
"name": "HTML",
"bytes": "26577"
},
{
"name": "JavaScript",
"bytes": "1774854"
},
{
"name": "Jinja",
"bytes": "2672"
},
{
"name": "Less",
"bytes": "13774"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "PowerShell",
"bytes": "15660"
},
{
"name": "Python",
"bytes": "797592"
},
{
"name": "Shell",
"bytes": "61907"
}
],
"symlink_target": ""
} |
import json
import os
import subprocess
from .logger import Logger
log = Logger()
class TmuxException(Exception):
    # Raised when invoking tmux fails or its output cannot be parsed.
    pass
class Tmux(object):
    """
    Tmux controller

    Thin wrapper around the ``tmux`` binary: builds command lines, asks
    tmux to render fields as JSON via its ``-F`` format option, and
    decodes the answer into plain dictionaries.
    """

    def command(self, cmd, formats=None, many=False):
        """
        Send custom Tmux command and return rich information

        :param cmd: tmux arguments, without the leading ``tmux``
            (note: the list is modified in place)
        :param formats: optional list of tmux format variable names the
            command should report (rendered as a JSON object via ``-F``)
        :param many: True when the command prints one record per line
        :return: (stdout, stderr); stdout is the decoded JSON payload
            (empty output is returned as-is), stderr a stripped string
        :raises TmuxException: when tmux cannot be executed or its
            output cannot be decoded
        """
        cmd.insert(0, 'tmux')
        if formats:
            # Build '{"key": "#{key}", ...}' so tmux itself emits JSON.
            fmt = '{'
            for key in formats:
                fmt += ''.join(['"', key, '": ', '"#{', key, '}", '])
            fmt = fmt[0:-2] + '}'
            cmd.append('-F')
            cmd.append(fmt)
        try:
            process = subprocess.Popen(
                cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            process.wait()
            stdout = process.stdout.read()
            process.stdout.close()
            stderr = process.stderr.read()
            process.stderr.close()
            if stdout:
                # tmux prints one JSON object per line; join them with
                # commas and, for 'many', wrap in brackets -> JSON array.
                lines = ','.join(stdout.decode('utf_8').split('\n')) \
                    .rstrip(',')
                stdout = json.loads('[' + lines + ']' if many else lines)
            if stderr:
                stderr = stderr.decode('utf_8').strip()
            return stdout, stderr
        except ValueError:
            raise TmuxException('Unable to serialize Tmux\'s response, '
                                'please report bug.')
        except Exception:
            raise TmuxException('Unable to execute Tmux, aborting.')

    def within_session(self):
        """
        Returns true if currently within a Tmux session
        """
        # Bug fix: os.environ.get('TMUX') returns None when the variable
        # is unset, and ``None != ''`` evaluated to True outside tmux.
        # Defaulting to '' makes a missing variable mean "not in tmux".
        return os.environ.get('TMUX', '') != ''

    def has_session(self, session_name):
        """
        Returns true if specified session currently exists

        :param session_name: The session name to match
        """
        try:
            cmds = ['tmux', 'has-session', '-t', session_name]
            # Compatibility code, python 2.x doesn't have subprocess.DEVNULL
            with open(os.devnull, 'wb') as DEVNULL:
                code = subprocess.check_call(cmds, stderr=DEVNULL)
        except subprocess.CalledProcessError as e:
            code = e.returncode
        return code == 0

    def new_session(self, session_name, win_name=''):
        """
        Create a new Tmux session

        :param session_name: New session's name
        :param win_name: The window's name within the new session
        :return: (session, window, pane) dictionaries keyed by the short
            field names (e.g. 'id', 'name', 'index')
        :raises TmuxException: when tmux reports an error
        """
        cmds = ['new-session', '-Pd', '-s', session_name]
        if win_name:
            cmds.extend(['-n', win_name])
        output, errors = self.command(
            cmds,
            ['session_id', 'session_name', 'session_windows',
             'window_id', 'window_index', 'pane_index', 'pane_id'])
        if errors:
            raise TmuxException(errors)
        # Dispatch 'session_*'/'window_*'/'pane_*' keys into three dicts
        # keyed by the part after the prefix.
        session = {}
        window = {}
        pane = {}
        for k, v in output.items():
            short_name = k.split('_')[1]
            if k.startswith('window_'):
                window[short_name] = v
            elif k.startswith('pane_'):
                pane[short_name] = v
            else:
                session[short_name] = v
        return session, window, pane

    def new_window(self, session_name, win_name=''):
        """
        Create a new Tmux window

        :param session_name: Target session name
        :param win_name: The new window's name
        :return: (window, pane) dictionaries keyed by short field names
        :raises TmuxException: when tmux reports an error
        """
        cmds = ['new-window', '-Pd', '-t', session_name]
        if win_name:
            cmds.extend(['-n', win_name])
        output, errors = self.command(
            cmds,
            ['window_id', 'window_name', 'window_panes', 'window_active',
             'window_index', 'window_layout', 'pane_index', 'pane_id'])
        if errors:
            # Consistency fix: raise TmuxException (a subclass of
            # Exception, so existing handlers still catch it) like every
            # other method, instead of a bare Exception.
            raise TmuxException('Error creating window: {}'.format(errors))
        window = {}
        pane = {}
        for k, v in output.items():
            short_name = k.split('_')[1]
            if k.startswith('pane_'):
                pane[short_name] = v
            else:
                window[short_name] = v
        return window, pane

    def new_pane(self, session_name, window_id, pane_id):
        """
        Create a new Tmux pane by splitting an existing one horizontally

        :param session_name: Target session name
        :param window_id: Window to split from
        :param pane_id: Pane to split from
        :return: Pane information dictionary keyed by short field names
        :raises TmuxException: when tmux reports an error
        """
        output, errors = self.command(
            ['split-window', '-h', '-P', '-t',
             '{}:{}.{}'.format(session_name, window_id, str(pane_id))],
            ['pane_id', 'pane_index', 'pane_active', 'pane_current_path',
             'pane_start_command', 'pane_current_command', 'pane_title'])
        if errors:
            raise TmuxException(errors)
        pane = {}
        for k, v in output.items():
            short_name = k.split('_')[1]
            pane[short_name] = v
        return pane

    def kill_session(self, session_name):
        """
        Kill a specified Tmux session
        """
        return self.command(['kill-session', '-t', session_name])

    def set_layout(self, session_name, win_name, layout=None):
        """
        Sets a Tmux session's specific window to a different layout

        :param session_name: Target session name
        :param win_name: Target window name
        :param layout: The layout name (even-horizontal, even-vertical,
                       main-horizontal, main-vertical, tiled);
                       defaults to 'tiled'
        """
        return self.command([
            'select-layout', '-t',
            '{}:{}'.format(session_name, win_name), layout or 'tiled'
        ])

    def send_keys(self, session_name, win_name, pane_index, cmd, enter=True):
        """
        Sends a Tmux session custom keys

        :param session_name: Target session name
        :param win_name: Target window name
        :param pane_index: Target pane index
        :param cmd: The string to enter (no-op when empty)
        :param enter: Finish with a carriage-return?
        """
        if cmd:
            return self.command([
                'send-keys', '-Rt',
                '{}:{}.{}'.format(session_name, win_name, str(pane_index)),
                cmd, 'C-m' if enter else ''
            ])

    def attach(self, session_name):
        """
        Attach to an existing Tmux session: switches the client when
        already inside tmux, attaches otherwise.

        :param session_name: Target session name (no-op when empty)
        """
        if session_name:
            cmd = 'switch-client' if self.within_session() \
                else 'attach-session'
            return self.command([cmd, '-t', session_name])

    def get_windows(self, session_name):
        """
        Retrieve information for all windows in a session

        :param session_name: Target session name
        :return: (windows, errors) as produced by :meth:`command`
        """
        return self.command(
            ['list-windows', '-t', session_name],
            ['window_id', 'window_name', 'window_panes', 'window_active',
             'window_index', 'window_layout'],
            many=True)

    def get_panes(self, session_name, window_name):
        """
        Retrieve information for all panes in a window

        :param session_name: Target session name
        :param window_name: Target window name
        :return: (panes, errors) as produced by :meth:`command`
        """
        return self.command(
            ['list-panes', '-t', ':'.join([session_name, window_name])],
            ['pane_id', 'pane_active', 'pane_index', 'pane_start_command',
             'pane_current_command', 'pane_current_path', 'pane_title'],
            many=True)
| {
"content_hash": "85be68aa063f09f6e30a06dcd5b1309a",
"timestamp": "",
"source": "github",
"line_count": 234,
"max_line_length": 78,
"avg_line_length": 32.32905982905983,
"alnum_prop": 0.5187045604758758,
"repo_name": "rafi/mx",
"id": "c79e81276927f487ebf2186d1d648872297dedc5",
"size": "7589",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/mx/tmux.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "319"
},
{
"name": "Python",
"bytes": "30856"
}
],
"symlink_target": ""
} |
import concurrent.futures
import urllib.request
URLS = ['http://www.foxnews.com/',
'http://www.cnn.com/',
'http://europe.wsj.com/',
'http://www.bbc.co.uk/',
'http://dstcontrols.com/']
# Retrieve a single page and report the URL and contents
def load_url(url, timeout):
    """Fetch *url* and return the raw response body as bytes.

    :param url: address to retrieve
    :param timeout: socket timeout in seconds
    """
    with urllib.request.urlopen(url, timeout=timeout) as response:
        return response.read()
def main():
    """Fetch every page in URLS concurrently and print its size.

    Uses a thread pool since the work is I/O bound; exceptions from
    individual downloads are reported per-URL instead of aborting.
    """
    # A with statement ensures threads are cleaned up promptly.
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
        # Start the load operations and mark each future with its URL.
        future_to_url = {executor.submit(load_url, url, 60): url
                         for url in URLS}
        for future in concurrent.futures.as_completed(future_to_url):
            url = future_to_url[future]
            try:
                data = future.result()
            except Exception as exc:
                print('%r generated an exception: %s' % (url, exc))
            else:
                print('%r page is %d bytes' % (url, len(data)))


if __name__ == '__main__':
    # Guard so importing this example does not trigger network I/O;
    # running it as a script behaves exactly as before.
    main()
| {
"content_hash": "ba1ed911373b7bf8d1de583b6dc94b31",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 77,
"avg_line_length": 38.53846153846154,
"alnum_prop": 0.6407185628742516,
"repo_name": "dstcontrols/osisoftpy",
"id": "8124d21c246aea0fe79f62265f89508bc6328e3e",
"size": "1821",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/concurrency_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "235835"
},
{
"name": "Shell",
"bytes": "2009"
}
],
"symlink_target": ""
} |
r'''
Creating the needed environments for creating the pre-compiled distribution on Windows:
See:
build_tools\pydevd_release_process.txt
for building binaries/release process.
'''
from __future__ import unicode_literals
import os
import subprocess
import sys
# Conda environment roots; overridable via the MINICONDA32_ENVS /
# MINICONDA64_ENVS environment variables.
miniconda32_envs = os.getenv('MINICONDA32_ENVS', r'C:\tools\Miniconda32\envs')
miniconda64_envs = os.getenv('MINICONDA64_ENVS', r'C:\tools\Miniconda\envs')
# One interpreter per (python version, bitness) combination to build for;
# environment names follow the 'py<ver>_<bits>' convention relied on by
# extract_version().
python_installations = [
    r'%s\py27_32\Scripts\python.exe' % miniconda32_envs,
    r'%s\py35_32\Scripts\python.exe' % miniconda32_envs,
    r'%s\py36_32\Scripts\python.exe' % miniconda32_envs,
    r'%s\py37_32\Scripts\python.exe' % miniconda32_envs,
    r'%s\py38_32\Scripts\python.exe' % miniconda32_envs,
    r'%s\py27_64\Scripts\python.exe' % miniconda64_envs,
    r'%s\py35_64\Scripts\python.exe' % miniconda64_envs,
    r'%s\py36_64\Scripts\python.exe' % miniconda64_envs,
    r'%s\py37_64\Scripts\python.exe' % miniconda64_envs,
    r'%s\py38_64\Scripts\python.exe' % miniconda64_envs,
]
# Repository root: parent of the build_tools directory holding this file.
root_dir = os.path.dirname(os.path.dirname(__file__))
def list_binaries():
    """Yield the file names of compiled .pyd binaries currently present
    in the _pydevd_bundle directory."""
    bundle_dir = os.path.join(root_dir, '_pydevd_bundle')
    for entry in os.listdir(bundle_dir):
        if entry.endswith('.pyd'):
            yield entry
def extract_version(python_install):
    """Return the version tag encoded in a conda interpreter path.

    e.g. r'C:\\...\\envs\\py37_32\\Scripts\\python.exe' -> '37_32'
    (the environment directory name minus its 'py' prefix).
    """
    env_name = python_install.split('\\')[-3]
    return env_name[2:]
def main():
    """Regenerate the generated sources, then build a version-suffixed
    pydevd_cython binary (and, for 3.6/3.7/3.8 interpreters, a
    frame-evaluator binary) with every configured interpreter."""
    from generate_code import generate_dont_trace_files
    from generate_code import generate_cython_module
    # First, make sure that our code is up to date.
    generate_dont_trace_files()
    generate_cython_module()
    # Fail fast if any configured interpreter is missing.
    for python_install in python_installations:
        assert os.path.exists(python_install)
    from build import remove_binaries
    remove_binaries(['.pyd'])
    # remove_binaries must have deleted every .pyd; a leftover would be
    # shipped stale, so treat it as fatal.
    for f in list_binaries():
        raise AssertionError('Binary not removed: %s' % (f,))
    for i, python_install in enumerate(python_installations):
        # Target name carries platform + version, e.g. pydevd_cython_win32_37_32
        new_name = 'pydevd_cython_%s_%s' % (sys.platform, extract_version(python_install))
        args = [
            python_install, os.path.join(root_dir, 'build_tools', 'build.py'), '--no-remove-binaries', '--target-pyd-name=%s' % new_name, '--force-cython']
        if i != 0:
            # Sources were already regenerated during the first iteration.
            args.append('--no-regenerate-files')
        version_number = extract_version(python_install)
        if version_number.startswith('36') or version_number.startswith('37') or version_number.startswith('38'):
            # The frame-evaluator module is only built for 3.6-3.8 envs.
            name_frame_eval = 'pydevd_frame_evaluator_%s_%s' % (sys.platform, extract_version(python_install))
            args.append('--target-pyd-frame-eval=%s' % name_frame_eval)
        print('Calling: %s' % (' '.join(args)))
        subprocess.check_call(args)
# Script entry point; the string below documents the expected invocation.
if __name__ == '__main__':
    main()
'''
To run do:
cd /D x:\PyDev.Debugger
set PYTHONPATH=x:\PyDev.Debugger
C:\tools\Miniconda32\envs\py27_32\python build_tools\build_binaries_windows.py
'''
| {
"content_hash": "4b4e04f216ca830a3e310825eacdbfad",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 155,
"avg_line_length": 32.51724137931034,
"alnum_prop": 0.6687875574407918,
"repo_name": "leafclick/intellij-community",
"id": "73b265a97544ad68a9db1cb46178c61abf8d7279",
"size": "2829",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/helpers/pydev/build_tools/build_binaries_windows.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
import datetime
from dogpile.cache import make_region
#: Expiration time for show caching
SHOW_EXPIRATION_TIME = datetime.timedelta(weeks=3).total_seconds()
#: Expiration time for episode caching
EPISODE_EXPIRATION_TIME = datetime.timedelta(days=3).total_seconds()
#: Expiration time for scraper searches
REFINER_EXPIRATION_TIME = datetime.timedelta(weeks=1).total_seconds()
# Shared dogpile.cache region; created unconfigured here — the
# application is expected to call region.configure(...) before use.
region = make_region()
| {
"content_hash": "59be1cf6b2e1406fed19d23ad7441a9f",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 69,
"avg_line_length": 26.933333333333334,
"alnum_prop": 0.7846534653465347,
"repo_name": "fernandog/subliminal",
"id": "244ba9531d2bf035b7dff7261e4ff5cefa6d1c09",
"size": "428",
"binary": false,
"copies": "38",
"ref": "refs/heads/develop",
"path": "subliminal/cache.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "327629"
}
],
"symlink_target": ""
} |
import json
from rest_framework.response import Response
from rest_framework.views import APIView
from cigar_example.restapi import urls
from rest_framework_docs.docs import DocumentationGenerator
class ApiDocumentation(APIView):
    """
    Gets the documentation for the API endpoints
    """

    def get(self, *args, **kwargs):
        """Generate docs for the registered urlpatterns and return them
        as a JSON response."""
        generator = DocumentationGenerator(urls.urlpatterns)
        return Response(json.loads(generator.get_docs()))
| {
"content_hash": "3d8bfc321da733f9f3333203aace47e2",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 66,
"avg_line_length": 26.176470588235293,
"alnum_prop": 0.7438202247191011,
"repo_name": "marcgibbons/django-rest-framework-docs",
"id": "220effb02d5ea85b81cbc9695aee846a2500a73f",
"size": "445",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cigar_example/cigar_example/app/views.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "25929"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.