Dataset columns (name, type, and observed length/value range):

    partition          stringclasses   3 values
    func_name          stringlengths   1 to 134
    docstring          stringlengths   1 to 46.9k
    path               stringlengths   4 to 223
    original_string    stringlengths   75 to 104k
    code               stringlengths   75 to 104k
    docstring_tokens   listlengths     1 to 1.97k
    repo               stringlengths   7 to 55
    language           stringclasses   1 value
    url                stringlengths   87 to 315
    code_tokens        listlengths     19 to 28.4k
    sha                stringlengths   40 to 40
test
Vector.step_towards
returns the vector moved one step in the direction of the other, potentially diagonally.
examples/robots.py
def step_towards(self, other):
    '''returns the vector moved one step in the direction of the other,
    potentially diagonally.'''
    return self + Vector(
        (
            (self[0] < other[0]) - (self[0] > other[0]),
            (self[1] < other[1]) - (self[1] > other[1]),
        )
    )
[ "returns", "the", "vector", "moved", "one", "step", "in", "the", "direction", "of", "the", "other", "potentially", "diagonally", "." ]
ingolemo/python-lenses
python
https://github.com/ingolemo/python-lenses/blob/a3a6ed0a31f6674451e542e7380a8aa16e6f8edf/examples/robots.py#L58-L67
[ "def", "step_towards", "(", "self", ",", "other", ")", ":", "return", "self", "+", "Vector", "(", "(", "(", "self", "[", "0", "]", "<", "other", "[", "0", "]", ")", "-", "(", "self", "[", "0", "]", ">", "other", "[", "0", "]", ")", ",", "(", "self", "[", "1", "]", "<", "other", "[", "1", "]", ")", "-", "(", "self", "[", "1", "]", ">", "other", "[", "1", "]", ")", ",", ")", ")" ]
a3a6ed0a31f6674451e542e7380a8aa16e6f8edf
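The only non-obvious part of step_towards above is the boolean-subtraction idiom: Python booleans are integers, so (a < b) - (a > b) evaluates to -1, 0 or 1, i.e. the sign of b - a. A minimal self-contained sketch of that idiom (step_component is a hypothetical helper name, not part of the example above):

def step_component(current, target):
    # Booleans are ints in Python, so this is -1, 0 or +1: the direction
    # in which `current` must move to approach `target` by one step.
    return (current < target) - (current > target)

print(step_component(2, 5))   # 1
print(step_component(5, 5))   # 0
print(step_component(7, 5))   # -1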
test
GameState.handle_input
Takes a single character string as input and alters the game state according to that input. Mostly, this means moving the player around. Returns a new game state and boolean indicating whether the input had an effect on the state.
examples/robots.py
def handle_input(self, input):
    '''Takes a single character string as input and alters the game state
    according to that input. Mostly, this means moving the player around.
    Returns a new game state and boolean indicating whether the input had
    an effect on the state.'''
    dirs = {
        'h': (-1, 0),
        'j': (0, 1),
        'k': (0, -1),
        'l': (1, 0),
        'y': (-1, -1),
        'u': (1, -1),
        'n': (1, 1),
        'b': (-1, 1),
    }
    if input in dirs:
        new_self = (lens.player + dirs[input])(self)
        if not new_self.player.inside():
            return self, False
        return new_self, True
    elif input == '.':
        return self, True
    elif input == 'q':
        return self.end_game(), False
    elif input == 't':
        self = lens.player.set(Vector.random())(self)
        return self, True
    else:
        return self, False
[ "Takes", "a", "single", "character", "string", "as", "input", "and", "alters", "the", "game", "state", "according", "to", "that", "input", ".", "Mostly", "this", "means", "moving", "the", "player", "around", ".", "Returns", "a", "new", "game", "state", "and", "boolean", "indicating", "whether", "the", "input", "had", "an", "effect", "on", "the", "state", "." ]
ingolemo/python-lenses
python
https://github.com/ingolemo/python-lenses/blob/a3a6ed0a31f6674451e542e7380a8aa16e6f8edf/examples/robots.py#L81-L111
[ "def", "handle_input", "(", "self", ",", "input", ")", ":", "dirs", "=", "{", "'h'", ":", "(", "-", "1", ",", "0", ")", ",", "'j'", ":", "(", "0", ",", "1", ")", ",", "'k'", ":", "(", "0", ",", "-", "1", ")", ",", "'l'", ":", "(", "1", ",", "0", ")", ",", "'y'", ":", "(", "-", "1", ",", "-", "1", ")", ",", "'u'", ":", "(", "1", ",", "-", "1", ")", ",", "'n'", ":", "(", "1", ",", "1", ")", ",", "'b'", ":", "(", "-", "1", ",", "1", ")", ",", "}", "if", "input", "in", "dirs", ":", "new_self", "=", "(", "lens", ".", "player", "+", "dirs", "[", "input", "]", ")", "(", "self", ")", "if", "not", "new_self", ".", "player", ".", "inside", "(", ")", ":", "return", "self", ",", "False", "return", "new_self", ",", "True", "elif", "input", "==", "'.'", ":", "return", "self", ",", "True", "elif", "input", "==", "'q'", ":", "return", "self", ".", "end_game", "(", ")", ",", "False", "elif", "input", "==", "'t'", ":", "self", "=", "lens", ".", "player", ".", "set", "(", "Vector", ".", "random", "(", ")", ")", "(", "self", ")", "return", "self", ",", "True", "else", ":", "return", "self", ",", "False" ]
a3a6ed0a31f6674451e542e7380a8aa16e6f8edf
test
GameState.advance_robots
Produces a new game state in which the robots have advanced towards the player by one step. Handles the robots crashing into one another too.
examples/robots.py
def advance_robots(self):
    '''Produces a new game state in which the robots have advanced towards
    the player by one step. Handles the robots crashing into one another
    too.'''

    # move the robots towards the player
    self = lens.robots.Each().call_step_towards(self.player)(self)

    # robots in the same place are crashes
    self = lens.crashes.call_union(duplicates(self.robots))(self)

    # remove crashed robots
    self = lens.robots.modify(lambda r: list(set(r) - self.crashes))(self)

    return self
[ "Produces", "a", "new", "game", "state", "in", "which", "the", "robots", "have", "advanced", "towards", "the", "player", "by", "one", "step", ".", "Handles", "the", "robots", "crashing", "into", "one", "another", "too", "." ]
ingolemo/python-lenses
python
https://github.com/ingolemo/python-lenses/blob/a3a6ed0a31f6674451e542e7380a8aa16e6f8edf/examples/robots.py#L113-L125
[ "def", "advance_robots", "(", "self", ")", ":", "# move the robots towards the player", "self", "=", "lens", ".", "robots", ".", "Each", "(", ")", ".", "call_step_towards", "(", "self", ".", "player", ")", "(", "self", ")", "# robots in the same place are crashes", "self", "=", "lens", ".", "crashes", ".", "call_union", "(", "duplicates", "(", "self", ".", "robots", ")", ")", "(", "self", ")", "# remove crashed robots", "self", "=", "lens", ".", "robots", ".", "modify", "(", "lambda", "r", ":", "list", "(", "set", "(", "r", ")", "-", "self", ".", "crashes", ")", ")", "(", "self", ")", "return", "self" ]
a3a6ed0a31f6674451e542e7380a8aa16e6f8edf
test
GameState.check_game_end
Checks for the game's win/lose conditions and 'alters' the game state to reflect the condition found. If the game has not been won or lost then it just returns the game state unaltered.
examples/robots.py
def check_game_end(self):
    '''Checks for the game's win/lose conditions and 'alters' the game
    state to reflect the condition found. If the game has not been won or
    lost then it just returns the game state unaltered.'''
    if self.player in self.crashes.union(self.robots):
        return self.end_game('You Died!')
    elif not self.robots:
        return self.end_game('You Win!')
    else:
        return self
[ "Checks", "for", "the", "game", "s", "win", "/", "lose", "conditions", "and", "alters", "the", "game", "state", "to", "reflect", "the", "condition", "found", ".", "If", "the", "game", "has", "not", "been", "won", "or", "lost", "then", "it", "just", "returns", "the", "game", "state", "unaltered", "." ]
ingolemo/python-lenses
python
https://github.com/ingolemo/python-lenses/blob/a3a6ed0a31f6674451e542e7380a8aa16e6f8edf/examples/robots.py#L127-L138
[ "def", "check_game_end", "(", "self", ")", ":", "if", "self", ".", "player", "in", "self", ".", "crashes", ".", "union", "(", "self", ".", "robots", ")", ":", "return", "self", ".", "end_game", "(", "'You Died!'", ")", "elif", "not", "self", ".", "robots", ":", "return", "self", ".", "end_game", "(", "'You Win!'", ")", "else", ":", "return", "self" ]
a3a6ed0a31f6674451e542e7380a8aa16e6f8edf
test
GameState.end_game
Returns a completed game state object, setting an optional message to display after the game is over.
examples/robots.py
def end_game(self, message=''):
    '''Returns a completed game state object, setting an optional message
    to display after the game is over.'''
    return lens.running.set(False)(lens.message.set(message)(self))
[ "Returns", "a", "completed", "game", "state", "object", "setting", "an", "optional", "message", "to", "display", "after", "the", "game", "is", "over", "." ]
ingolemo/python-lenses
python
https://github.com/ingolemo/python-lenses/blob/a3a6ed0a31f6674451e542e7380a8aa16e6f8edf/examples/robots.py#L140-L144
[ "def", "end_game", "(", "self", ",", "message", "=", "''", ")", ":", "return", "lens", ".", "running", ".", "set", "(", "False", ")", "(", "lens", ".", "message", ".", "set", "(", "message", ")", "(", "self", ")", ")" ]
a3a6ed0a31f6674451e542e7380a8aa16e6f8edf
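end_game composes two immutable updates with the python-lenses `lens` object. A minimal sketch of that set-and-copy style, assuming the lenses package is installed and using a plain dict purely as illustrative state:

from lenses import lens

state = {'running': True, 'message': ''}

# Each .set(...) builds a function; applying it to `state` returns an
# updated copy and leaves the original untouched.
ended = lens['running'].set(False)(lens['message'].set('Game over')(state))
print(ended)   # {'running': False, 'message': 'Game over'}
print(state)   # {'running': True, 'message': ''}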
test
player_move
Shows the board to the player on the console and asks them to make a move.
examples/naughts_and_crosses.py
def player_move(board):
    '''Shows the board to the player on the console and asks them to make
    a move.'''
    print(board, end='\n\n')
    x, y = input('Enter move (e.g. 2b): ')
    print()
    return int(x) - 1, ord(y) - ord('a')
[ "Shows", "the", "board", "to", "the", "player", "on", "the", "console", "and", "asks", "them", "to", "make", "a", "move", "." ]
ingolemo/python-lenses
python
https://github.com/ingolemo/python-lenses/blob/a3a6ed0a31f6674451e542e7380a8aa16e6f8edf/examples/naughts_and_crosses.py#L88-L94
[ "def", "player_move", "(", "board", ")", ":", "print", "(", "board", ",", "end", "=", "'\\n\\n'", ")", "x", ",", "y", "=", "input", "(", "'Enter move (e.g. 2b): '", ")", "print", "(", ")", "return", "int", "(", "x", ")", "-", "1", ",", "ord", "(", "y", ")", "-", "ord", "(", "'a'", ")" ]
a3a6ed0a31f6674451e542e7380a8aa16e6f8edf
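The return expression in player_move packs the whole input convention: a move such as "2b" is a 1-based column digit followed by a row letter, both converted to 0-based indices. A quick worked example of just that conversion:

move = '2b'   # what the player would type at the prompt
x, y = move   # unpacking a 2-character string: x = '2', y = 'b'
print((int(x) - 1, ord(y) - ord('a')))   # (1, 1)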
test
play
Play a game of naughts and crosses against the computer.
examples/naughts_and_crosses.py
def play():
    'Play a game of naughts and crosses against the computer.'
    ai = {'X': player_move, 'O': random_move}
    board = Board()
    while not board.winner:
        x, y = ai[board.player](board)
        board = board.make_move(x, y)
    print(board, end='\n\n')
    print(board.winner)
[ "Play", "a", "game", "of", "naughts", "and", "crosses", "against", "the", "computer", "." ]
ingolemo/python-lenses
python
https://github.com/ingolemo/python-lenses/blob/a3a6ed0a31f6674451e542e7380a8aa16e6f8edf/examples/naughts_and_crosses.py#L102-L110
[ "def", "play", "(", ")", ":", "ai", "=", "{", "'X'", ":", "player_move", ",", "'O'", ":", "random_move", "}", "board", "=", "Board", "(", ")", "while", "not", "board", ".", "winner", ":", "x", ",", "y", "=", "ai", "[", "board", ".", "player", "]", "(", "board", ")", "board", "=", "board", ".", "make_move", "(", "x", ",", "y", ")", "print", "(", "board", ",", "end", "=", "'\\n\\n'", ")", "print", "(", "board", ".", "winner", ")" ]
a3a6ed0a31f6674451e542e7380a8aa16e6f8edf
test
Board.make_move
Return a board with a cell filled in by the current player. If the cell is already occupied then return the board unchanged.
examples/naughts_and_crosses.py
def make_move(self, x, y):
    '''Return a board with a cell filled in by the current player. If the
    cell is already occupied then return the board unchanged.'''
    if self.board[y][x] == ' ':
        return lens.board[y][x].set(self.player)(self)
    return self
[ "Return", "a", "board", "with", "a", "cell", "filled", "in", "by", "the", "current", "player", ".", "If", "the", "cell", "is", "already", "occupied", "then", "return", "the", "board", "unchanged", "." ]
ingolemo/python-lenses
python
https://github.com/ingolemo/python-lenses/blob/a3a6ed0a31f6674451e542e7380a8aa16e6f8edf/examples/naughts_and_crosses.py#L42-L47
[ "def", "make_move", "(", "self", ",", "x", ",", "y", ")", ":", "if", "self", ".", "board", "[", "y", "]", "[", "x", "]", "==", "' '", ":", "return", "lens", ".", "board", "[", "y", "]", "[", "x", "]", ".", "set", "(", "self", ".", "player", ")", "(", "self", ")", "return", "self" ]
a3a6ed0a31f6674451e542e7380a8aa16e6f8edf
test
Board.winner
The winner of this board if one exists.
examples/naughts_and_crosses.py
def winner(self):
    'The winner of this board if one exists.'
    for potential_win in self._potential_wins():
        if potential_win == tuple('XXX'):
            return Outcome.win_for_crosses
        elif potential_win == tuple('OOO'):
            return Outcome.win_for_naughts
    if self._count(' ') == 0:
        return Outcome.draw
    return Outcome.ongoing
[ "The", "winner", "of", "this", "board", "if", "one", "exists", "." ]
ingolemo/python-lenses
python
https://github.com/ingolemo/python-lenses/blob/a3a6ed0a31f6674451e542e7380a8aa16e6f8edf/examples/naughts_and_crosses.py#L55-L64
[ "def", "winner", "(", "self", ")", ":", "for", "potential_win", "in", "self", ".", "_potential_wins", "(", ")", ":", "if", "potential_win", "==", "tuple", "(", "'XXX'", ")", ":", "return", "Outcome", ".", "win_for_crosses", "elif", "potential_win", "==", "tuple", "(", "'OOO'", ")", ":", "return", "Outcome", ".", "win_for_naughts", "if", "self", ".", "_count", "(", "' '", ")", "==", "0", ":", "return", "Outcome", ".", "draw", "return", "Outcome", ".", "ongoing" ]
a3a6ed0a31f6674451e542e7380a8aa16e6f8edf
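The comparison `potential_win == tuple('XXX')` works because each potential win is a 3-tuple of cell characters and tuple() expands a string into exactly such a tuple:

print(tuple('XXX'))                      # ('X', 'X', 'X')
print(('X', 'X', 'X') == tuple('XXX'))   # True
print(('X', 'O', 'X') == tuple('XXX'))   # False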
test
Board._potential_wins
Generates all the combinations of board positions that need to be checked for a win.
examples/naughts_and_crosses.py
def _potential_wins(self):
    '''Generates all the combinations of board positions that need to be
    checked for a win.'''
    yield from self.board
    yield from zip(*self.board)
    yield self.board[0][0], self.board[1][1], self.board[2][2]
    yield self.board[0][2], self.board[1][1], self.board[2][0]
[ "Generates", "all", "the", "combinations", "of", "board", "positions", "that", "need", "to", "be", "checked", "for", "a", "win", "." ]
ingolemo/python-lenses
python
https://github.com/ingolemo/python-lenses/blob/a3a6ed0a31f6674451e542e7380a8aa16e6f8edf/examples/naughts_and_crosses.py#L71-L77
[ "def", "_potential_wins", "(", "self", ")", ":", "yield", "from", "self", ".", "board", "yield", "from", "zip", "(", "*", "self", ".", "board", ")", "yield", "self", ".", "board", "[", "0", "]", "[", "0", "]", ",", "self", ".", "board", "[", "1", "]", "[", "1", "]", ",", "self", ".", "board", "[", "2", "]", "[", "2", "]", "yield", "self", ".", "board", "[", "0", "]", "[", "2", "]", ",", "self", ".", "board", "[", "1", "]", "[", "1", "]", ",", "self", ".", "board", "[", "2", "]", "[", "0", "]" ]
a3a6ed0a31f6674451e542e7380a8aa16e6f8edf
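`zip(*self.board)` in _potential_wins transposes the row-major grid into its columns; together with the two explicit diagonals this enumerates every line that can win. An illustration with a made-up 3x3 board:

board = [['X', 'O', ' '],
         ['O', 'X', ' '],
         [' ', ' ', 'X']]

columns = list(zip(*board))           # transpose of the rows
diagonals = [
    (board[0][0], board[1][1], board[2][2]),
    (board[0][2], board[1][1], board[2][0]),
]
print(columns)    # [('X', 'O', ' '), ('O', 'X', ' '), (' ', ' ', 'X')]
print(diagonals)  # [('X', 'X', 'X'), (' ', 'X', ' ')]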
test
S3Pipeline.process_item
Process single item. Add item to items and then upload to S3 if size of items >= max_chunk_size.
s3pipeline/pipelines.py
def process_item(self, item, spider):
    """
    Process single item. Add item to items and then upload to S3 if size of
    items >= max_chunk_size.
    """
    self.items.append(item)
    if len(self.items) >= self.max_chunk_size:
        self._upload_chunk(spider)

    return item
[ "Process", "single", "item", ".", "Add", "item", "to", "items", "and", "then", "upload", "to", "S3", "if", "size", "of", "items", ">", "=", "max_chunk_size", "." ]
orangain/scrapy-s3pipeline
python
https://github.com/orangain/scrapy-s3pipeline/blob/6301a3a057da6407b04a09c717498026f88706a4/s3pipeline/pipelines.py#L45-L54
[ "def", "process_item", "(", "self", ",", "item", ",", "spider", ")", ":", "self", ".", "items", ".", "append", "(", "item", ")", "if", "len", "(", "self", ".", "items", ")", ">=", "self", ".", "max_chunk_size", ":", "self", ".", "_upload_chunk", "(", "spider", ")", "return", "item" ]
6301a3a057da6407b04a09c717498026f88706a4
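process_item is a buffer-and-flush pattern: accumulate items, flush once the buffer reaches max_chunk_size. A standalone sketch of the same pattern, with a print standing in for the S3 upload (ChunkBuffer is an illustrative name, not part of the pipeline):

class ChunkBuffer:
    def __init__(self, max_chunk_size, flush):
        self.items = []
        self.max_chunk_size = max_chunk_size
        self.flush = flush

    def add(self, item):
        self.items.append(item)
        if len(self.items) >= self.max_chunk_size:
            self.flush(self.items)
            self.items = []   # start the next chunk
        return item

buf = ChunkBuffer(3, flush=lambda chunk: print('flushing', len(chunk), 'items'))
for i in range(7):
    buf.add({'n': i})   # flushes after the 3rd and 6th items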
test
S3Pipeline.open_spider
Callback function when spider is open.
s3pipeline/pipelines.py
def open_spider(self, spider):
    """
    Callback function when spider is open.
    """
    # Store timestamp to replace {time} in S3PIPELINE_URL
    self.ts = datetime.utcnow().replace(microsecond=0).isoformat().replace(':', '-')
[ "Callback", "function", "when", "spider", "is", "open", "." ]
orangain/scrapy-s3pipeline
python
https://github.com/orangain/scrapy-s3pipeline/blob/6301a3a057da6407b04a09c717498026f88706a4/s3pipeline/pipelines.py#L56-L61
[ "def", "open_spider", "(", "self", ",", "spider", ")", ":", "# Store timestamp to replace {time} in S3PIPELINE_URL", "self", ".", "ts", "=", "datetime", ".", "utcnow", "(", ")", ".", "replace", "(", "microsecond", "=", "0", ")", ".", "isoformat", "(", ")", ".", "replace", "(", "':'", ",", "'-'", ")" ]
6301a3a057da6407b04a09c717498026f88706a4
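The timestamp stored by open_spider is an ISO-8601 string with the colons replaced, which keeps it usable inside an S3 object key. Running the same expression in isolation:

from datetime import datetime

ts = datetime.utcnow().replace(microsecond=0).isoformat().replace(':', '-')
print(ts)   # e.g. '2019-03-07T09-41-00'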
test
S3Pipeline._upload_chunk
Do upload items to S3.
s3pipeline/pipelines.py
def _upload_chunk(self, spider):
    """
    Do upload items to S3.
    """
    if not self.items:
        return  # Do nothing when items is empty.

    f = self._make_fileobj()

    # Build object key by replacing variables in object key template.
    object_key = self.object_key_template.format(**self._get_uri_params(spider))

    try:
        self.s3.upload_fileobj(f, self.bucket_name, object_key)
    except ClientError:
        self.stats.inc_value('pipeline/s3/fail')
        raise
    else:
        self.stats.inc_value('pipeline/s3/success')
    finally:
        # Prepare for the next chunk
        self.chunk_number += len(self.items)
        self.items = []
[ "Do", "upload", "items", "to", "S3", "." ]
orangain/scrapy-s3pipeline
python
https://github.com/orangain/scrapy-s3pipeline/blob/6301a3a057da6407b04a09c717498026f88706a4/s3pipeline/pipelines.py#L70-L93
[ "def", "_upload_chunk", "(", "self", ",", "spider", ")", ":", "if", "not", "self", ".", "items", ":", "return", "# Do nothing when items is empty.", "f", "=", "self", ".", "_make_fileobj", "(", ")", "# Build object key by replacing variables in object key template.", "object_key", "=", "self", ".", "object_key_template", ".", "format", "(", "*", "*", "self", ".", "_get_uri_params", "(", "spider", ")", ")", "try", ":", "self", ".", "s3", ".", "upload_fileobj", "(", "f", ",", "self", ".", "bucket_name", ",", "object_key", ")", "except", "ClientError", ":", "self", ".", "stats", ".", "inc_value", "(", "'pipeline/s3/fail'", ")", "raise", "else", ":", "self", ".", "stats", ".", "inc_value", "(", "'pipeline/s3/success'", ")", "finally", ":", "# Prepare for the next chunk", "self", ".", "chunk_number", "+=", "len", "(", "self", ".", "items", ")", "self", ".", "items", "=", "[", "]" ]
6301a3a057da6407b04a09c717498026f88706a4
test
S3Pipeline._make_fileobj
Build file object from items.
s3pipeline/pipelines.py
def _make_fileobj(self):
    """
    Build file object from items.
    """
    bio = BytesIO()
    f = gzip.GzipFile(mode='wb', fileobj=bio) if self.use_gzip else bio

    # Build file object using ItemExporter
    exporter = JsonLinesItemExporter(f)
    exporter.start_exporting()
    for item in self.items:
        exporter.export_item(item)
    exporter.finish_exporting()

    if f is not bio:
        f.close()  # Close the file if GzipFile

    # Seek to the top of file to be read later
    bio.seek(0)

    return bio
[ "Build", "file", "object", "from", "items", "." ]
orangain/scrapy-s3pipeline
python
https://github.com/orangain/scrapy-s3pipeline/blob/6301a3a057da6407b04a09c717498026f88706a4/s3pipeline/pipelines.py#L104-L125
[ "def", "_make_fileobj", "(", "self", ")", ":", "bio", "=", "BytesIO", "(", ")", "f", "=", "gzip", ".", "GzipFile", "(", "mode", "=", "'wb'", ",", "fileobj", "=", "bio", ")", "if", "self", ".", "use_gzip", "else", "bio", "# Build file object using ItemExporter", "exporter", "=", "JsonLinesItemExporter", "(", "f", ")", "exporter", ".", "start_exporting", "(", ")", "for", "item", "in", "self", ".", "items", ":", "exporter", ".", "export_item", "(", "item", ")", "exporter", ".", "finish_exporting", "(", ")", "if", "f", "is", "not", "bio", ":", "f", ".", "close", "(", ")", "# Close the file if GzipFile", "# Seek to the top of file to be read later", "bio", ".", "seek", "(", "0", ")", "return", "bio" ]
6301a3a057da6407b04a09c717498026f88706a4
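The essence of _make_fileobj is writing JSON lines through an optional gzip wrapper into an in-memory buffer, closing the GzipFile so its trailer is flushed, then rewinding the buffer. A stdlib-only sketch of the same idea, with json.dumps standing in for Scrapy's JsonLinesItemExporter:

import gzip
import json
from io import BytesIO

def make_fileobj(items, use_gzip=True):
    bio = BytesIO()
    f = gzip.GzipFile(mode='wb', fileobj=bio) if use_gzip else bio
    for item in items:
        f.write((json.dumps(item) + '\n').encode('utf-8'))
    if f is not bio:
        f.close()   # flush the gzip trailer before reading
    bio.seek(0)     # rewind so the buffer can be read/uploaded from the start
    return bio

payload = make_fileobj([{'a': 1}, {'b': 2}]).read()
print(gzip.decompress(payload).decode('utf-8'))   # two JSON lines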
test
update_wrapper
To be used under python2.4 because functools.update_wrapper() is available only from python2.5+
src/kwonly_args/utils.py
def update_wrapper(wrapper, wrapped):
    """ To be used under python2.4 because functools.update_wrapper() is
    available only from python2.5+ """
    for attr_name in ('__module__', '__name__', '__doc__'):
        attr_value = getattr(wrapped, attr_name, None)
        if attr_value is not None:
            setattr(wrapper, attr_name, attr_value)
    wrapper.__dict__.update(getattr(wrapped, '__dict__', {}))
    return wrapper
[ "To", "be", "used", "under", "python2", ".", "4", "because", "functools", ".", "update_wrapper", "()", "is", "available", "only", "from", "python2", ".", "5", "+" ]
pasztorpisti/kwonly-args
python
https://github.com/pasztorpisti/kwonly-args/blob/640b7f89514cce2167f510d661e0e5f65126d70d/src/kwonly_args/utils.py#L1-L8
[ "def", "update_wrapper", "(", "wrapper", ",", "wrapped", ")", ":", "for", "attr_name", "in", "(", "'__module__'", ",", "'__name__'", ",", "'__doc__'", ")", ":", "attr_value", "=", "getattr", "(", "wrapped", ",", "attr_name", ",", "None", ")", "if", "attr_value", "is", "not", "None", ":", "setattr", "(", "wrapper", ",", "attr_name", ",", "attr_value", ")", "wrapper", ".", "__dict__", ".", "update", "(", "getattr", "(", "wrapped", ",", "'__dict__'", ",", "{", "}", ")", ")", "return", "wrapper" ]
640b7f89514cce2167f510d661e0e5f65126d70d
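A short usage sketch for the update_wrapper shown above, assuming it is in scope (it behaves like a trimmed-down functools.update_wrapper, which is the preferred choice on any modern Python):

def greet():
    """Say hello."""
    return 'hello'

def wrapper():
    return greet()

update_wrapper(wrapper, greet)
print(wrapper.__name__)   # 'greet'
print(wrapper.__doc__)    # 'Say hello.'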
test
Client.get_account_state
Returns the account state information associated with a specific address. :param address: a 34-bit length address (eg. AJBENSwajTzQtwyJFkiJSv7MAaaMc7DsRz) :type address: str :return: dictionary containing the account state information :rtype: dict
neojsonrpc/client.py
def get_account_state(self, address, **kwargs):
    """ Returns the account state information associated with a specific address.

    :param address: a 34-bit length address (eg. AJBENSwajTzQtwyJFkiJSv7MAaaMc7DsRz)
    :type address: str
    :return: dictionary containing the account state information
    :rtype: dict

    """
    return self._call(JSONRPCMethods.GET_ACCOUNT_STATE.value, params=[address, ], **kwargs)
[ "Returns", "the", "account", "state", "information", "associated", "with", "a", "specific", "address", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L74-L83
[ "def", "get_account_state", "(", "self", ",", "address", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_call", "(", "JSONRPCMethods", ".", "GET_ACCOUNT_STATE", ".", "value", ",", "params", "=", "[", "address", ",", "]", ",", "*", "*", "kwargs", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
Client.get_asset_state
Returns the asset information associated with a specific asset ID. :param asset_id: an asset identifier (the transaction ID of the RegistTransaction when the asset is registered) :type asset_id: str :return: dictionary containing the asset state information :rtype: dict
neojsonrpc/client.py
def get_asset_state(self, asset_id, **kwargs):
    """ Returns the asset information associated with a specific asset ID.

    :param asset_id: an asset identifier (the transaction ID of the
        RegistTransaction when the asset is registered)
    :type asset_id: str
    :return: dictionary containing the asset state information
    :rtype: dict

    """
    return self._call(JSONRPCMethods.GET_ASSET_STATE.value, params=[asset_id, ], **kwargs)
[ "Returns", "the", "asset", "information", "associated", "with", "a", "specific", "asset", "ID", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L85-L96
[ "def", "get_asset_state", "(", "self", ",", "asset_id", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_call", "(", "JSONRPCMethods", ".", "GET_ASSET_STATE", ".", "value", ",", "params", "=", "[", "asset_id", ",", "]", ",", "*", "*", "kwargs", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
Client.get_block
Returns the block information associated with a specific hash value or block index. :param block_hash: a block hash value or a block index (block height) :param verbose: a boolean indicating whether the detailed block information should be returned in JSON format (otherwise the block information is returned as an hexadecimal string by the JSON-RPC endpoint) :type block_hash: str or int :type verbose: bool :return: dictionary containing the block information (or an hexadecimal string if verbose is set to False) :rtype: dict or str
neojsonrpc/client.py
def get_block(self, block_hash, verbose=True, **kwargs):
    """ Returns the block information associated with a specific hash value or block index.

    :param block_hash: a block hash value or a block index (block height)
    :param verbose: a boolean indicating whether the detailed block information should be
        returned in JSON format (otherwise the block information is returned as an
        hexadecimal string by the JSON-RPC endpoint)
    :type block_hash: str or int
    :type verbose: bool
    :return: dictionary containing the block information (or an hexadecimal string if
        verbose is set to False)
    :rtype: dict or str

    """
    return self._call(
        JSONRPCMethods.GET_BLOCK.value, params=[block_hash, int(verbose), ], **kwargs)
[ "Returns", "the", "block", "information", "associated", "with", "a", "specific", "hash", "value", "or", "block", "index", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L107-L124
[ "def", "get_block", "(", "self", ",", "block_hash", ",", "verbose", "=", "True", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_call", "(", "JSONRPCMethods", ".", "GET_BLOCK", ".", "value", ",", "params", "=", "[", "block_hash", ",", "int", "(", "verbose", ")", ",", "]", ",", "*", "*", "kwargs", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
Client.get_block_hash
Returns the hash value associated with a specific block index. :param block_index: a block index (block height) :type block_index: int :return: hash of the block associated with the considered index :rtype: str
neojsonrpc/client.py
def get_block_hash(self, block_index, **kwargs):
    """ Returns the hash value associated with a specific block index.

    :param block_index: a block index (block height)
    :type block_index: int
    :return: hash of the block associated with the considered index
    :rtype: str

    """
    return self._call(JSONRPCMethods.GET_BLOCK_HASH.value, [block_index, ], **kwargs)
[ "Returns", "the", "hash", "value", "associated", "with", "a", "specific", "block", "index", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L135-L144
[ "def", "get_block_hash", "(", "self", ",", "block_index", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_call", "(", "JSONRPCMethods", ".", "GET_BLOCK_HASH", ".", "value", ",", "[", "block_index", ",", "]", ",", "*", "*", "kwargs", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
Client.get_block_sys_fee
Returns the system fees associated with a specific block index. :param block_index: a block index (block height) :type block_index: int :return: system fees of the block, expressed in NeoGas units :rtype: str
neojsonrpc/client.py
def get_block_sys_fee(self, block_index, **kwargs):
    """ Returns the system fees associated with a specific block index.

    :param block_index: a block index (block height)
    :type block_index: int
    :return: system fees of the block, expressed in NeoGas units
    :rtype: str

    """
    return self._call(JSONRPCMethods.GET_BLOCK_SYS_FEE.value, [block_index, ], **kwargs)
[ "Returns", "the", "system", "fees", "associated", "with", "a", "specific", "block", "index", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L146-L155
[ "def", "get_block_sys_fee", "(", "self", ",", "block_index", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_call", "(", "JSONRPCMethods", ".", "GET_BLOCK_SYS_FEE", ".", "value", ",", "[", "block_index", ",", "]", ",", "*", "*", "kwargs", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
Client.get_contract_state
Returns the contract information associated with a specific script hash. :param script_hash: contract script hash :type script_hash: str :return: dictionary containing the contract information :rtype: dict
neojsonrpc/client.py
def get_contract_state(self, script_hash, **kwargs):
    """ Returns the contract information associated with a specific script hash.

    :param script_hash: contract script hash
    :type script_hash: str
    :return: dictionary containing the contract information
    :rtype: dict

    """
    return self._call(JSONRPCMethods.GET_CONTRACT_STATE.value, [script_hash, ], **kwargs)
[ "Returns", "the", "contract", "information", "associated", "with", "a", "specific", "script", "hash", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L166-L175
[ "def", "get_contract_state", "(", "self", ",", "script_hash", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_call", "(", "JSONRPCMethods", ".", "GET_CONTRACT_STATE", ".", "value", ",", "[", "script_hash", ",", "]", ",", "*", "*", "kwargs", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
Client.get_raw_transaction
Returns detailed information associated with a specific transaction hash. :param tx_hash: transaction hash :param verbose: a boolean indicating whether the detailed transaction information should be returned in JSON format (otherwise the transaction information is returned as an hexadecimal string by the JSON-RPC endpoint) :type tx_hash: str :type verbose: bool :return: dictionary containing the transaction information (or an hexadecimal string if verbose is set to False) :rtype: dict or str
neojsonrpc/client.py
def get_raw_transaction(self, tx_hash, verbose=True, **kwargs):
    """ Returns detailed information associated with a specific transaction hash.

    :param tx_hash: transaction hash
    :param verbose: a boolean indicating whether the detailed transaction information should
        be returned in JSON format (otherwise the transaction information is returned as an
        hexadecimal string by the JSON-RPC endpoint)
    :type tx_hash: str
    :type verbose: bool
    :return: dictionary containing the transaction information (or an hexadecimal string if
        verbose is set to False)
    :rtype: dict or str

    """
    return self._call(
        JSONRPCMethods.GET_RAW_TRANSACTION.value, params=[tx_hash, int(verbose), ], **kwargs)
[ "Returns", "detailed", "information", "associated", "with", "a", "specific", "transaction", "hash", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L186-L203
[ "def", "get_raw_transaction", "(", "self", ",", "tx_hash", ",", "verbose", "=", "True", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_call", "(", "JSONRPCMethods", ".", "GET_RAW_TRANSACTION", ".", "value", ",", "params", "=", "[", "tx_hash", ",", "int", "(", "verbose", ")", ",", "]", ",", "*", "*", "kwargs", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
Client.get_storage
Returns the value stored in the storage of a contract script hash for a given key. :param script_hash: contract script hash :param key: key to look up in the storage :type script_hash: str :type key: str :return: value associated with the storage key :rtype: bytearray
neojsonrpc/client.py
def get_storage(self, script_hash, key, **kwargs):
    """ Returns the value stored in the storage of a contract script hash for a given key.

    :param script_hash: contract script hash
    :param key: key to look up in the storage
    :type script_hash: str
    :type key: str
    :return: value associated with the storage key
    :rtype: bytearray

    """
    hexkey = binascii.hexlify(key.encode('utf-8')).decode('utf-8')
    hexresult = self._call(
        JSONRPCMethods.GET_STORAGE.value, params=[script_hash, hexkey, ], **kwargs)
    try:
        assert hexresult
        result = bytearray(binascii.unhexlify(hexresult.encode('utf-8')))
    except AssertionError:
        result = hexresult
    return result
[ "Returns", "the", "value", "stored", "in", "the", "storage", "of", "a", "contract", "script", "hash", "for", "a", "given", "key", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L205-L224
[ "def", "get_storage", "(", "self", ",", "script_hash", ",", "key", ",", "*", "*", "kwargs", ")", ":", "hexkey", "=", "binascii", ".", "hexlify", "(", "key", ".", "encode", "(", "'utf-8'", ")", ")", ".", "decode", "(", "'utf-8'", ")", "hexresult", "=", "self", ".", "_call", "(", "JSONRPCMethods", ".", "GET_STORAGE", ".", "value", ",", "params", "=", "[", "script_hash", ",", "hexkey", ",", "]", ",", "*", "*", "kwargs", ")", "try", ":", "assert", "hexresult", "result", "=", "bytearray", "(", "binascii", ".", "unhexlify", "(", "hexresult", ".", "encode", "(", "'utf-8'", ")", ")", ")", "except", "AssertionError", ":", "result", "=", "hexresult", "return", "result" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
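get_storage hex-encodes the key before the call and hex-decodes the result afterwards. The round trip in isolation ('totalSupply' is just an example key, not something mandated by the client):

import binascii

key = 'totalSupply'
hexkey = binascii.hexlify(key.encode('utf-8')).decode('utf-8')
print(hexkey)                                   # '746f74616c537570706c79'
print(bytearray(binascii.unhexlify(hexkey)))    # bytearray(b'totalSupply')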
test
Client.get_tx_out
Returns the transaction output information corresponding to a hash and index. :param tx_hash: transaction hash :param index: index of the transaction output to be obtained in the transaction (starts from 0) :type tx_hash: str :type index: int :return: dictionary containing the transaction output :rtype: dict
neojsonrpc/client.py
def get_tx_out(self, tx_hash, index, **kwargs):
    """ Returns the transaction output information corresponding to a hash and index.

    :param tx_hash: transaction hash
    :param index: index of the transaction output to be obtained in the transaction
        (starts from 0)
    :type tx_hash: str
    :type index: int
    :return: dictionary containing the transaction output
    :rtype: dict

    """
    return self._call(JSONRPCMethods.GET_TX_OUT.value, params=[tx_hash, index, ], **kwargs)
[ "Returns", "the", "transaction", "output", "information", "corresponding", "to", "a", "hash", "and", "index", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L226-L238
[ "def", "get_tx_out", "(", "self", ",", "tx_hash", ",", "index", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_call", "(", "JSONRPCMethods", ".", "GET_TX_OUT", ".", "value", ",", "params", "=", "[", "tx_hash", ",", "index", ",", "]", ",", "*", "*", "kwargs", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
Client.invoke
Invokes a contract with given parameters and returns the result. It should be noted that the name of the function invoked in the contract should be part of paramaters. :param script_hash: contract script hash :param params: list of paramaters to be passed in to the smart contract :type script_hash: str :type params: list :return: result of the invocation :rtype: dictionary
neojsonrpc/client.py
def invoke(self, script_hash, params, **kwargs):
    """ Invokes a contract with given parameters and returns the result.

    It should be noted that the name of the function invoked in the contract should be part
    of paramaters.

    :param script_hash: contract script hash
    :param params: list of paramaters to be passed in to the smart contract
    :type script_hash: str
    :type params: list
    :return: result of the invocation
    :rtype: dictionary

    """
    contract_params = encode_invocation_params(params)
    raw_result = self._call(
        JSONRPCMethods.INVOKE.value, [script_hash, contract_params, ], **kwargs)
    return decode_invocation_result(raw_result)
[ "Invokes", "a", "contract", "with", "given", "parameters", "and", "returns", "the", "result", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L258-L275
[ "def", "invoke", "(", "self", ",", "script_hash", ",", "params", ",", "*", "*", "kwargs", ")", ":", "contract_params", "=", "encode_invocation_params", "(", "params", ")", "raw_result", "=", "self", ".", "_call", "(", "JSONRPCMethods", ".", "INVOKE", ".", "value", ",", "[", "script_hash", ",", "contract_params", ",", "]", ",", "*", "*", "kwargs", ")", "return", "decode_invocation_result", "(", "raw_result", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
Client.invoke_function
Invokes a contract's function with given parameters and returns the result. :param script_hash: contract script hash :param operation: name of the operation to invoke :param params: list of paramaters to be passed in to the smart contract :type script_hash: str :type operation: str :type params: list :return: result of the invocation :rtype: dictionary
neojsonrpc/client.py
def invoke_function(self, script_hash, operation, params, **kwargs):
    """ Invokes a contract's function with given parameters and returns the result.

    :param script_hash: contract script hash
    :param operation: name of the operation to invoke
    :param params: list of paramaters to be passed in to the smart contract
    :type script_hash: str
    :type operation: str
    :type params: list
    :return: result of the invocation
    :rtype: dictionary

    """
    contract_params = encode_invocation_params(params)
    raw_result = self._call(
        JSONRPCMethods.INVOKE_FUNCTION.value, [script_hash, operation, contract_params, ],
        **kwargs)
    return decode_invocation_result(raw_result)
[ "Invokes", "a", "contract", "s", "function", "with", "given", "parameters", "and", "returns", "the", "result", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L277-L294
[ "def", "invoke_function", "(", "self", ",", "script_hash", ",", "operation", ",", "params", ",", "*", "*", "kwargs", ")", ":", "contract_params", "=", "encode_invocation_params", "(", "params", ")", "raw_result", "=", "self", ".", "_call", "(", "JSONRPCMethods", ".", "INVOKE_FUNCTION", ".", "value", ",", "[", "script_hash", ",", "operation", ",", "contract_params", ",", "]", ",", "*", "*", "kwargs", ")", "return", "decode_invocation_result", "(", "raw_result", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
Client.invoke_script
Invokes a script on the VM and returns the result. :param script: script runnable by the VM :type script: str :return: result of the invocation :rtype: dictionary
neojsonrpc/client.py
def invoke_script(self, script, **kwargs):
    """ Invokes a script on the VM and returns the result.

    :param script: script runnable by the VM
    :type script: str
    :return: result of the invocation
    :rtype: dictionary

    """
    raw_result = self._call(JSONRPCMethods.INVOKE_SCRIPT.value, [script, ], **kwargs)
    return decode_invocation_result(raw_result)
[ "Invokes", "a", "script", "on", "the", "VM", "and", "returns", "the", "result", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L296-L306
[ "def", "invoke_script", "(", "self", ",", "script", ",", "*", "*", "kwargs", ")", ":", "raw_result", "=", "self", ".", "_call", "(", "JSONRPCMethods", ".", "INVOKE_SCRIPT", ".", "value", ",", "[", "script", ",", "]", ",", "*", "*", "kwargs", ")", "return", "decode_invocation_result", "(", "raw_result", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
Client.send_raw_transaction
Broadcasts a transaction over the NEO network and returns the result. :param hextx: hexadecimal string that has been serialized :type hextx: str :return: result of the transaction :rtype: bool
neojsonrpc/client.py
def send_raw_transaction(self, hextx, **kwargs):
    """ Broadcasts a transaction over the NEO network and returns the result.

    :param hextx: hexadecimal string that has been serialized
    :type hextx: str
    :return: result of the transaction
    :rtype: bool

    """
    return self._call(JSONRPCMethods.SEND_RAW_TRANSACTION.value, [hextx, ], **kwargs)
[ "Broadcasts", "a", "transaction", "over", "the", "NEO", "network", "and", "returns", "the", "result", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L308-L317
[ "def", "send_raw_transaction", "(", "self", ",", "hextx", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_call", "(", "JSONRPCMethods", ".", "SEND_RAW_TRANSACTION", ".", "value", ",", "[", "hextx", ",", "]", ",", "*", "*", "kwargs", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
Client.validate_address
Validates if the considered string is a valid NEO address. :param hex: string containing a potential NEO address :type hex: str :return: dictionary containing the result of the verification :rtype: dictionary
neojsonrpc/client.py
def validate_address(self, addr, **kwargs):
    """ Validates if the considered string is a valid NEO address.

    :param hex: string containing a potential NEO address
    :type hex: str
    :return: dictionary containing the result of the verification
    :rtype: dictionary

    """
    return self._call(JSONRPCMethods.VALIDATE_ADDRESS.value, [addr, ], **kwargs)
[ "Validates", "if", "the", "considered", "string", "is", "a", "valid", "NEO", "address", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L319-L328
[ "def", "validate_address", "(", "self", ",", "addr", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_call", "(", "JSONRPCMethods", ".", "VALIDATE_ADDRESS", ".", "value", ",", "[", "addr", ",", "]", ",", "*", "*", "kwargs", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
Client._call
Calls the JSON-RPC endpoint.
neojsonrpc/client.py
def _call(self, method, params=None, request_id=None):
    """ Calls the JSON-RPC endpoint. """
    params = params or []

    # Determines which 'id' value to use and increment the counter associated with the
    # current client instance if applicable.
    rid = request_id or self._id_counter
    if request_id is None:
        self._id_counter += 1

    # Prepares the payload and the headers that will be used to forge the request.
    payload = {'jsonrpc': '2.0', 'method': method, 'params': params, 'id': rid}
    headers = {'Content-Type': 'application/json'}
    scheme = 'https' if self.tls else 'http'
    url = '{}://{}:{}'.format(scheme, self.host, self.port)

    # Calls the JSON-RPC endpoint!
    try:
        response = self.session.post(url, headers=headers, data=json.dumps(payload))
        response.raise_for_status()
    except HTTPError:
        raise TransportError(
            'Got unsuccessful response from server (status code: {})'.format(
                response.status_code), response=response)

    # Ensures the response body can be deserialized to JSON.
    try:
        response_data = response.json()
    except ValueError as e:
        raise ProtocolError(
            'Unable to deserialize response body: {}'.format(e), response=response)

    # Properly handles potential errors.
    if response_data.get('error'):
        code = response_data['error'].get('code', '')
        message = response_data['error'].get('message', '')
        raise ProtocolError(
            'Error[{}] {}'.format(code, message), response=response, data=response_data)
    elif 'result' not in response_data:
        raise ProtocolError(
            'Response is empty (result field is missing)', response=response,
            data=response_data)

    return response_data['result']
[ "Calls", "the", "JSON", "-", "RPC", "endpoint", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/client.py#L334-L378
[ "def", "_call", "(", "self", ",", "method", ",", "params", "=", "None", ",", "request_id", "=", "None", ")", ":", "params", "=", "params", "or", "[", "]", "# Determines which 'id' value to use and increment the counter associated with the current", "# client instance if applicable.", "rid", "=", "request_id", "or", "self", ".", "_id_counter", "if", "request_id", "is", "None", ":", "self", ".", "_id_counter", "+=", "1", "# Prepares the payload and the headers that will be used to forge the request.", "payload", "=", "{", "'jsonrpc'", ":", "'2.0'", ",", "'method'", ":", "method", ",", "'params'", ":", "params", ",", "'id'", ":", "rid", "}", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", "}", "scheme", "=", "'https'", "if", "self", ".", "tls", "else", "'http'", "url", "=", "'{}://{}:{}'", ".", "format", "(", "scheme", ",", "self", ".", "host", ",", "self", ".", "port", ")", "# Calls the JSON-RPC endpoint!", "try", ":", "response", "=", "self", ".", "session", ".", "post", "(", "url", ",", "headers", "=", "headers", ",", "data", "=", "json", ".", "dumps", "(", "payload", ")", ")", "response", ".", "raise_for_status", "(", ")", "except", "HTTPError", ":", "raise", "TransportError", "(", "'Got unsuccessful response from server (status code: {})'", ".", "format", "(", "response", ".", "status_code", ")", ",", "response", "=", "response", ")", "# Ensures the response body can be deserialized to JSON.", "try", ":", "response_data", "=", "response", ".", "json", "(", ")", "except", "ValueError", "as", "e", ":", "raise", "ProtocolError", "(", "'Unable to deserialize response body: {}'", ".", "format", "(", "e", ")", ",", "response", "=", "response", ")", "# Properly handles potential errors.", "if", "response_data", ".", "get", "(", "'error'", ")", ":", "code", "=", "response_data", "[", "'error'", "]", ".", "get", "(", "'code'", ",", "''", ")", "message", "=", "response_data", "[", "'error'", "]", ".", "get", "(", "'message'", ",", "''", ")", "raise", "ProtocolError", "(", "'Error[{}] {}'", ".", "format", "(", "code", ",", "message", ")", ",", "response", "=", "response", ",", "data", "=", "response_data", ")", "elif", "'result'", "not", "in", "response_data", ":", "raise", "ProtocolError", "(", "'Response is empty (result field is missing)'", ",", "response", "=", "response", ",", "data", "=", "response_data", ")", "return", "response_data", "[", "'result'", "]" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
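The `_call` method above is a thin JSON-RPC 2.0 wrapper around `requests`. Below is a minimal standalone sketch of the same request/response handling, using only `requests` and `json`; the host, port and method name in the usage comment are placeholders, not values taken from this record, and the sketch raises plain `RuntimeError` instead of the library's `TransportError`/`ProtocolError`.

```python
import json
import requests

def jsonrpc_call(host, port, method, params=None, request_id=1, tls=False):
    """Minimal sketch mirroring the _call logic above (payload, transport, error handling)."""
    payload = {'jsonrpc': '2.0', 'method': method, 'params': params or [], 'id': request_id}
    url = '{}://{}:{}'.format('https' if tls else 'http', host, port)
    response = requests.post(url, headers={'Content-Type': 'application/json'},
                             data=json.dumps(payload))
    response.raise_for_status()
    data = response.json()
    if data.get('error'):
        raise RuntimeError('Error[{}] {}'.format(data['error'].get('code', ''),
                                                 data['error'].get('message', '')))
    return data['result']

# Hypothetical usage against a local node (the endpoint is an assumption):
# block_count = jsonrpc_call('localhost', 10332, 'getblockcount')
```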
test
is_hash256
Returns True if the considered string is a valid SHA256 hash.
neojsonrpc/utils.py
def is_hash256(s): """ Returns True if the considered string is a valid SHA256 hash. """ if not s or not isinstance(s, str): return False return re.match('^[0-9A-F]{64}$', s.strip(), re.IGNORECASE)
def is_hash256(s): """ Returns True if the considered string is a valid SHA256 hash. """ if not s or not isinstance(s, str): return False return re.match('^[0-9A-F]{64}$', s.strip(), re.IGNORECASE)
[ "Returns", "True", "if", "the", "considered", "string", "is", "a", "valid", "SHA256", "hash", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/utils.py#L16-L20
[ "def", "is_hash256", "(", "s", ")", ":", "if", "not", "s", "or", "not", "isinstance", "(", "s", ",", "str", ")", ":", "return", "False", "return", "re", ".", "match", "(", "'^[0-9A-F]{64}$'", ",", "s", ".", "strip", "(", ")", ",", "re", ".", "IGNORECASE", ")" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
is_hash160
Returns True if the considered string is a valid RIPEMD160 hash.
neojsonrpc/utils.py
def is_hash160(s): """ Returns True if the considered string is a valid RIPEMD160 hash. """ if not s or not isinstance(s, str): return False if not len(s) == 40: return False for c in s: if (c < '0' or c > '9') and (c < 'A' or c > 'F') and (c < 'a' or c > 'f'): return False return True
def is_hash160(s): """ Returns True if the considered string is a valid RIPEMD160 hash. """ if not s or not isinstance(s, str): return False if not len(s) == 40: return False for c in s: if (c < '0' or c > '9') and (c < 'A' or c > 'F') and (c < 'a' or c > 'f'): return False return True
[ "Returns", "True", "if", "the", "considered", "string", "is", "a", "valid", "RIPEMD160", "hash", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/utils.py#L23-L32
[ "def", "is_hash160", "(", "s", ")", ":", "if", "not", "s", "or", "not", "isinstance", "(", "s", ",", "str", ")", ":", "return", "False", "if", "not", "len", "(", "s", ")", "==", "40", ":", "return", "False", "for", "c", "in", "s", ":", "if", "(", "c", "<", "'0'", "or", "c", ">", "'9'", ")", "and", "(", "c", "<", "'A'", "or", "c", ">", "'F'", ")", "and", "(", "c", "<", "'a'", "or", "c", ">", "'f'", ")", ":", "return", "False", "return", "True" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
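Both validators above are plain fixed-length hex checks. A short usage sketch follows; the import path is taken from the `neojsonrpc/utils.py` path recorded above, and the sample hashes are illustrative only.

```python
from neojsonrpc.utils import is_hash256, is_hash160

tx_hash = 'ab' * 32      # 64 hex characters, shaped like a SHA256 hash
script_hash = 'cd' * 20  # 40 hex characters, shaped like a RIPEMD160 hash

# Note: is_hash256 returns the re.match object (truthy) rather than a literal True.
print(bool(is_hash256(tx_hash)), bool(is_hash160(script_hash)))    # True True
print(bool(is_hash256('not-a-hash')), bool(is_hash160('1234')))    # False False
```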
test
encode_invocation_params
Returns a list of paramaters meant to be passed to JSON-RPC endpoints.
neojsonrpc/utils.py
def encode_invocation_params(params): """ Returns a list of paramaters meant to be passed to JSON-RPC endpoints. """ final_params = [] for p in params: if isinstance(p, bool): final_params.append({'type': ContractParameterTypes.BOOLEAN.value, 'value': p}) elif isinstance(p, int): final_params.append({'type': ContractParameterTypes.INTEGER.value, 'value': p}) elif is_hash256(p): final_params.append({'type': ContractParameterTypes.HASH256.value, 'value': p}) elif is_hash160(p): final_params.append({'type': ContractParameterTypes.HASH160.value, 'value': p}) elif isinstance(p, bytearray): final_params.append({'type': ContractParameterTypes.BYTE_ARRAY.value, 'value': p}) elif isinstance(p, str): final_params.append({'type': ContractParameterTypes.STRING.value, 'value': p}) elif isinstance(p, list): innerp = encode_invocation_params(p) final_params.append({'type': ContractParameterTypes.ARRAY.value, 'value': innerp}) return final_params
def encode_invocation_params(params): """ Returns a list of paramaters meant to be passed to JSON-RPC endpoints. """ final_params = [] for p in params: if isinstance(p, bool): final_params.append({'type': ContractParameterTypes.BOOLEAN.value, 'value': p}) elif isinstance(p, int): final_params.append({'type': ContractParameterTypes.INTEGER.value, 'value': p}) elif is_hash256(p): final_params.append({'type': ContractParameterTypes.HASH256.value, 'value': p}) elif is_hash160(p): final_params.append({'type': ContractParameterTypes.HASH160.value, 'value': p}) elif isinstance(p, bytearray): final_params.append({'type': ContractParameterTypes.BYTE_ARRAY.value, 'value': p}) elif isinstance(p, str): final_params.append({'type': ContractParameterTypes.STRING.value, 'value': p}) elif isinstance(p, list): innerp = encode_invocation_params(p) final_params.append({'type': ContractParameterTypes.ARRAY.value, 'value': innerp}) return final_params
[ "Returns", "a", "list", "of", "paramaters", "meant", "to", "be", "passed", "to", "JSON", "-", "RPC", "endpoints", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/utils.py#L35-L54
[ "def", "encode_invocation_params", "(", "params", ")", ":", "final_params", "=", "[", "]", "for", "p", "in", "params", ":", "if", "isinstance", "(", "p", ",", "bool", ")", ":", "final_params", ".", "append", "(", "{", "'type'", ":", "ContractParameterTypes", ".", "BOOLEAN", ".", "value", ",", "'value'", ":", "p", "}", ")", "elif", "isinstance", "(", "p", ",", "int", ")", ":", "final_params", ".", "append", "(", "{", "'type'", ":", "ContractParameterTypes", ".", "INTEGER", ".", "value", ",", "'value'", ":", "p", "}", ")", "elif", "is_hash256", "(", "p", ")", ":", "final_params", ".", "append", "(", "{", "'type'", ":", "ContractParameterTypes", ".", "HASH256", ".", "value", ",", "'value'", ":", "p", "}", ")", "elif", "is_hash160", "(", "p", ")", ":", "final_params", ".", "append", "(", "{", "'type'", ":", "ContractParameterTypes", ".", "HASH160", ".", "value", ",", "'value'", ":", "p", "}", ")", "elif", "isinstance", "(", "p", ",", "bytearray", ")", ":", "final_params", ".", "append", "(", "{", "'type'", ":", "ContractParameterTypes", ".", "BYTE_ARRAY", ".", "value", ",", "'value'", ":", "p", "}", ")", "elif", "isinstance", "(", "p", ",", "str", ")", ":", "final_params", ".", "append", "(", "{", "'type'", ":", "ContractParameterTypes", ".", "STRING", ".", "value", ",", "'value'", ":", "p", "}", ")", "elif", "isinstance", "(", "p", ",", "list", ")", ":", "innerp", "=", "encode_invocation_params", "(", "p", ")", "final_params", ".", "append", "(", "{", "'type'", ":", "ContractParameterTypes", ".", "ARRAY", ".", "value", ",", "'value'", ":", "innerp", "}", ")", "return", "final_params" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
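`encode_invocation_params` wraps each plain Python value in a `{'type': ..., 'value': ...}` dict, and the `bool` branch deliberately comes before `int` because `bool` is a subclass of `int` in Python. A hedged usage sketch (import path taken from the record; the exact `type` strings depend on the `ContractParameterTypes` enum, which is not shown here):

```python
from neojsonrpc.utils import encode_invocation_params

params = [True, 7, 'hello', bytearray(b'\x01\x02'), ['nested', 42]]

# Each element comes back as {'type': <ContractParameterTypes member value>, 'value': <value>},
# with the inner list encoded recursively as an Array parameter.
for encoded in encode_invocation_params(params):
    print(encoded)
```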
test
decode_invocation_result
Tries to decode the values embedded in an invocation result dictionary.
neojsonrpc/utils.py
def decode_invocation_result(result): """ Tries to decode the values embedded in an invocation result dictionary. """ if 'stack' not in result: return result result = copy.deepcopy(result) result['stack'] = _decode_invocation_result_stack(result['stack']) return result
def decode_invocation_result(result): """ Tries to decode the values embedded in an invocation result dictionary. """ if 'stack' not in result: return result result = copy.deepcopy(result) result['stack'] = _decode_invocation_result_stack(result['stack']) return result
[ "Tries", "to", "decode", "the", "values", "embedded", "in", "an", "invocation", "result", "dictionary", "." ]
ellmetha/neojsonrpc
python
https://github.com/ellmetha/neojsonrpc/blob/e369b633a727482d5f9e310f0c3337ae5f7265db/neojsonrpc/utils.py#L57-L63
[ "def", "decode_invocation_result", "(", "result", ")", ":", "if", "'stack'", "not", "in", "result", ":", "return", "result", "result", "=", "copy", ".", "deepcopy", "(", "result", ")", "result", "[", "'stack'", "]", "=", "_decode_invocation_result_stack", "(", "result", "[", "'stack'", "]", ")", "return", "result" ]
e369b633a727482d5f9e310f0c3337ae5f7265db
test
first_kwonly_arg
Emulates keyword-only arguments under python2. Works with both python2 and python3. With this decorator you can convert all or some of the default arguments of your function into kwonly arguments. Use ``KWONLY_REQUIRED`` as the default value of required kwonly args. :param name: The name of the first default argument to be treated as a keyword-only argument. This default argument along with all default arguments that follow this one will be treated as keyword only arguments. You can also pass here the ``FIRST_DEFAULT_ARG`` constant in order to select the first default argument. This way you turn all default arguments into keyword-only arguments. As a shortcut you can use the ``@kwonly_defaults`` decorator (without any parameters) instead of ``@first_kwonly_arg(FIRST_DEFAULT_ARG)``. >>> from kwonly_args import first_kwonly_arg, KWONLY_REQUIRED, FIRST_DEFAULT_ARG, kwonly_defaults >>> >>> # this decoration converts the ``d1`` and ``d2`` default args into kwonly args >>> @first_kwonly_arg('d1') >>> def func(a0, a1, d0='d0', d1='d1', d2='d2', *args, **kwargs): >>> print(a0, a1, d0, d1, d2, args, kwargs) >>> >>> func(0, 1, 2, 3, 4) 0 1 2 d1 d2 (3, 4) {} >>> >>> func(0, 1, 2, 3, 4, d2='my_param') 0 1 2 d1 my_param (3, 4) {} >>> >>> # d0 is an optional deyword argument, d1 is required >>> def func(d0='d0', d1=KWONLY_REQUIRED): >>> print(d0, d1) >>> >>> # The ``FIRST_DEFAULT_ARG`` constant automatically selects the first default argument so it >>> # turns all default arguments into keyword-only ones. Both d0 and d1 are keyword-only arguments. >>> @first_kwonly_arg(FIRST_DEFAULT_ARG) >>> def func(a0, a1, d0='d0', d1='d1'): >>> print(a0, a1, d0, d1) >>> >>> # ``@kwonly_defaults`` is a shortcut for the ``@first_kwonly_arg(FIRST_DEFAULT_ARG)`` >>> # in the previous example. This example has the same effect as the previous one. >>> @kwonly_defaults >>> def func(a0, a1, d0='d0', d1='d1'): >>> print(a0, a1, d0, d1)
src/kwonly_args/__init__.py
def first_kwonly_arg(name): """ Emulates keyword-only arguments under python2. Works with both python2 and python3. With this decorator you can convert all or some of the default arguments of your function into kwonly arguments. Use ``KWONLY_REQUIRED`` as the default value of required kwonly args. :param name: The name of the first default argument to be treated as a keyword-only argument. This default argument along with all default arguments that follow this one will be treated as keyword only arguments. You can also pass here the ``FIRST_DEFAULT_ARG`` constant in order to select the first default argument. This way you turn all default arguments into keyword-only arguments. As a shortcut you can use the ``@kwonly_defaults`` decorator (without any parameters) instead of ``@first_kwonly_arg(FIRST_DEFAULT_ARG)``. >>> from kwonly_args import first_kwonly_arg, KWONLY_REQUIRED, FIRST_DEFAULT_ARG, kwonly_defaults >>> >>> # this decoration converts the ``d1`` and ``d2`` default args into kwonly args >>> @first_kwonly_arg('d1') >>> def func(a0, a1, d0='d0', d1='d1', d2='d2', *args, **kwargs): >>> print(a0, a1, d0, d1, d2, args, kwargs) >>> >>> func(0, 1, 2, 3, 4) 0 1 2 d1 d2 (3, 4) {} >>> >>> func(0, 1, 2, 3, 4, d2='my_param') 0 1 2 d1 my_param (3, 4) {} >>> >>> # d0 is an optional deyword argument, d1 is required >>> def func(d0='d0', d1=KWONLY_REQUIRED): >>> print(d0, d1) >>> >>> # The ``FIRST_DEFAULT_ARG`` constant automatically selects the first default argument so it >>> # turns all default arguments into keyword-only ones. Both d0 and d1 are keyword-only arguments. >>> @first_kwonly_arg(FIRST_DEFAULT_ARG) >>> def func(a0, a1, d0='d0', d1='d1'): >>> print(a0, a1, d0, d1) >>> >>> # ``@kwonly_defaults`` is a shortcut for the ``@first_kwonly_arg(FIRST_DEFAULT_ARG)`` >>> # in the previous example. This example has the same effect as the previous one. >>> @kwonly_defaults >>> def func(a0, a1, d0='d0', d1='d1'): >>> print(a0, a1, d0, d1) """ def decorate(wrapped): if sys.version_info[0] == 2: arg_names, varargs, _, defaults = inspect.getargspec(wrapped) else: arg_names, varargs, _, defaults = inspect.getfullargspec(wrapped)[:4] if not defaults: raise TypeError("You can't use @first_kwonly_arg on a function that doesn't have default arguments!") first_default_index = len(arg_names) - len(defaults) if name is FIRST_DEFAULT_ARG: first_kwonly_index = first_default_index else: try: first_kwonly_index = arg_names.index(name) except ValueError: raise ValueError("%s() doesn't have an argument with the specified first_kwonly_arg=%r name" % ( getattr(wrapped, '__name__', '?'), name)) if first_kwonly_index < first_default_index: raise ValueError("The specified first_kwonly_arg=%r must have a default value!" % (name,)) kwonly_defaults = defaults[-(len(arg_names)-first_kwonly_index):] kwonly_args = tuple(zip(arg_names[first_kwonly_index:], kwonly_defaults)) required_kwonly_args = frozenset(arg for arg, default in kwonly_args if default is KWONLY_REQUIRED) def wrapper(*args, **kwargs): if required_kwonly_args: missing_kwonly_args = required_kwonly_args.difference(kwargs.keys()) if missing_kwonly_args: raise TypeError("%s() missing %s keyword-only argument(s): %s" % ( getattr(wrapped, '__name__', '?'), len(missing_kwonly_args), ', '.join(sorted(missing_kwonly_args)))) if len(args) > first_kwonly_index: if varargs is None: raise TypeError("%s() takes exactly %s arguments (%s given)" % ( getattr(wrapped, '__name__', '?'), first_kwonly_index, len(args))) kwonly_args_from_kwargs = tuple(kwargs.pop(arg, default) for arg, default in kwonly_args) args = args[:first_kwonly_index] + kwonly_args_from_kwargs + args[first_kwonly_index:] return wrapped(*args, **kwargs) return update_wrapper(wrapper, wrapped) return decorate
def first_kwonly_arg(name): """ Emulates keyword-only arguments under python2. Works with both python2 and python3. With this decorator you can convert all or some of the default arguments of your function into kwonly arguments. Use ``KWONLY_REQUIRED`` as the default value of required kwonly args. :param name: The name of the first default argument to be treated as a keyword-only argument. This default argument along with all default arguments that follow this one will be treated as keyword only arguments. You can also pass here the ``FIRST_DEFAULT_ARG`` constant in order to select the first default argument. This way you turn all default arguments into keyword-only arguments. As a shortcut you can use the ``@kwonly_defaults`` decorator (without any parameters) instead of ``@first_kwonly_arg(FIRST_DEFAULT_ARG)``. >>> from kwonly_args import first_kwonly_arg, KWONLY_REQUIRED, FIRST_DEFAULT_ARG, kwonly_defaults >>> >>> # this decoration converts the ``d1`` and ``d2`` default args into kwonly args >>> @first_kwonly_arg('d1') >>> def func(a0, a1, d0='d0', d1='d1', d2='d2', *args, **kwargs): >>> print(a0, a1, d0, d1, d2, args, kwargs) >>> >>> func(0, 1, 2, 3, 4) 0 1 2 d1 d2 (3, 4) {} >>> >>> func(0, 1, 2, 3, 4, d2='my_param') 0 1 2 d1 my_param (3, 4) {} >>> >>> # d0 is an optional deyword argument, d1 is required >>> def func(d0='d0', d1=KWONLY_REQUIRED): >>> print(d0, d1) >>> >>> # The ``FIRST_DEFAULT_ARG`` constant automatically selects the first default argument so it >>> # turns all default arguments into keyword-only ones. Both d0 and d1 are keyword-only arguments. >>> @first_kwonly_arg(FIRST_DEFAULT_ARG) >>> def func(a0, a1, d0='d0', d1='d1'): >>> print(a0, a1, d0, d1) >>> >>> # ``@kwonly_defaults`` is a shortcut for the ``@first_kwonly_arg(FIRST_DEFAULT_ARG)`` >>> # in the previous example. This example has the same effect as the previous one. >>> @kwonly_defaults >>> def func(a0, a1, d0='d0', d1='d1'): >>> print(a0, a1, d0, d1) """ def decorate(wrapped): if sys.version_info[0] == 2: arg_names, varargs, _, defaults = inspect.getargspec(wrapped) else: arg_names, varargs, _, defaults = inspect.getfullargspec(wrapped)[:4] if not defaults: raise TypeError("You can't use @first_kwonly_arg on a function that doesn't have default arguments!") first_default_index = len(arg_names) - len(defaults) if name is FIRST_DEFAULT_ARG: first_kwonly_index = first_default_index else: try: first_kwonly_index = arg_names.index(name) except ValueError: raise ValueError("%s() doesn't have an argument with the specified first_kwonly_arg=%r name" % ( getattr(wrapped, '__name__', '?'), name)) if first_kwonly_index < first_default_index: raise ValueError("The specified first_kwonly_arg=%r must have a default value!" % (name,)) kwonly_defaults = defaults[-(len(arg_names)-first_kwonly_index):] kwonly_args = tuple(zip(arg_names[first_kwonly_index:], kwonly_defaults)) required_kwonly_args = frozenset(arg for arg, default in kwonly_args if default is KWONLY_REQUIRED) def wrapper(*args, **kwargs): if required_kwonly_args: missing_kwonly_args = required_kwonly_args.difference(kwargs.keys()) if missing_kwonly_args: raise TypeError("%s() missing %s keyword-only argument(s): %s" % ( getattr(wrapped, '__name__', '?'), len(missing_kwonly_args), ', '.join(sorted(missing_kwonly_args)))) if len(args) > first_kwonly_index: if varargs is None: raise TypeError("%s() takes exactly %s arguments (%s given)" % ( getattr(wrapped, '__name__', '?'), first_kwonly_index, len(args))) kwonly_args_from_kwargs = tuple(kwargs.pop(arg, default) for arg, default in kwonly_args) args = args[:first_kwonly_index] + kwonly_args_from_kwargs + args[first_kwonly_index:] return wrapped(*args, **kwargs) return update_wrapper(wrapper, wrapped) return decorate
[ "Emulates", "keyword", "-", "only", "arguments", "under", "python2", ".", "Works", "with", "both", "python2", "and", "python3", ".", "With", "this", "decorator", "you", "can", "convert", "all", "or", "some", "of", "the", "default", "arguments", "of", "your", "function", "into", "kwonly", "arguments", ".", "Use", "KWONLY_REQUIRED", "as", "the", "default", "value", "of", "required", "kwonly", "args", "." ]
pasztorpisti/kwonly-args
python
https://github.com/pasztorpisti/kwonly-args/blob/640b7f89514cce2167f510d661e0e5f65126d70d/src/kwonly_args/__init__.py#L35-L119
[ "def", "first_kwonly_arg", "(", "name", ")", ":", "def", "decorate", "(", "wrapped", ")", ":", "if", "sys", ".", "version_info", "[", "0", "]", "==", "2", ":", "arg_names", ",", "varargs", ",", "_", ",", "defaults", "=", "inspect", ".", "getargspec", "(", "wrapped", ")", "else", ":", "arg_names", ",", "varargs", ",", "_", ",", "defaults", "=", "inspect", ".", "getfullargspec", "(", "wrapped", ")", "[", ":", "4", "]", "if", "not", "defaults", ":", "raise", "TypeError", "(", "\"You can't use @first_kwonly_arg on a function that doesn't have default arguments!\"", ")", "first_default_index", "=", "len", "(", "arg_names", ")", "-", "len", "(", "defaults", ")", "if", "name", "is", "FIRST_DEFAULT_ARG", ":", "first_kwonly_index", "=", "first_default_index", "else", ":", "try", ":", "first_kwonly_index", "=", "arg_names", ".", "index", "(", "name", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "\"%s() doesn't have an argument with the specified first_kwonly_arg=%r name\"", "%", "(", "getattr", "(", "wrapped", ",", "'__name__'", ",", "'?'", ")", ",", "name", ")", ")", "if", "first_kwonly_index", "<", "first_default_index", ":", "raise", "ValueError", "(", "\"The specified first_kwonly_arg=%r must have a default value!\"", "%", "(", "name", ",", ")", ")", "kwonly_defaults", "=", "defaults", "[", "-", "(", "len", "(", "arg_names", ")", "-", "first_kwonly_index", ")", ":", "]", "kwonly_args", "=", "tuple", "(", "zip", "(", "arg_names", "[", "first_kwonly_index", ":", "]", ",", "kwonly_defaults", ")", ")", "required_kwonly_args", "=", "frozenset", "(", "arg", "for", "arg", ",", "default", "in", "kwonly_args", "if", "default", "is", "KWONLY_REQUIRED", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "required_kwonly_args", ":", "missing_kwonly_args", "=", "required_kwonly_args", ".", "difference", "(", "kwargs", ".", "keys", "(", ")", ")", "if", "missing_kwonly_args", ":", "raise", "TypeError", "(", "\"%s() missing %s keyword-only argument(s): %s\"", "%", "(", "getattr", "(", "wrapped", ",", "'__name__'", ",", "'?'", ")", ",", "len", "(", "missing_kwonly_args", ")", ",", "', '", ".", "join", "(", "sorted", "(", "missing_kwonly_args", ")", ")", ")", ")", "if", "len", "(", "args", ")", ">", "first_kwonly_index", ":", "if", "varargs", "is", "None", ":", "raise", "TypeError", "(", "\"%s() takes exactly %s arguments (%s given)\"", "%", "(", "getattr", "(", "wrapped", ",", "'__name__'", ",", "'?'", ")", ",", "first_kwonly_index", ",", "len", "(", "args", ")", ")", ")", "kwonly_args_from_kwargs", "=", "tuple", "(", "kwargs", ".", "pop", "(", "arg", ",", "default", ")", "for", "arg", ",", "default", "in", "kwonly_args", ")", "args", "=", "args", "[", ":", "first_kwonly_index", "]", "+", "kwonly_args_from_kwargs", "+", "args", "[", "first_kwonly_index", ":", "]", "return", "wrapped", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "update_wrapper", "(", "wrapper", ",", "wrapped", ")", "return", "decorate" ]
640b7f89514cce2167f510d661e0e5f65126d70d
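A compact, runnable variant of the docstring's own examples, using the names it imports (`kwonly_defaults`, `KWONLY_REQUIRED`); the `connect` function is made up purely for illustration.

```python
from kwonly_args import kwonly_defaults, KWONLY_REQUIRED

@kwonly_defaults
def connect(host, port=8080, timeout=KWONLY_REQUIRED):
    return host, port, timeout

print(connect('db.local', timeout=3))    # ('db.local', 8080, 3)

# port and timeout are now keyword-only:
# connect('db.local', 9000, timeout=3)   -> TypeError: connect() takes exactly 1 arguments (2 given)
# connect('db.local')                    -> TypeError: connect() missing 1 keyword-only argument(s): timeout
```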
test
devpiserver_on_upload
called when a file is uploaded to a private stage for a projectname/version. link.entry.file_exists() may be false because a more recent revision deleted the file (and files are not revisioned). NOTE that this hook is currently NOT called for the implicit "caching" uploads to the pypi mirror. If the uploaded file is a wheel and is the latest version on this index, store its metadata in json file at the root of index/+f/ directory. With the standard config with nginx, nginx will directly serve this file.
devpi_metawheel/main.py
def devpiserver_on_upload(stage, project, version, link): """ called when a file is uploaded to a private stage for a projectname/version. link.entry.file_exists() may be false because a more recent revision deleted the file (and files are not revisioned). NOTE that this hook is currently NOT called for the implicit "caching" uploads to the pypi mirror. If the uploaded file is a wheel and is the latest version on this index, store its metadata in json file at the root of index/+f/ directory. With the standard config with nginx, nginx will directly serve this file. """ if link.entry and link.entry.file_exists() and link.entry.basename.endswith('.whl'): threadlog.info("Wheel detected: %s", link.entry.basename) new_version = parse_version(version) latest_version = parse_version(stage.get_latest_version_perstage(project)) if latest_version > new_version: threadlog.debug("A newer release has already been uploaded: %s - nothing to do", latest_version) return metadata = extract_metadata_from_wheel_file(link.entry.file_os_path()) linkstore = stage.get_linkstore_perstage(link.project, link.version) project_dir = '%s/%s/+f/%s' % (linkstore.filestore.storedir, stage.name, project) if not os.path.exists(project_dir): os.mkdir(project_dir) json_path = '%s/%s-%s.json' % (project_dir, project, new_version) with open(json_path, 'w') as fd: fd.write(json.dumps(metadata)) threadlog.info("Stored %s to: %s", metadata, json_path) # We symlink the latest version symlink_path = '%s.json' % project_dir if os.path.exists(symlink_path): os.unlink(symlink_path) os.symlink(json_path, symlink_path)
def devpiserver_on_upload(stage, project, version, link): """ called when a file is uploaded to a private stage for a projectname/version. link.entry.file_exists() may be false because a more recent revision deleted the file (and files are not revisioned). NOTE that this hook is currently NOT called for the implicit "caching" uploads to the pypi mirror. If the uploaded file is a wheel and is the latest version on this index, store its metadata in json file at the root of index/+f/ directory. With the standard config with nginx, nginx will directly serve this file. """ if link.entry and link.entry.file_exists() and link.entry.basename.endswith('.whl'): threadlog.info("Wheel detected: %s", link.entry.basename) new_version = parse_version(version) latest_version = parse_version(stage.get_latest_version_perstage(project)) if latest_version > new_version: threadlog.debug("A newer release has already been uploaded: %s - nothing to do", latest_version) return metadata = extract_metadata_from_wheel_file(link.entry.file_os_path()) linkstore = stage.get_linkstore_perstage(link.project, link.version) project_dir = '%s/%s/+f/%s' % (linkstore.filestore.storedir, stage.name, project) if not os.path.exists(project_dir): os.mkdir(project_dir) json_path = '%s/%s-%s.json' % (project_dir, project, new_version) with open(json_path, 'w') as fd: fd.write(json.dumps(metadata)) threadlog.info("Stored %s to: %s", metadata, json_path) # We symlink the latest version symlink_path = '%s.json' % project_dir if os.path.exists(symlink_path): os.unlink(symlink_path) os.symlink(json_path, symlink_path)
[ "called", "when", "a", "file", "is", "uploaded", "to", "a", "private", "stage", "for", "a", "projectname", "/", "version", ".", "link", ".", "entry", ".", "file_exists", "()", "may", "be", "false", "because", "a", "more", "recent", "revision", "deleted", "the", "file", "(", "and", "files", "are", "not", "revisioned", ")", ".", "NOTE", "that", "this", "hook", "is", "currently", "NOT", "called", "for", "the", "implicit", "caching", "uploads", "to", "the", "pypi", "mirror", "." ]
Polyconseil/devpi-metawheel
python
https://github.com/Polyconseil/devpi-metawheel/blob/8794f7b74c406979aa4a35fa1cd28e1e9c8864ae/devpi_metawheel/main.py#L17-L52
[ "def", "devpiserver_on_upload", "(", "stage", ",", "project", ",", "version", ",", "link", ")", ":", "if", "link", ".", "entry", "and", "link", ".", "entry", ".", "file_exists", "(", ")", "and", "link", ".", "entry", ".", "basename", ".", "endswith", "(", "'.whl'", ")", ":", "threadlog", ".", "info", "(", "\"Wheel detected: %s\"", ",", "link", ".", "entry", ".", "basename", ")", "new_version", "=", "parse_version", "(", "version", ")", "latest_version", "=", "parse_version", "(", "stage", ".", "get_latest_version_perstage", "(", "project", ")", ")", "if", "latest_version", ">", "new_version", ":", "threadlog", ".", "debug", "(", "\"A newer release has already been uploaded: %s - nothing to do\"", ",", "latest_version", ")", "return", "metadata", "=", "extract_metadata_from_wheel_file", "(", "link", ".", "entry", ".", "file_os_path", "(", ")", ")", "linkstore", "=", "stage", ".", "get_linkstore_perstage", "(", "link", ".", "project", ",", "link", ".", "version", ")", "project_dir", "=", "'%s/%s/+f/%s'", "%", "(", "linkstore", ".", "filestore", ".", "storedir", ",", "stage", ".", "name", ",", "project", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "project_dir", ")", ":", "os", ".", "mkdir", "(", "project_dir", ")", "json_path", "=", "'%s/%s-%s.json'", "%", "(", "project_dir", ",", "project", ",", "new_version", ")", "with", "open", "(", "json_path", ",", "'w'", ")", "as", "fd", ":", "fd", ".", "write", "(", "json", ".", "dumps", "(", "metadata", ")", ")", "threadlog", ".", "info", "(", "\"Stored %s to: %s\"", ",", "metadata", ",", "json_path", ")", "# We symlink the latest version", "symlink_path", "=", "'%s.json'", "%", "project_dir", "if", "os", ".", "path", ".", "exists", "(", "symlink_path", ")", ":", "os", ".", "unlink", "(", "symlink_path", ")", "os", ".", "symlink", "(", "json_path", ",", "symlink_path", ")" ]
8794f7b74c406979aa4a35fa1cd28e1e9c8864ae
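The `extract_metadata_from_wheel_file` helper used above is not part of this record. As a purely generic illustration (not the plugin's actual implementation), wheel metadata can be read with the standard library alone, since a wheel is a zip archive containing a `*.dist-info/METADATA` file:

```python
import json
import zipfile
from email.parser import Parser

def read_wheel_metadata(wheel_path):
    """Illustrative only: pull METADATA out of a wheel and keep a few common fields."""
    with zipfile.ZipFile(wheel_path) as wheel:
        metadata_name = next(n for n in wheel.namelist() if n.endswith('.dist-info/METADATA'))
        msg = Parser().parsestr(wheel.read(metadata_name).decode('utf-8'))
    return {'name': msg['Name'], 'version': msg['Version'], 'summary': msg['Summary']}

# print(json.dumps(read_wheel_metadata('example-1.0-py3-none-any.whl')))  # path is hypothetical
```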
test
snap
Args: instruction (string): a string that encodes 0 to n transformations of a time, i.e. "-1h@h", "@mon+2d+4h", ... dttm (datetime): Returns: datetime: The datetime resulting from applying all transformations to the input datetime. Example: >>> snap(datetime(2016, 1, 1, 15, 30), "-1h@h") datetime(2016, 1, 1, 14)
snaptime/main.py
def snap(dttm, instruction): """ Args: instruction (string): a string that encodes 0 to n transformations of a time, i.e. "-1h@h", "@mon+2d+4h", ... dttm (datetime): Returns: datetime: The datetime resulting from applying all transformations to the input datetime. Example: >>> snap(datetime(2016, 1, 1, 15, 30), "-1h@h") datetime(2016, 1, 1, 14) """ transformations = parse(instruction) return reduce(lambda dt, transformation: transformation.apply_to(dt), transformations, dttm)
def snap(dttm, instruction): """ Args: instruction (string): a string that encodes 0 to n transformations of a time, i.e. "-1h@h", "@mon+2d+4h", ... dttm (datetime): Returns: datetime: The datetime resulting from applying all transformations to the input datetime. Example: >>> snap(datetime(2016, 1, 1, 15, 30), "-1h@h") datetime(2016, 1, 1, 14) """ transformations = parse(instruction) return reduce(lambda dt, transformation: transformation.apply_to(dt), transformations, dttm)
[ "Args", ":", "instruction", "(", "string", ")", ":", "a", "string", "that", "encodes", "0", "to", "n", "transformations", "of", "a", "time", "i", ".", "e", ".", "-", "1h@h", "@mon", "+", "2d", "+", "4h", "...", "dttm", "(", "datetime", ")", ":", "Returns", ":", "datetime", ":", "The", "datetime", "resulting", "from", "applying", "all", "transformations", "to", "the", "input", "datetime", "." ]
zartstrom/snaptime
python
https://github.com/zartstrom/snaptime/blob/b05ae09d4dccb1b5c8c4ace9c1937b8139672a3c/snaptime/main.py#L169-L182
[ "def", "snap", "(", "dttm", ",", "instruction", ")", ":", "transformations", "=", "parse", "(", "instruction", ")", "return", "reduce", "(", "lambda", "dt", ",", "transformation", ":", "transformation", ".", "apply_to", "(", "dt", ")", ",", "transformations", ",", "dttm", ")" ]
b05ae09d4dccb1b5c8c4ace9c1937b8139672a3c
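Instructions compose left to right, each transformation feeding the next. A short sketch based on the docstring's example follows; the import uses the `snaptime/main.py` path given above, and the second instruction simply extrapolates from the documented grammar.

```python
from datetime import datetime
from snaptime.main import snap

dttm = datetime(2016, 1, 1, 15, 30)
print(snap(dttm, "-1h@h"))   # 2016-01-01 14:00:00  (back one hour, then truncate to the hour)
print(snap(dttm, "@d+9h"))   # expected 2016-01-01 09:00:00  (truncate to the day, then add nine hours)
```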
test
snap_tz
This function handles timezone aware datetimes. Sometimes it is necessary to keep daylight saving time switches in mind. Args: instruction (string): a string that encodes 0 to n transformations of a time, i.e. "-1h@h", "@mon+2d+4h", ... dttm (datetime): a datetime with timezone timezone: a pytz timezone Returns: datetime: The datetime resulting from applying all transformations to the input datetime. Example: >>> import pytz >>> CET = pytz.timezone("Europe/Berlin") >>> dttm = CET.localize(datetime(2017, 3, 26, 3, 44) >>> dttm datetime.datetime(2017, 3, 26, 3, 44, tzinfo=<DstTzInfo 'Europe/Berlin' CEST+2:00:00 DST>) >>> snap_tz(dttm, "-2h@h", CET) datetime.datetime(2017, 3, 26, 0, 0, tzinfo=<DstTzInfo 'Europe/Berlin' CET+1:00:00 STD>) >>> # switch from winter to summer time!
snaptime/main.py
def snap_tz(dttm, instruction, timezone): """This function handles timezone aware datetimes. Sometimes it is necessary to keep daylight saving time switches in mind. Args: instruction (string): a string that encodes 0 to n transformations of a time, i.e. "-1h@h", "@mon+2d+4h", ... dttm (datetime): a datetime with timezone timezone: a pytz timezone Returns: datetime: The datetime resulting from applying all transformations to the input datetime. Example: >>> import pytz >>> CET = pytz.timezone("Europe/Berlin") >>> dttm = CET.localize(datetime(2017, 3, 26, 3, 44) >>> dttm datetime.datetime(2017, 3, 26, 3, 44, tzinfo=<DstTzInfo 'Europe/Berlin' CEST+2:00:00 DST>) >>> snap_tz(dttm, "-2h@h", CET) datetime.datetime(2017, 3, 26, 0, 0, tzinfo=<DstTzInfo 'Europe/Berlin' CET+1:00:00 STD>) >>> # switch from winter to summer time! """ transformations = parse(instruction) return reduce(lambda dt, transformation: transformation.apply_to_with_tz(dt, timezone), transformations, dttm)
def snap_tz(dttm, instruction, timezone): """This function handles timezone aware datetimes. Sometimes it is necessary to keep daylight saving time switches in mind. Args: instruction (string): a string that encodes 0 to n transformations of a time, i.e. "-1h@h", "@mon+2d+4h", ... dttm (datetime): a datetime with timezone timezone: a pytz timezone Returns: datetime: The datetime resulting from applying all transformations to the input datetime. Example: >>> import pytz >>> CET = pytz.timezone("Europe/Berlin") >>> dttm = CET.localize(datetime(2017, 3, 26, 3, 44) >>> dttm datetime.datetime(2017, 3, 26, 3, 44, tzinfo=<DstTzInfo 'Europe/Berlin' CEST+2:00:00 DST>) >>> snap_tz(dttm, "-2h@h", CET) datetime.datetime(2017, 3, 26, 0, 0, tzinfo=<DstTzInfo 'Europe/Berlin' CET+1:00:00 STD>) >>> # switch from winter to summer time! """ transformations = parse(instruction) return reduce(lambda dt, transformation: transformation.apply_to_with_tz(dt, timezone), transformations, dttm)
[ "This", "function", "handles", "timezone", "aware", "datetimes", ".", "Sometimes", "it", "is", "necessary", "to", "keep", "daylight", "saving", "time", "switches", "in", "mind", "." ]
zartstrom/snaptime
python
https://github.com/zartstrom/snaptime/blob/b05ae09d4dccb1b5c8c4ace9c1937b8139672a3c/snaptime/main.py#L185-L208
[ "def", "snap_tz", "(", "dttm", ",", "instruction", ",", "timezone", ")", ":", "transformations", "=", "parse", "(", "instruction", ")", "return", "reduce", "(", "lambda", "dt", ",", "transformation", ":", "transformation", ".", "apply_to_with_tz", "(", "dt", ",", "timezone", ")", ",", "transformations", ",", "dttm", ")" ]
b05ae09d4dccb1b5c8c4ace9c1937b8139672a3c
test
SnapTransformation.apply_to_with_tz
We make sure that after truncating we use the correct timezone, even if we 'jump' over a daylight saving time switch. I.e. if we apply "@d" to `Sun Oct 30 04:30:00 CET 2016` (1477798200) we want to have `Sun Oct 30 00:00:00 CEST 2016` (1477778400) but not `Sun Oct 30 00:00:00 CET 2016` (1477782000)
snaptime/main.py
def apply_to_with_tz(self, dttm, timezone): """We make sure that after truncating we use the correct timezone, even if we 'jump' over a daylight saving time switch. I.e. if we apply "@d" to `Sun Oct 30 04:30:00 CET 2016` (1477798200) we want to have `Sun Oct 30 00:00:00 CEST 2016` (1477778400) but not `Sun Oct 30 00:00:00 CET 2016` (1477782000) """ result = self.apply_to(dttm) if self.unit in [DAYS, WEEKS, MONTHS, YEARS]: naive_dttm = datetime(result.year, result.month, result.day) result = timezone.localize(naive_dttm) return result
def apply_to_with_tz(self, dttm, timezone): """We make sure that after truncating we use the correct timezone, even if we 'jump' over a daylight saving time switch. I.e. if we apply "@d" to `Sun Oct 30 04:30:00 CET 2016` (1477798200) we want to have `Sun Oct 30 00:00:00 CEST 2016` (1477778400) but not `Sun Oct 30 00:00:00 CET 2016` (1477782000) """ result = self.apply_to(dttm) if self.unit in [DAYS, WEEKS, MONTHS, YEARS]: naive_dttm = datetime(result.year, result.month, result.day) result = timezone.localize(naive_dttm) return result
[ "We", "make", "sure", "that", "after", "truncating", "we", "use", "the", "correct", "timezone", "even", "if", "we", "jump", "over", "a", "daylight", "saving", "time", "switch", "." ]
zartstrom/snaptime
python
https://github.com/zartstrom/snaptime/blob/b05ae09d4dccb1b5c8c4ace9c1937b8139672a3c/snaptime/main.py#L119-L131
[ "def", "apply_to_with_tz", "(", "self", ",", "dttm", ",", "timezone", ")", ":", "result", "=", "self", ".", "apply_to", "(", "dttm", ")", "if", "self", ".", "unit", "in", "[", "DAYS", ",", "WEEKS", ",", "MONTHS", ",", "YEARS", "]", ":", "naive_dttm", "=", "datetime", "(", "result", ".", "year", ",", "result", ".", "month", ",", "result", ".", "day", ")", "result", "=", "timezone", ".", "localize", "(", "naive_dttm", ")", "return", "result" ]
b05ae09d4dccb1b5c8c4ace9c1937b8139672a3c
test
Barcode.save
Renders the barcode and saves it in `filename`. :parameters: filename : String Filename to save the barcode in (without filename extension). options : Dict The same as in `self.render`. :returns: The full filename with extension. :rtype: String
barcode/base.py
def save(self, filename, options=None): """Renders the barcode and saves it in `filename`. :parameters: filename : String Filename to save the barcode in (without filename extension). options : Dict The same as in `self.render`. :returns: The full filename with extension. :rtype: String """ output = self.render(options) _filename = self.writer.save(filename, output) return _filename
def save(self, filename, options=None): """Renders the barcode and saves it in `filename`. :parameters: filename : String Filename to save the barcode in (without filename extension). options : Dict The same as in `self.render`. :returns: The full filename with extension. :rtype: String """ output = self.render(options) _filename = self.writer.save(filename, output) return _filename
[ "Renders", "the", "barcode", "and", "saves", "it", "in", "filename", "." ]
kxepal/viivakoodi
python
https://github.com/kxepal/viivakoodi/blob/79f5e866465f481982f9870c31f49a815e921c28/barcode/base.py#L54-L69
[ "def", "save", "(", "self", ",", "filename", ",", "options", "=", "None", ")", ":", "output", "=", "self", ".", "render", "(", "options", ")", "_filename", "=", "self", ".", "writer", ".", "save", "(", "filename", ",", "output", ")", "return", "_filename" ]
79f5e866465f481982f9870c31f49a815e921c28
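A hedged usage sketch for `save()`: `EuropeanArticleNumber13` from `barcode/ean.py` (its checksum method appears further down) subclasses `Barcode`, so saving renders with the instance's writer and returns the full filename. The constructor argument (a 12-digit code string, with the check digit computed automatically) and the default SVG output are assumptions based on this fork's pyBarcode lineage, not facts stated in the record.

```python
from barcode.ean import EuropeanArticleNumber13

ean = EuropeanArticleNumber13('590123412345')   # assumed signature: the 12-digit code
filename = ean.save('ean13_example')            # renders via self.writer, e.g. 'ean13_example.svg'
print(filename)
```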
test
Barcode.render
Renders the barcode using `self.writer`. :parameters: writer_options : Dict Options for `self.writer`, see writer docs for details. :returns: Output of the writers render method.
barcode/base.py
def render(self, writer_options=None): """Renders the barcode using `self.writer`. :parameters: writer_options : Dict Options for `self.writer`, see writer docs for details. :returns: Output of the writers render method. """ options = Barcode.default_writer_options.copy() options.update(writer_options or {}) if options['write_text']: options['text'] = self.get_fullcode() self.writer.set_options(options) code = self.build() raw = Barcode.raw = self.writer.render(code) return raw
def render(self, writer_options=None): """Renders the barcode using `self.writer`. :parameters: writer_options : Dict Options for `self.writer`, see writer docs for details. :returns: Output of the writers render method. """ options = Barcode.default_writer_options.copy() options.update(writer_options or {}) if options['write_text']: options['text'] = self.get_fullcode() self.writer.set_options(options) code = self.build() raw = Barcode.raw = self.writer.render(code) return raw
[ "Renders", "the", "barcode", "using", "self", ".", "writer", "." ]
kxepal/viivakoodi
python
https://github.com/kxepal/viivakoodi/blob/79f5e866465f481982f9870c31f49a815e921c28/barcode/base.py#L87-L103
[ "def", "render", "(", "self", ",", "writer_options", "=", "None", ")", ":", "options", "=", "Barcode", ".", "default_writer_options", ".", "copy", "(", ")", "options", ".", "update", "(", "writer_options", "or", "{", "}", ")", "if", "options", "[", "'write_text'", "]", ":", "options", "[", "'text'", "]", "=", "self", ".", "get_fullcode", "(", ")", "self", ".", "writer", ".", "set_options", "(", "options", ")", "code", "=", "self", ".", "build", "(", ")", "raw", "=", "Barcode", ".", "raw", "=", "self", ".", "writer", ".", "render", "(", "code", ")", "return", "raw" ]
79f5e866465f481982f9870c31f49a815e921c28
test
EuropeanArticleNumber13.calculate_checksum
Calculates the checksum for EAN13-Code. :returns: The checksum for `self.ean`. :rtype: Integer
barcode/ean.py
def calculate_checksum(self): """Calculates the checksum for EAN13-Code. :returns: The checksum for `self.ean`. :rtype: Integer """ def sum_(x, y): return int(x) + int(y) evensum = reduce(sum_, self.ean[::2]) oddsum = reduce(sum_, self.ean[1::2]) return (10 - ((evensum + oddsum * 3) % 10)) % 10
def calculate_checksum(self): """Calculates the checksum for EAN13-Code. :returns: The checksum for `self.ean`. :rtype: Integer """ def sum_(x, y): return int(x) + int(y) evensum = reduce(sum_, self.ean[::2]) oddsum = reduce(sum_, self.ean[1::2]) return (10 - ((evensum + oddsum * 3) % 10)) % 10
[ "Calculates", "the", "checksum", "for", "EAN13", "-", "Code", "." ]
kxepal/viivakoodi
python
https://github.com/kxepal/viivakoodi/blob/79f5e866465f481982f9870c31f49a815e921c28/barcode/ean.py#L55-L66
[ "def", "calculate_checksum", "(", "self", ")", ":", "def", "sum_", "(", "x", ",", "y", ")", ":", "return", "int", "(", "x", ")", "+", "int", "(", "y", ")", "evensum", "=", "reduce", "(", "sum_", ",", "self", ".", "ean", "[", ":", ":", "2", "]", ")", "oddsum", "=", "reduce", "(", "sum_", ",", "self", ".", "ean", "[", "1", ":", ":", "2", "]", ")", "return", "(", "10", "-", "(", "(", "evensum", "+", "oddsum", "*", "3", ")", "%", "10", ")", ")", "%", "10" ]
79f5e866465f481982f9870c31f49a815e921c28
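The checksum arithmetic is easy to verify by hand: digits at even indices carry weight 1, digits at odd indices weight 3, and the check digit tops the weighted sum up to a multiple of 10. A standalone re-implementation of the same formula for a 12-digit string:

```python
from functools import reduce

def ean13_checksum(ean12):
    """Same formula as EuropeanArticleNumber13.calculate_checksum above."""
    evensum = reduce(lambda x, y: int(x) + int(y), ean12[::2])
    oddsum = reduce(lambda x, y: int(x) + int(y), ean12[1::2])
    return (10 - ((evensum + oddsum * 3) % 10)) % 10

# 17 * 1 + 22 * 3 = 83, and 83 + 7 = 90, so the check digit is 7:
assert ean13_checksum('590123412345') == 7      # full code: 5901234123457
```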
test
BaseWriter.render
Renders the barcode to whatever the inheriting writer provides, using the registered callbacks. :parameters: code : List List of strings matching the writer spec (only contain 0 or 1).
barcode/writer.py
def render(self, code): """Renders the barcode to whatever the inheriting writer provides, using the registered callbacks. :parameters: code : List List of strings matching the writer spec (only contain 0 or 1). """ if self._callbacks['initialize'] is not None: self._callbacks['initialize'](code) ypos = 1.0 for line in code: # Left quiet zone is x startposition xpos = self.quiet_zone for mod in line: if mod == '0': color = self.background else: color = self.foreground self._callbacks['paint_module'](xpos, ypos, self.module_width, color) xpos += self.module_width # Add right quiet zone to every line self._callbacks['paint_module'](xpos, ypos, self.quiet_zone, self.background) ypos += self.module_height if self.text and self._callbacks['paint_text'] is not None: ypos += self.text_distance if self.center_text: xpos = xpos / 2.0 else: xpos = self.quiet_zone + 4.0 self._callbacks['paint_text'](xpos, ypos) return self._callbacks['finish']()
def render(self, code): """Renders the barcode to whatever the inheriting writer provides, using the registered callbacks. :parameters: code : List List of strings matching the writer spec (only contain 0 or 1). """ if self._callbacks['initialize'] is not None: self._callbacks['initialize'](code) ypos = 1.0 for line in code: # Left quiet zone is x startposition xpos = self.quiet_zone for mod in line: if mod == '0': color = self.background else: color = self.foreground self._callbacks['paint_module'](xpos, ypos, self.module_width, color) xpos += self.module_width # Add right quiet zone to every line self._callbacks['paint_module'](xpos, ypos, self.quiet_zone, self.background) ypos += self.module_height if self.text and self._callbacks['paint_text'] is not None: ypos += self.text_distance if self.center_text: xpos = xpos / 2.0 else: xpos = self.quiet_zone + 4.0 self._callbacks['paint_text'](xpos, ypos) return self._callbacks['finish']()
[ "Renders", "the", "barcode", "to", "whatever", "the", "inheriting", "writer", "provides", "using", "the", "registered", "callbacks", "." ]
kxepal/viivakoodi
python
https://github.com/kxepal/viivakoodi/blob/79f5e866465f481982f9870c31f49a815e921c28/barcode/writer.py#L158-L192
[ "def", "render", "(", "self", ",", "code", ")", ":", "if", "self", ".", "_callbacks", "[", "'initialize'", "]", "is", "not", "None", ":", "self", ".", "_callbacks", "[", "'initialize'", "]", "(", "code", ")", "ypos", "=", "1.0", "for", "line", "in", "code", ":", "# Left quiet zone is x startposition", "xpos", "=", "self", ".", "quiet_zone", "for", "mod", "in", "line", ":", "if", "mod", "==", "'0'", ":", "color", "=", "self", ".", "background", "else", ":", "color", "=", "self", ".", "foreground", "self", ".", "_callbacks", "[", "'paint_module'", "]", "(", "xpos", ",", "ypos", ",", "self", ".", "module_width", ",", "color", ")", "xpos", "+=", "self", ".", "module_width", "# Add right quiet zone to every line", "self", ".", "_callbacks", "[", "'paint_module'", "]", "(", "xpos", ",", "ypos", ",", "self", ".", "quiet_zone", ",", "self", ".", "background", ")", "ypos", "+=", "self", ".", "module_height", "if", "self", ".", "text", "and", "self", ".", "_callbacks", "[", "'paint_text'", "]", "is", "not", "None", ":", "ypos", "+=", "self", ".", "text_distance", "if", "self", ".", "center_text", ":", "xpos", "=", "xpos", "/", "2.0", "else", ":", "xpos", "=", "self", ".", "quiet_zone", "+", "4.0", "self", ".", "_callbacks", "[", "'paint_text'", "]", "(", "xpos", ",", "ypos", ")", "return", "self", ".", "_callbacks", "[", "'finish'", "]", "(", ")" ]
79f5e866465f481982f9870c31f49a815e921c28
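`BaseWriter.render` is callback-driven: the concrete writer supplies `initialize`, `paint_module`, `paint_text` and `finish`, while `render` only walks the '0'/'1' modules plus the quiet zones. Below is a toy, package-independent illustration of the same walk that 'paints' characters instead of graphics.

```python
def render_ascii(code, quiet_zone=2):
    """Toy renderer following the loop in BaseWriter.render: background for '0',
    foreground for anything else, quiet zones on both sides of every line."""
    rows = []
    for line in code:
        row = [' '] * quiet_zone                                  # left quiet zone
        row += ['#' if mod != '0' else ' ' for mod in line]       # one character per module
        row += [' '] * quiet_zone                                 # right quiet zone
        rows.append(''.join(row))
    return '\n'.join(rows)

print(render_ascii(['10100111011']))
```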
test
SangrealQuery.to_df
[pandas.read_sql] Arguments: Query {[type]} -- [description] Returns: [pd.DataFrame or generate] -- [description]
sangreal_db/orm/query.py
def to_df(self, **kwargs): """[pandas.read_sql] Arguments: Query {[type]} -- [description] Returns: [pd.DataFrame or generate] -- [description] """ return pd.read_sql(sql=self.statement, con=self.session.bind, **kwargs)
def to_df(self, **kwargs): """[pandas.read_sql] Arguments: Query {[type]} -- [description] Returns: [pd.DataFrame or generate] -- [description] """ return pd.read_sql(sql=self.statement, con=self.session.bind, **kwargs)
[ "[", "pandas", ".", "read_sql", "]", "Arguments", ":", "Query", "{", "[", "type", "]", "}", "--", "[", "description", "]", "Returns", ":", "[", "pd", ".", "DataFrame", "or", "generate", "]", "--", "[", "description", "]" ]
liubola/sangreal-db
python
https://github.com/liubola/sangreal-db/blob/f9b6339421d8d308e0ebc2a42c432ca99026c264/sangreal_db/orm/query.py#L6-L16
[ "def", "to_df", "(", "self", ",", "*", "*", "kwargs", ")", ":", "return", "pd", ".", "read_sql", "(", "sql", "=", "self", ".", "statement", ",", "con", "=", "self", ".", "session", ".", "bind", ",", "*", "*", "kwargs", ")" ]
f9b6339421d8d308e0ebc2a42c432ca99026c264
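`to_df` just hands the SQLAlchemy `Query.statement` and the session's bind to `pandas.read_sql`, so the pattern works with any SQLAlchemy session. A self-contained sketch with a throwaway in-memory model follows (the model and engine are invented for the demo, not part of sangreal-db):

```python
import pandas as pd
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class User(Base):                                  # throwaway model for the demo
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine('sqlite:///:memory:')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([User(name='ada'), User(name='grace')])
session.commit()

# This mirrors what SangrealQuery.to_df does with its own statement and bind:
query = session.query(User)
df = pd.read_sql(sql=query.statement, con=session.bind)
print(df)
```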
test
PerlSession.connect
Call that method in the pyramid configuration phase.
pyramid_kvs/perlsess.py
def connect(cls, settings): """ Call that method in the pyramid configuration phase. """ server = serializer('json').loads(settings['kvs.perlsess']) server.setdefault('key_prefix', 'perlsess::') server.setdefault('codec', 'storable') cls.cookie_name = server.pop('cookie_name', 'session_id') cls.client = KVS(**server)
def connect(cls, settings): """ Call that method in the pyramid configuration phase. """ server = serializer('json').loads(settings['kvs.perlsess']) server.setdefault('key_prefix', 'perlsess::') server.setdefault('codec', 'storable') cls.cookie_name = server.pop('cookie_name', 'session_id') cls.client = KVS(**server)
[ "Call", "that", "method", "in", "the", "pyramid", "configuration", "phase", "." ]
Gandi/pyramid_kvs
python
https://github.com/Gandi/pyramid_kvs/blob/36285f2e50d8181428f383f6fc1d79a34ea9ac3c/pyramid_kvs/perlsess.py#L31-L38
[ "def", "connect", "(", "cls", ",", "settings", ")", ":", "server", "=", "serializer", "(", "'json'", ")", ".", "loads", "(", "settings", "[", "'kvs.perlsess'", "]", ")", "server", ".", "setdefault", "(", "'key_prefix'", ",", "'perlsess::'", ")", "server", ".", "setdefault", "(", "'codec'", ",", "'storable'", ")", "cls", ".", "cookie_name", "=", "server", ".", "pop", "(", "'cookie_name'", ",", "'session_id'", ")", "cls", ".", "client", "=", "KVS", "(", "*", "*", "server", ")" ]
36285f2e50d8181428f383f6fc1d79a34ea9ac3c
test
main
Simple command line tool to help manage environment variables stored in a S3-like system. Facilitates editing text files remotely stored, as well as downloading and uploading files.
s3conf/client.py
def main(ctx, edit, create): """ Simple command line tool to help manage environment variables stored in a S3-like system. Facilitates editing text files remotely stored, as well as downloading and uploading files. """ # configs this module logger to behave properly # logger messages will go to stderr (check __init__.py/patch.py) # client output should be generated with click.echo() to go to stdout try: click_log.basic_config('s3conf') logger.debug('Running main entrypoint') if edit: if ctx.invoked_subcommand is None: logger.debug('Using config file %s', config.LOCAL_CONFIG_FILE) config.ConfigFileResolver(config.LOCAL_CONFIG_FILE).edit(create=create) return else: raise UsageError('Edit should not be called with a subcommand.') # manually call help in case no relevant settings were defined if ctx.invoked_subcommand is None: click.echo(main.get_help(ctx)) except exceptions.FileDoesNotExist as e: raise UsageError('The file {} does not exist. Try "-c" option if you want to create it.'.format(str(e)))
def main(ctx, edit, create): """ Simple command line tool to help manage environment variables stored in a S3-like system. Facilitates editing text files remotely stored, as well as downloading and uploading files. """ # configs this module logger to behave properly # logger messages will go to stderr (check __init__.py/patch.py) # client output should be generated with click.echo() to go to stdout try: click_log.basic_config('s3conf') logger.debug('Running main entrypoint') if edit: if ctx.invoked_subcommand is None: logger.debug('Using config file %s', config.LOCAL_CONFIG_FILE) config.ConfigFileResolver(config.LOCAL_CONFIG_FILE).edit(create=create) return else: raise UsageError('Edit should not be called with a subcommand.') # manually call help in case no relevant settings were defined if ctx.invoked_subcommand is None: click.echo(main.get_help(ctx)) except exceptions.FileDoesNotExist as e: raise UsageError('The file {} does not exist. Try "-c" option if you want to create it.'.format(str(e)))
[ "Simple", "command", "line", "tool", "to", "help", "manage", "environment", "variables", "stored", "in", "a", "S3", "-", "like", "system", ".", "Facilitates", "editing", "text", "files", "remotely", "stored", "as", "well", "as", "downloading", "and", "uploading", "files", "." ]
sbneto/s3conf
python
https://github.com/sbneto/s3conf/blob/92fd2973beccc85bb21d3157ff227929e62ed695/s3conf/client.py#L40-L62
[ "def", "main", "(", "ctx", ",", "edit", ",", "create", ")", ":", "# configs this module logger to behave properly", "# logger messages will go to stderr (check __init__.py/patch.py)", "# client output should be generated with click.echo() to go to stdout", "try", ":", "click_log", ".", "basic_config", "(", "'s3conf'", ")", "logger", ".", "debug", "(", "'Running main entrypoint'", ")", "if", "edit", ":", "if", "ctx", ".", "invoked_subcommand", "is", "None", ":", "logger", ".", "debug", "(", "'Using config file %s'", ",", "config", ".", "LOCAL_CONFIG_FILE", ")", "config", ".", "ConfigFileResolver", "(", "config", ".", "LOCAL_CONFIG_FILE", ")", ".", "edit", "(", "create", "=", "create", ")", "return", "else", ":", "raise", "UsageError", "(", "'Edit should not be called with a subcommand.'", ")", "# manually call help in case no relevant settings were defined", "if", "ctx", ".", "invoked_subcommand", "is", "None", ":", "click", ".", "echo", "(", "main", ".", "get_help", "(", "ctx", ")", ")", "except", "exceptions", ".", "FileDoesNotExist", "as", "e", ":", "raise", "UsageError", "(", "'The file {} does not exist. Try \"-c\" option if you want to create it.'", ".", "format", "(", "str", "(", "e", ")", ")", ")" ]
92fd2973beccc85bb21d3157ff227929e62ed695
test
env
Reads the file defined by the S3CONF variable and output its contents to stdout. Logs are printed to stderr. See options for added functionality: editing file, mapping files, dumping in the phusion-baseimage format, etc.
s3conf/client.py
def env(section, map_files, phusion, phusion_path, quiet, edit, create): """ Reads the file defined by the S3CONF variable and output its contents to stdout. Logs are printed to stderr. See options for added functionality: editing file, mapping files, dumping in the phusion-baseimage format, etc. """ try: logger.debug('Running env command') settings = config.Settings(section=section) storage = STORAGES['s3'](settings=settings) conf = s3conf.S3Conf(storage=storage, settings=settings) if edit: conf.edit(create=create) else: env_vars = conf.get_envfile().as_dict() if env_vars.get('S3CONF_MAP') and map_files: conf.download_mapping(env_vars.get('S3CONF_MAP')) if not quiet: for var_name, var_value in sorted(env_vars.items(), key=lambda x: x[0]): click.echo('{}={}'.format(var_name, var_value)) if phusion: s3conf.phusion_dump(env_vars, phusion_path) except exceptions.EnvfilePathNotDefinedError: raise exceptions.EnvfilePathNotDefinedUsageError() except exceptions.FileDoesNotExist as e: raise UsageError('The file {} does not exist. Try "-c" option if you want to create it.'.format(str(e)))
def env(section, map_files, phusion, phusion_path, quiet, edit, create): """ Reads the file defined by the S3CONF variable and output its contents to stdout. Logs are printed to stderr. See options for added functionality: editing file, mapping files, dumping in the phusion-baseimage format, etc. """ try: logger.debug('Running env command') settings = config.Settings(section=section) storage = STORAGES['s3'](settings=settings) conf = s3conf.S3Conf(storage=storage, settings=settings) if edit: conf.edit(create=create) else: env_vars = conf.get_envfile().as_dict() if env_vars.get('S3CONF_MAP') and map_files: conf.download_mapping(env_vars.get('S3CONF_MAP')) if not quiet: for var_name, var_value in sorted(env_vars.items(), key=lambda x: x[0]): click.echo('{}={}'.format(var_name, var_value)) if phusion: s3conf.phusion_dump(env_vars, phusion_path) except exceptions.EnvfilePathNotDefinedError: raise exceptions.EnvfilePathNotDefinedUsageError() except exceptions.FileDoesNotExist as e: raise UsageError('The file {} does not exist. Try "-c" option if you want to create it.'.format(str(e)))
[ "Reads", "the", "file", "defined", "by", "the", "S3CONF", "variable", "and", "output", "its", "contents", "to", "stdout", ".", "Logs", "are", "printed", "to", "stderr", ".", "See", "options", "for", "added", "functionality", ":", "editing", "file", "mapping", "files", "dumping", "in", "the", "phusion", "-", "baseimage", "format", "etc", "." ]
sbneto/s3conf
python
https://github.com/sbneto/s3conf/blob/92fd2973beccc85bb21d3157ff227929e62ed695/s3conf/client.py#L92-L117
[ "def", "env", "(", "section", ",", "map_files", ",", "phusion", ",", "phusion_path", ",", "quiet", ",", "edit", ",", "create", ")", ":", "try", ":", "logger", ".", "debug", "(", "'Running env command'", ")", "settings", "=", "config", ".", "Settings", "(", "section", "=", "section", ")", "storage", "=", "STORAGES", "[", "'s3'", "]", "(", "settings", "=", "settings", ")", "conf", "=", "s3conf", ".", "S3Conf", "(", "storage", "=", "storage", ",", "settings", "=", "settings", ")", "if", "edit", ":", "conf", ".", "edit", "(", "create", "=", "create", ")", "else", ":", "env_vars", "=", "conf", ".", "get_envfile", "(", ")", ".", "as_dict", "(", ")", "if", "env_vars", ".", "get", "(", "'S3CONF_MAP'", ")", "and", "map_files", ":", "conf", ".", "download_mapping", "(", "env_vars", ".", "get", "(", "'S3CONF_MAP'", ")", ")", "if", "not", "quiet", ":", "for", "var_name", ",", "var_value", "in", "sorted", "(", "env_vars", ".", "items", "(", ")", ",", "key", "=", "lambda", "x", ":", "x", "[", "0", "]", ")", ":", "click", ".", "echo", "(", "'{}={}'", ".", "format", "(", "var_name", ",", "var_value", ")", ")", "if", "phusion", ":", "s3conf", ".", "phusion_dump", "(", "env_vars", ",", "phusion_path", ")", "except", "exceptions", ".", "EnvfilePathNotDefinedError", ":", "raise", "exceptions", ".", "EnvfilePathNotDefinedUsageError", "(", ")", "except", "exceptions", ".", "FileDoesNotExist", "as", "e", ":", "raise", "UsageError", "(", "'The file {} does not exist. Try \"-c\" option if you want to create it.'", ".", "format", "(", "str", "(", "e", ")", ")", ")" ]
92fd2973beccc85bb21d3157ff227929e62ed695
test
exec_command
Sets the process environemnt and executes the [COMMAND] in the same context. Does not modify the current shell environment. If the [COMMAND] has option-like arguments, use the standard POSIX pattern "--" to separate options from arguments. Considering our configuration in the "dev" section, we could write: s3conf -v info exec dev -- ping -v google.com
s3conf/client.py
def exec_command(ctx, section, command, map_files): """ Sets the process environemnt and executes the [COMMAND] in the same context. Does not modify the current shell environment. If the [COMMAND] has option-like arguments, use the standard POSIX pattern "--" to separate options from arguments. Considering our configuration in the "dev" section, we could write: s3conf -v info exec dev -- ping -v google.com """ try: logger.debug('Running exec command') existing_sections = config.ConfigFileResolver(config.LOCAL_CONFIG_FILE).sections() command = ' '.join(command) if section not in existing_sections: command = '{} {}'.format(section, command) if command else section section = None if not command: logger.warning('No command detected.') click.echo(exec_command.get_help(ctx)) return settings = config.Settings(section=section) storage = STORAGES['s3'](settings=settings) conf = s3conf.S3Conf(storage=storage, settings=settings) env_vars = conf.get_envfile().as_dict() if env_vars.get('S3CONF_MAP') and map_files: conf.download_mapping(env_vars.get('S3CONF_MAP')) current_env = os.environ.copy() current_env.update(env_vars) logger.debug('Executing command "%s"', command) subprocess.run(shlex.split(command), env=current_env, check=True) except exceptions.EnvfilePathNotDefinedError: raise exceptions.EnvfilePathNotDefinedUsageError()
def exec_command(ctx, section, command, map_files): """ Sets the process environemnt and executes the [COMMAND] in the same context. Does not modify the current shell environment. If the [COMMAND] has option-like arguments, use the standard POSIX pattern "--" to separate options from arguments. Considering our configuration in the "dev" section, we could write: s3conf -v info exec dev -- ping -v google.com """ try: logger.debug('Running exec command') existing_sections = config.ConfigFileResolver(config.LOCAL_CONFIG_FILE).sections() command = ' '.join(command) if section not in existing_sections: command = '{} {}'.format(section, command) if command else section section = None if not command: logger.warning('No command detected.') click.echo(exec_command.get_help(ctx)) return settings = config.Settings(section=section) storage = STORAGES['s3'](settings=settings) conf = s3conf.S3Conf(storage=storage, settings=settings) env_vars = conf.get_envfile().as_dict() if env_vars.get('S3CONF_MAP') and map_files: conf.download_mapping(env_vars.get('S3CONF_MAP')) current_env = os.environ.copy() current_env.update(env_vars) logger.debug('Executing command "%s"', command) subprocess.run(shlex.split(command), env=current_env, check=True) except exceptions.EnvfilePathNotDefinedError: raise exceptions.EnvfilePathNotDefinedUsageError()
[ "Sets", "the", "process", "environemnt", "and", "executes", "the", "[", "COMMAND", "]", "in", "the", "same", "context", ".", "Does", "not", "modify", "the", "current", "shell", "environment", "." ]
sbneto/s3conf
python
https://github.com/sbneto/s3conf/blob/92fd2973beccc85bb21d3157ff227929e62ed695/s3conf/client.py#L135-L171
[ "def", "exec_command", "(", "ctx", ",", "section", ",", "command", ",", "map_files", ")", ":", "try", ":", "logger", ".", "debug", "(", "'Running exec command'", ")", "existing_sections", "=", "config", ".", "ConfigFileResolver", "(", "config", ".", "LOCAL_CONFIG_FILE", ")", ".", "sections", "(", ")", "command", "=", "' '", ".", "join", "(", "command", ")", "if", "section", "not", "in", "existing_sections", ":", "command", "=", "'{} {}'", ".", "format", "(", "section", ",", "command", ")", "if", "command", "else", "section", "section", "=", "None", "if", "not", "command", ":", "logger", ".", "warning", "(", "'No command detected.'", ")", "click", ".", "echo", "(", "exec_command", ".", "get_help", "(", "ctx", ")", ")", "return", "settings", "=", "config", ".", "Settings", "(", "section", "=", "section", ")", "storage", "=", "STORAGES", "[", "'s3'", "]", "(", "settings", "=", "settings", ")", "conf", "=", "s3conf", ".", "S3Conf", "(", "storage", "=", "storage", ",", "settings", "=", "settings", ")", "env_vars", "=", "conf", ".", "get_envfile", "(", ")", ".", "as_dict", "(", ")", "if", "env_vars", ".", "get", "(", "'S3CONF_MAP'", ")", "and", "map_files", ":", "conf", ".", "download_mapping", "(", "env_vars", ".", "get", "(", "'S3CONF_MAP'", ")", ")", "current_env", "=", "os", ".", "environ", ".", "copy", "(", ")", "current_env", ".", "update", "(", "env_vars", ")", "logger", ".", "debug", "(", "'Executing command \"%s\"'", ",", "command", ")", "subprocess", ".", "run", "(", "shlex", ".", "split", "(", "command", ")", ",", "env", "=", "current_env", ",", "check", "=", "True", ")", "except", "exceptions", ".", "EnvfilePathNotDefinedError", ":", "raise", "exceptions", ".", "EnvfilePathNotDefinedUsageError", "(", ")" ]
92fd2973beccc85bb21d3157ff227929e62ed695
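A minimal sketch of the merge-and-exec pattern used by exec_command above: the remote variables are layered over a copy of os.environ and handed to subprocess.run. The env_vars dict here is a hypothetical stand-in for what conf.get_envfile().as_dict() would return.

    import os
    import shlex
    import subprocess

    env_vars = {'DATABASE_URL': 'postgres://localhost/dev'}  # hypothetical values from the env file
    current_env = os.environ.copy()
    current_env.update(env_vars)                             # remote vars win over the inherited ones
    # assumes a `python` executable on PATH; any command string works the same way
    command = 'python -c \'import os; print(os.environ["DATABASE_URL"])\''
    subprocess.run(shlex.split(command), env=current_env, check=True)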
test
download
Download a file or folder from the S3-like service. If REMOTE_PATH has a trailing slash it is considered to be a folder, e.g.: "s3://my-bucket/my-folder/". In this case, LOCAL_PATH must be a folder as well. The files and subfolder structure in REMOTE_PATH are copied to LOCAL_PATH. If REMOTE_PATH does not have a trailing slash, it is considered to be a file, and LOCAL_PATH should be a file as well.
s3conf/client.py
def download(remote_path, local_path): """ Download a file or folder from the S3-like service. If REMOTE_PATH has a trailing slash it is considered to be a folder, e.g.: "s3://my-bucket/my-folder/". In this case, LOCAL_PATH must be a folder as well. The files and subfolder structure in REMOTE_PATH are copied to LOCAL_PATH. If REMOTE_PATH does not have a trailing slash, it is considered to be a file, and LOCAL_PATH should be a file as well. """ storage = STORAGES['s3']() conf = s3conf.S3Conf(storage=storage) conf.download(remote_path, local_path)
def download(remote_path, local_path): """ Download a file or folder from the S3-like service. If REMOTE_PATH has a trailing slash it is considered to be a folder, e.g.: "s3://my-bucket/my-folder/". In this case, LOCAL_PATH must be a folder as well. The files and subfolder structure in REMOTE_PATH are copied to LOCAL_PATH. If REMOTE_PATH does not have a trailing slash, it is considered to be a file, and LOCAL_PATH should be a file as well. """ storage = STORAGES['s3']() conf = s3conf.S3Conf(storage=storage) conf.download(remote_path, local_path)
[ "Download", "a", "file", "or", "folder", "from", "the", "S3", "-", "like", "service", "." ]
sbneto/s3conf
python
https://github.com/sbneto/s3conf/blob/92fd2973beccc85bb21d3157ff227929e62ed695/s3conf/client.py#L177-L190
[ "def", "download", "(", "remote_path", ",", "local_path", ")", ":", "storage", "=", "STORAGES", "[", "'s3'", "]", "(", ")", "conf", "=", "s3conf", ".", "S3Conf", "(", "storage", "=", "storage", ")", "conf", ".", "download", "(", "remote_path", ",", "local_path", ")" ]
92fd2973beccc85bb21d3157ff227929e62ed695
test
upload
Upload a file or folder to the S3-like service. If LOCAL_PATH is a folder, the files and subfolder structure in LOCAL_PATH are copied to REMOTE_PATH. If LOCAL_PATH is a file, the REMOTE_PATH file is created with the same contents.
s3conf/client.py
def upload(remote_path, local_path): """ Upload a file or folder to the S3-like service. If LOCAL_PATH is a folder, the files and subfolder structure in LOCAL_PATH are copied to REMOTE_PATH. If LOCAL_PATH is a file, the REMOTE_PATH file is created with the same contents. """ storage = STORAGES['s3']() conf = s3conf.S3Conf(storage=storage) conf.upload(local_path, remote_path)
def upload(remote_path, local_path): """ Upload a file or folder to the S3-like service. If LOCAL_PATH is a folder, the files and subfolder structure in LOCAL_PATH are copied to REMOTE_PATH. If LOCAL_PATH is a file, the REMOTE_PATH file is created with the same contents. """ storage = STORAGES['s3']() conf = s3conf.S3Conf(storage=storage) conf.upload(local_path, remote_path)
[ "Upload", "a", "file", "or", "folder", "to", "the", "S3", "-", "like", "service", "." ]
sbneto/s3conf
python
https://github.com/sbneto/s3conf/blob/92fd2973beccc85bb21d3157ff227929e62ed695/s3conf/client.py#L196-L206
[ "def", "upload", "(", "remote_path", ",", "local_path", ")", ":", "storage", "=", "STORAGES", "[", "'s3'", "]", "(", ")", "conf", "=", "s3conf", ".", "S3Conf", "(", "storage", "=", "storage", ")", "conf", ".", "upload", "(", "local_path", ",", "remote_path", ")" ]
92fd2973beccc85bb21d3157ff227929e62ed695
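A hypothetical Python sketch of the same calls the two commands above make; the import paths are guesses based on s3conf/client.py, and it assumes S3 credentials are already configured in the environment.

    from s3conf import s3conf            # assumed import path
    from s3conf.client import STORAGES   # assumed import path

    storage = STORAGES['s3']()
    conf = s3conf.S3Conf(storage=storage)
    # Trailing slash on the remote path means folder semantics; no slash means a single file.
    conf.download('s3://my-bucket/my-folder/', '/tmp/my-folder/')
    conf.upload('/tmp/app.env', 's3://my-bucket/backup/app.env')  # upload takes (local, remote)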
test
downsync
For each section defined in the local config file, creates a folder inside the local config folder named after the section. Downloads the environemnt file defined by the S3CONF variable for this section to this folder.
s3conf/client.py
def downsync(section, map_files): """ For each section defined in the local config file, creates a folder inside the local config folder named after the section. Downloads the environemnt file defined by the S3CONF variable for this section to this folder. """ try: settings = config.Settings(section=section) storage = STORAGES['s3'](settings=settings) conf = s3conf.S3Conf(storage=storage, settings=settings) local_root = os.path.join(config.LOCAL_CONFIG_FOLDER, section) conf.downsync(local_root, map_files=map_files) except exceptions.EnvfilePathNotDefinedError: raise exceptions.EnvfilePathNotDefinedUsageError()
def downsync(section, map_files): """ For each section defined in the local config file, creates a folder inside the local config folder named after the section. Downloads the environemnt file defined by the S3CONF variable for this section to this folder. """ try: settings = config.Settings(section=section) storage = STORAGES['s3'](settings=settings) conf = s3conf.S3Conf(storage=storage, settings=settings) local_root = os.path.join(config.LOCAL_CONFIG_FOLDER, section) conf.downsync(local_root, map_files=map_files) except exceptions.EnvfilePathNotDefinedError: raise exceptions.EnvfilePathNotDefinedUsageError()
[ "For", "each", "section", "defined", "in", "the", "local", "config", "file", "creates", "a", "folder", "inside", "the", "local", "config", "folder", "named", "after", "the", "section", ".", "Downloads", "the", "environemnt", "file", "defined", "by", "the", "S3CONF", "variable", "for", "this", "section", "to", "this", "folder", "." ]
sbneto/s3conf
python
https://github.com/sbneto/s3conf/blob/92fd2973beccc85bb21d3157ff227929e62ed695/s3conf/client.py#L217-L230
[ "def", "downsync", "(", "section", ",", "map_files", ")", ":", "try", ":", "settings", "=", "config", ".", "Settings", "(", "section", "=", "section", ")", "storage", "=", "STORAGES", "[", "'s3'", "]", "(", "settings", "=", "settings", ")", "conf", "=", "s3conf", ".", "S3Conf", "(", "storage", "=", "storage", ",", "settings", "=", "settings", ")", "local_root", "=", "os", ".", "path", ".", "join", "(", "config", ".", "LOCAL_CONFIG_FOLDER", ",", "section", ")", "conf", ".", "downsync", "(", "local_root", ",", "map_files", "=", "map_files", ")", "except", "exceptions", ".", "EnvfilePathNotDefinedError", ":", "raise", "exceptions", ".", "EnvfilePathNotDefinedUsageError", "(", ")" ]
92fd2973beccc85bb21d3157ff227929e62ed695
test
diff
For each section defined in the local config file, look up for a folder inside the local config folder named after the section. Uploads the environemnt file named as in the S3CONF variable for this section to the remote S3CONF path.
s3conf/client.py
def diff(section): """ For each section defined in the local config file, look up for a folder inside the local config folder named after the section. Uploads the environemnt file named as in the S3CONF variable for this section to the remote S3CONF path. """ try: settings = config.Settings(section=section) storage = STORAGES['s3'](settings=settings) conf = s3conf.S3Conf(storage=storage, settings=settings) local_root = os.path.join(config.LOCAL_CONFIG_FOLDER, section) click.echo(''.join(conf.diff(local_root))) except exceptions.EnvfilePathNotDefinedError: raise exceptions.EnvfilePathNotDefinedUsageError()
def diff(section): """ For each section defined in the local config file, look up for a folder inside the local config folder named after the section. Uploads the environemnt file named as in the S3CONF variable for this section to the remote S3CONF path. """ try: settings = config.Settings(section=section) storage = STORAGES['s3'](settings=settings) conf = s3conf.S3Conf(storage=storage, settings=settings) local_root = os.path.join(config.LOCAL_CONFIG_FOLDER, section) click.echo(''.join(conf.diff(local_root))) except exceptions.EnvfilePathNotDefinedError: raise exceptions.EnvfilePathNotDefinedUsageError()
[ "For", "each", "section", "defined", "in", "the", "local", "config", "file", "look", "up", "for", "a", "folder", "inside", "the", "local", "config", "folder", "named", "after", "the", "section", ".", "Uploads", "the", "environemnt", "file", "named", "as", "in", "the", "S3CONF", "variable", "for", "this", "section", "to", "the", "remote", "S3CONF", "path", "." ]
sbneto/s3conf
python
https://github.com/sbneto/s3conf/blob/92fd2973beccc85bb21d3157ff227929e62ed695/s3conf/client.py#L259-L272
[ "def", "diff", "(", "section", ")", ":", "try", ":", "settings", "=", "config", ".", "Settings", "(", "section", "=", "section", ")", "storage", "=", "STORAGES", "[", "'s3'", "]", "(", "settings", "=", "settings", ")", "conf", "=", "s3conf", ".", "S3Conf", "(", "storage", "=", "storage", ",", "settings", "=", "settings", ")", "local_root", "=", "os", ".", "path", ".", "join", "(", "config", ".", "LOCAL_CONFIG_FOLDER", ",", "section", ")", "click", ".", "echo", "(", "''", ".", "join", "(", "conf", ".", "diff", "(", "local_root", ")", ")", ")", "except", "exceptions", ".", "EnvfilePathNotDefinedError", ":", "raise", "exceptions", ".", "EnvfilePathNotDefinedUsageError", "(", ")" ]
92fd2973beccc85bb21d3157ff227929e62ed695
test
set_variable
Set value of a variable in an environment file for the given section. If the variable is already defined, its value is replaced, otherwise, it is added to the end of the file. The value is given as "ENV_VAR_NAME=env_var_value", e.g.: s3conf set test ENV_VAR_NAME=env_var_value
s3conf/client.py
def set_variable(section, value, create): """ Set value of a variable in an environment file for the given section. If the variable is already defined, its value is replaced, otherwise, it is added to the end of the file. The value is given as "ENV_VAR_NAME=env_var_value", e.g.: s3conf set test ENV_VAR_NAME=env_var_value """ if not value: value = section section = None try: logger.debug('Running env command') settings = config.Settings(section=section) conf = s3conf.S3Conf(settings=settings) env_vars = conf.get_envfile() env_vars.set(value, create=create) except exceptions.EnvfilePathNotDefinedError: raise exceptions.EnvfilePathNotDefinedUsageError()
def set_variable(section, value, create): """ Set value of a variable in an environment file for the given section. If the variable is already defined, its value is replaced, otherwise, it is added to the end of the file. The value is given as "ENV_VAR_NAME=env_var_value", e.g.: s3conf set test ENV_VAR_NAME=env_var_value """ if not value: value = section section = None try: logger.debug('Running env command') settings = config.Settings(section=section) conf = s3conf.S3Conf(settings=settings) env_vars = conf.get_envfile() env_vars.set(value, create=create) except exceptions.EnvfilePathNotDefinedError: raise exceptions.EnvfilePathNotDefinedUsageError()
[ "Set", "value", "of", "a", "variable", "in", "an", "environment", "file", "for", "the", "given", "section", ".", "If", "the", "variable", "is", "already", "defined", "its", "value", "is", "replaced", "otherwise", "it", "is", "added", "to", "the", "end", "of", "the", "file", ".", "The", "value", "is", "given", "as", "ENV_VAR_NAME", "=", "env_var_value", "e", ".", "g", ".", ":" ]
sbneto/s3conf
python
https://github.com/sbneto/s3conf/blob/92fd2973beccc85bb21d3157ff227929e62ed695/s3conf/client.py#L283-L302
[ "def", "set_variable", "(", "section", ",", "value", ",", "create", ")", ":", "if", "not", "value", ":", "value", "=", "section", "section", "=", "None", "try", ":", "logger", ".", "debug", "(", "'Running env command'", ")", "settings", "=", "config", ".", "Settings", "(", "section", "=", "section", ")", "conf", "=", "s3conf", ".", "S3Conf", "(", "settings", "=", "settings", ")", "env_vars", "=", "conf", ".", "get_envfile", "(", ")", "env_vars", ".", "set", "(", "value", ",", "create", "=", "create", ")", "except", "exceptions", ".", "EnvfilePathNotDefinedError", ":", "raise", "exceptions", ".", "EnvfilePathNotDefinedUsageError", "(", ")" ]
92fd2973beccc85bb21d3157ff227929e62ed695
test
unset_variable
Unset a variable in an environment file for the given section. The value is given is the variable name, e.g.: s3conf unset test ENV_VAR_NAME
s3conf/client.py
def unset_variable(section, value): """ Unset a variable in an environment file for the given section. The value is given is the variable name, e.g.: s3conf unset test ENV_VAR_NAME """ if not value: value = section section = None try: logger.debug('Running env command') settings = config.Settings(section=section) conf = s3conf.S3Conf(settings=settings) env_vars = conf.get_envfile() env_vars.unset(value) except exceptions.EnvfilePathNotDefinedError: raise exceptions.EnvfilePathNotDefinedUsageError()
def unset_variable(section, value): """ Unset a variable in an environment file for the given section. The value is given is the variable name, e.g.: s3conf unset test ENV_VAR_NAME """ if not value: value = section section = None try: logger.debug('Running env command') settings = config.Settings(section=section) conf = s3conf.S3Conf(settings=settings) env_vars = conf.get_envfile() env_vars.unset(value) except exceptions.EnvfilePathNotDefinedError: raise exceptions.EnvfilePathNotDefinedUsageError()
[ "Unset", "a", "variable", "in", "an", "environment", "file", "for", "the", "given", "section", ".", "The", "value", "is", "given", "is", "the", "variable", "name", "e", ".", "g", ".", ":" ]
sbneto/s3conf
python
https://github.com/sbneto/s3conf/blob/92fd2973beccc85bb21d3157ff227929e62ed695/s3conf/client.py#L309-L327
[ "def", "unset_variable", "(", "section", ",", "value", ")", ":", "if", "not", "value", ":", "value", "=", "section", "section", "=", "None", "try", ":", "logger", ".", "debug", "(", "'Running env command'", ")", "settings", "=", "config", ".", "Settings", "(", "section", "=", "section", ")", "conf", "=", "s3conf", ".", "S3Conf", "(", "settings", "=", "settings", ")", "env_vars", "=", "conf", ".", "get_envfile", "(", ")", "env_vars", ".", "unset", "(", "value", ")", "except", "exceptions", ".", "EnvfilePathNotDefinedError", ":", "raise", "exceptions", ".", "EnvfilePathNotDefinedUsageError", "(", ")" ]
92fd2973beccc85bb21d3157ff227929e62ed695
test
init
Creates the .s3conf config folder and .s3conf/config config file with the provided section name and configuration file. It is a very basic config file. Manually edit it in order to add credentials. E.g.: s3conf init development s3://my-project/development.env
s3conf/client.py
def init(section, remote_file): """ Creates the .s3conf config folder and .s3conf/config config file with the provided section name and configuration file. It is a very basic config file. Manually edit it in order to add credentials. E.g.: s3conf init development s3://my-project/development.env """ if not remote_file.startswith('s3://'): raise UsageError('REMOTE_FILE must be a S3-like path. E.g.:\n\n' 's3conf init development s3://my-project/development.env') logger.debug('Running init command') config_file_path = os.path.join(os.getcwd(), '.s3conf', 'config') config_file = config.ConfigFileResolver(config_file_path, section=section) config_file.set('S3CONF', remote_file) gitignore_file_path = os.path.join(os.getcwd(), '.s3conf', '.gitignore') config_file.save() open(gitignore_file_path, 'w').write('*\n!config\n')
def init(section, remote_file): """ Creates the .s3conf config folder and .s3conf/config config file with the provided section name and configuration file. It is a very basic config file. Manually edit it in order to add credentials. E.g.: s3conf init development s3://my-project/development.env """ if not remote_file.startswith('s3://'): raise UsageError('REMOTE_FILE must be a S3-like path. E.g.:\n\n' 's3conf init development s3://my-project/development.env') logger.debug('Running init command') config_file_path = os.path.join(os.getcwd(), '.s3conf', 'config') config_file = config.ConfigFileResolver(config_file_path, section=section) config_file.set('S3CONF', remote_file) gitignore_file_path = os.path.join(os.getcwd(), '.s3conf', '.gitignore') config_file.save() open(gitignore_file_path, 'w').write('*\n!config\n')
[ "Creates", "the", ".", "s3conf", "config", "folder", "and", ".", "s3conf", "/", "config", "config", "file", "with", "the", "provided", "section", "name", "and", "configuration", "file", ".", "It", "is", "a", "very", "basic", "config", "file", ".", "Manually", "edit", "it", "in", "order", "to", "add", "credentials", ".", "E", ".", "g", ".", ":" ]
sbneto/s3conf
python
https://github.com/sbneto/s3conf/blob/92fd2973beccc85bb21d3157ff227929e62ed695/s3conf/client.py#L333-L350
[ "def", "init", "(", "section", ",", "remote_file", ")", ":", "if", "not", "remote_file", ".", "startswith", "(", "'s3://'", ")", ":", "raise", "UsageError", "(", "'REMOTE_FILE must be a S3-like path. E.g.:\\n\\n'", "'s3conf init development s3://my-project/development.env'", ")", "logger", ".", "debug", "(", "'Running init command'", ")", "config_file_path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "'.s3conf'", ",", "'config'", ")", "config_file", "=", "config", ".", "ConfigFileResolver", "(", "config_file_path", ",", "section", "=", "section", ")", "config_file", ".", "set", "(", "'S3CONF'", ",", "remote_file", ")", "gitignore_file_path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "'.s3conf'", ",", "'.gitignore'", ")", "config_file", ".", "save", "(", ")", "open", "(", "gitignore_file_path", ",", "'w'", ")", ".", "write", "(", "'*\\n!config\\n'", ")" ]
92fd2973beccc85bb21d3157ff227929e62ed695
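For reference, the .s3conf/.gitignore written by init contains exactly '*\n!config\n' (taken verbatim from the code above). Assuming ConfigFileResolver stores a standard INI layout, the generated .s3conf/config for the docstring's example could be read back like this; this is a sketch, not the project's own API.

    import configparser

    parser = configparser.ConfigParser()
    parser.read('.s3conf/config')
    print(parser['development']['S3CONF'])  # expected: s3://my-project/development.env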
test
DataBase.update
[update table] Arguments: t_obj {[objs of DeclarativeMeta]} -- [update the table]
sangreal_db/core/database.py
def update(self, t_obj): """[update table] Arguments: t_obj {[objs of DeclarativeMeta]} -- [update the table] """ if isinstance(t_obj, Iterable): self._session.add_all(t_obj) else: self._session.add(t_obj)
def update(self, t_obj): """[update table] Arguments: t_obj {[objs of DeclarativeMeta]} -- [update the table] """ if isinstance(t_obj, Iterable): self._session.add_all(t_obj) else: self._session.add(t_obj)
[ "[", "update", "table", "]" ]
liubola/sangreal-db
python
https://github.com/liubola/sangreal-db/blob/f9b6339421d8d308e0ebc2a42c432ca99026c264/sangreal_db/core/database.py#L102-L112
[ "def", "update", "(", "self", ",", "t_obj", ")", ":", "if", "isinstance", "(", "t_obj", ",", "Iterable", ")", ":", "self", ".", "_session", ".", "add_all", "(", "t_obj", ")", "else", ":", "self", ".", "_session", ".", "add", "(", "t_obj", ")" ]
f9b6339421d8d308e0ebc2a42c432ca99026c264
test
DataBase.insert
[insert bulk data] Arguments: table {[DeclarativeMeta cls]} -- [reflection of table] insert_obj {[pd.DataFrame or list of dicts]} -- [insert_obj] Keyword Arguments: ignore {bool} -- [wether ignore exception or not] (default: {True}) Raises: ValueError -- [f"The {reprlib.repr(insert_obj)} must be list of dicts type!"] Returns: [type] -- [description]
sangreal_db/core/database.py
def insert(self, table, insert_obj, ignore=True): """[insert bulk data] Arguments: table {[DeclarativeMeta cls]} -- [reflection of table] insert_obj {[pd.DataFrame or list of dicts]} -- [insert_obj] Keyword Arguments: ignore {bool} -- [wether ignore exception or not] (default: {True}) Raises: ValueError -- [f"The {reprlib.repr(insert_obj)} must be list of dicts type!"] Returns: [type] -- [description] """ if isinstance(insert_obj, pd.DataFrame): if insert_obj.empty: raise ValueError('The input DataFrame is empty, please check!') insert_obj = insert_obj.to_dict(orient='records') elif not isinstance(insert_obj, list): raise ValueError( f"The {reprlib.repr(insert_obj)} must be list of dicts type!") ignore_str = 'IGNORE' if ignore else '' return self._session.execute( table.__table__.insert().prefix_with(ignore_str), insert_obj)
def insert(self, table, insert_obj, ignore=True): """[insert bulk data] Arguments: table {[DeclarativeMeta cls]} -- [reflection of table] insert_obj {[pd.DataFrame or list of dicts]} -- [insert_obj] Keyword Arguments: ignore {bool} -- [wether ignore exception or not] (default: {True}) Raises: ValueError -- [f"The {reprlib.repr(insert_obj)} must be list of dicts type!"] Returns: [type] -- [description] """ if isinstance(insert_obj, pd.DataFrame): if insert_obj.empty: raise ValueError('The input DataFrame is empty, please check!') insert_obj = insert_obj.to_dict(orient='records') elif not isinstance(insert_obj, list): raise ValueError( f"The {reprlib.repr(insert_obj)} must be list of dicts type!") ignore_str = 'IGNORE' if ignore else '' return self._session.execute( table.__table__.insert().prefix_with(ignore_str), insert_obj)
[ "[", "insert", "bulk", "data", "]" ]
liubola/sangreal-db
python
https://github.com/liubola/sangreal-db/blob/f9b6339421d8d308e0ebc2a42c432ca99026c264/sangreal_db/core/database.py#L114-L141
[ "def", "insert", "(", "self", ",", "table", ",", "insert_obj", ",", "ignore", "=", "True", ")", ":", "if", "isinstance", "(", "insert_obj", ",", "pd", ".", "DataFrame", ")", ":", "if", "insert_obj", ".", "empty", ":", "raise", "ValueError", "(", "'The input DataFrame is empty, please check!'", ")", "insert_obj", "=", "insert_obj", ".", "to_dict", "(", "orient", "=", "'records'", ")", "elif", "not", "isinstance", "(", "insert_obj", ",", "list", ")", ":", "raise", "ValueError", "(", "f\"The {reprlib.repr(insert_obj)} must be list of dicts type!\"", ")", "ignore_str", "=", "'IGNORE'", "if", "ignore", "else", "''", "return", "self", ".", "_session", ".", "execute", "(", "table", ".", "__table__", ".", "insert", "(", ")", ".", "prefix_with", "(", "ignore_str", ")", ",", "insert_obj", ")" ]
f9b6339421d8d308e0ebc2a42c432ca99026c264
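The DataFrame branch of insert() boils down to pandas' to_dict(orient='records'); a standalone sketch of just that conversion, with the table and session plumbing omitted:

    import pandas as pd

    df = pd.DataFrame({'code': ['000001', '000002'], 'price': [10.5, 20.1]})
    records = df.to_dict(orient='records')
    # records == [{'code': '000001', 'price': 10.5}, {'code': '000002', 'price': 20.1}]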
test
parse_env_var
Split a env var text like ENV_VAR_NAME=env_var_value into a tuple ('ENV_VAR_NAME', 'env_var_value')
s3conf/files.py
def parse_env_var(value): """ Split a env var text like ENV_VAR_NAME=env_var_value into a tuple ('ENV_VAR_NAME', 'env_var_value') """ k, _, v = value.partition('=') # Remove any leading and trailing spaces in key, value k, v = k.strip(), v.strip().encode('unicode-escape').decode('ascii') if v and v[0] == v[-1] in ['"', "'"]: v = __escape_decoder(v[1:-1])[0] return k, v
def parse_env_var(value): """ Split a env var text like ENV_VAR_NAME=env_var_value into a tuple ('ENV_VAR_NAME', 'env_var_value') """ k, _, v = value.partition('=') # Remove any leading and trailing spaces in key, value k, v = k.strip(), v.strip().encode('unicode-escape').decode('ascii') if v and v[0] == v[-1] in ['"', "'"]: v = __escape_decoder(v[1:-1])[0] return k, v
[ "Split", "a", "env", "var", "text", "like" ]
sbneto/s3conf
python
https://github.com/sbneto/s3conf/blob/92fd2973beccc85bb21d3157ff227929e62ed695/s3conf/files.py#L16-L31
[ "def", "parse_env_var", "(", "value", ")", ":", "k", ",", "_", ",", "v", "=", "value", ".", "partition", "(", "'='", ")", "# Remove any leading and trailing spaces in key, value", "k", ",", "v", "=", "k", ".", "strip", "(", ")", ",", "v", ".", "strip", "(", ")", ".", "encode", "(", "'unicode-escape'", ")", ".", "decode", "(", "'ascii'", ")", "if", "v", "and", "v", "[", "0", "]", "==", "v", "[", "-", "1", "]", "in", "[", "'\"'", ",", "\"'\"", "]", ":", "v", "=", "__escape_decoder", "(", "v", "[", "1", ":", "-", "1", "]", ")", "[", "0", "]", "return", "k", ",", "v" ]
92fd2973beccc85bb21d3157ff227929e62ed695
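A quick sketch of how parse_env_var behaves, based on the implementation above; the import path is an assumption inferred from the file path s3conf/files.py.

    from s3conf.files import parse_env_var  # assumed import path

    parse_env_var('DEBUG = "false"')             # -> ('DEBUG', 'false'), spaces and outer quotes stripped
    parse_env_var('S3CONF=s3://bucket/app.env')  # -> ('S3CONF', 's3://bucket/app.env')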
test
basic
Add basic authentication to the requests of the clients.
generators/python_client/schul_cloud_resources_api_v1/auth.py
def basic(username, password): """Add basic authentication to the requests of the clients.""" none() _config.username = username _config.password = password
def basic(username, password): """Add basic authentication to the requests of the clients.""" none() _config.username = username _config.password = password
[ "Add", "basic", "authentication", "to", "the", "requests", "of", "the", "clients", "." ]
schul-cloud/resources-api-v1
python
https://github.com/schul-cloud/resources-api-v1/blob/58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d/generators/python_client/schul_cloud_resources_api_v1/auth.py#L27-L31
[ "def", "basic", "(", "username", ",", "password", ")", ":", "none", "(", ")", "_config", ".", "username", "=", "username", "_config", ".", "password", "=", "password" ]
58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d
test
api_key
Authenticate via an api key.
generators/python_client/schul_cloud_resources_api_v1/auth.py
def api_key(api_key): """Authenticate via an api key.""" none() _config.api_key_prefix["Authorization"] = "api-key" _config.api_key["Authorization"] = "key=" + b64encode(api_key.encode()).decode()
def api_key(api_key): """Authenticate via an api key.""" none() _config.api_key_prefix["Authorization"] = "api-key" _config.api_key["Authorization"] = "key=" + b64encode(api_key.encode()).decode()
[ "Authenticate", "via", "an", "api", "key", "." ]
schul-cloud/resources-api-v1
python
https://github.com/schul-cloud/resources-api-v1/blob/58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d/generators/python_client/schul_cloud_resources_api_v1/auth.py#L34-L38
[ "def", "api_key", "(", "api_key", ")", ":", "none", "(", ")", "_config", ".", "api_key_prefix", "[", "\"Authorization\"", "]", "=", "\"api-key\"", "_config", ".", "api_key", "[", "\"Authorization\"", "]", "=", "\"key=\"", "+", "b64encode", "(", "api_key", ".", "encode", "(", ")", ")", ".", "decode", "(", ")" ]
58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d
test
_get_json_content_from_folder
yield objects from json files in the folder and subfolders.
generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py
def _get_json_content_from_folder(folder): """yield objects from json files in the folder and subfolders.""" for dirpath, dirnames, filenames in os.walk(folder): for filename in filenames: if filename.lower().endswith(".json"): filepath = os.path.join(dirpath, filename) with open(filepath, "rb") as file: yield json.loads(file.read().decode("UTF-8"))
def _get_json_content_from_folder(folder): """yield objects from json files in the folder and subfolders.""" for dirpath, dirnames, filenames in os.walk(folder): for filename in filenames: if filename.lower().endswith(".json"): filepath = os.path.join(dirpath, filename) with open(filepath, "rb") as file: yield json.loads(file.read().decode("UTF-8"))
[ "yield", "objects", "from", "json", "files", "in", "the", "folder", "and", "subfolders", "." ]
schul-cloud/resources-api-v1
python
https://github.com/schul-cloud/resources-api-v1/blob/58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d/generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py#L14-L21
[ "def", "_get_json_content_from_folder", "(", "folder", ")", ":", "for", "dirpath", ",", "dirnames", ",", "filenames", "in", "os", ".", "walk", "(", "folder", ")", ":", "for", "filename", "in", "filenames", ":", "if", "filename", ".", "lower", "(", ")", ".", "endswith", "(", "\".json\"", ")", ":", "filepath", "=", "os", ".", "path", ".", "join", "(", "dirpath", ",", "filename", ")", "with", "open", "(", "filepath", ",", "\"rb\"", ")", "as", "file", ":", "yield", "json", ".", "loads", "(", "file", ".", "read", "(", ")", ".", "decode", "(", "\"UTF-8\"", ")", ")" ]
58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d
test
get_schemas
Return a dict of schema names mapping to a Schema. The schema is of type schul_cloud_resources_api_v1.schema.Schema
generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py
def get_schemas(): """Return a dict of schema names mapping to a Schema. The schema is of type schul_cloud_resources_api_v1.schema.Schema """ schemas = {} for name in os.listdir(JSON_PATH): if name not in NO_SCHEMA: schemas[name] = Schema(name) return schemas
def get_schemas(): """Return a dict of schema names mapping to a Schema. The schema is of type schul_cloud_resources_api_v1.schema.Schema """ schemas = {} for name in os.listdir(JSON_PATH): if name not in NO_SCHEMA: schemas[name] = Schema(name) return schemas
[ "Return", "a", "dict", "of", "schema", "names", "mapping", "to", "a", "Schema", "." ]
schul-cloud/resources-api-v1
python
https://github.com/schul-cloud/resources-api-v1/blob/58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d/generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py#L86-L95
[ "def", "get_schemas", "(", ")", ":", "schemas", "=", "{", "}", "for", "name", "in", "os", ".", "listdir", "(", "JSON_PATH", ")", ":", "if", "name", "not", "in", "NO_SCHEMA", ":", "schemas", "[", "name", "]", "=", "Schema", "(", "name", ")", "return", "schemas" ]
58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d
test
Schema.get_schema
Return the schema.
generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py
def get_schema(self): """Return the schema.""" path = os.path.join(self._get_schema_folder(), self._name + ".json") with open(path, "rb") as file: schema = json.loads(file.read().decode("UTF-8")) return schema
def get_schema(self): """Return the schema.""" path = os.path.join(self._get_schema_folder(), self._name + ".json") with open(path, "rb") as file: schema = json.loads(file.read().decode("UTF-8")) return schema
[ "Return", "the", "schema", "." ]
schul-cloud/resources-api-v1
python
https://github.com/schul-cloud/resources-api-v1/blob/58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d/generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py#L35-L40
[ "def", "get_schema", "(", "self", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_get_schema_folder", "(", ")", ",", "self", ".", "_name", "+", "\".json\"", ")", "with", "open", "(", "path", ",", "\"rb\"", ")", "as", "file", ":", "schema", "=", "json", ".", "loads", "(", "file", ".", "read", "(", ")", ".", "decode", "(", "\"UTF-8\"", ")", ")", "return", "schema" ]
58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d
test
Schema.get_resolver
Return a jsonschema.RefResolver for the schemas. All schemas returned be get_schemas() are resolved locally.
generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py
def get_resolver(self): """Return a jsonschema.RefResolver for the schemas. All schemas returned be get_schemas() are resolved locally. """ store = {} for schema in get_schemas().values(): store[schema.get_uri()] = schema.get_schema() schema = self.get_schema() return jsonschema.RefResolver.from_schema(schema, store=store)
def get_resolver(self): """Return a jsonschema.RefResolver for the schemas. All schemas returned be get_schemas() are resolved locally. """ store = {} for schema in get_schemas().values(): store[schema.get_uri()] = schema.get_schema() schema = self.get_schema() return jsonschema.RefResolver.from_schema(schema, store=store)
[ "Return", "a", "jsonschema", ".", "RefResolver", "for", "the", "schemas", "." ]
schul-cloud/resources-api-v1
python
https://github.com/schul-cloud/resources-api-v1/blob/58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d/generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py#L46-L55
[ "def", "get_resolver", "(", "self", ")", ":", "store", "=", "{", "}", "for", "schema", "in", "get_schemas", "(", ")", ".", "values", "(", ")", ":", "store", "[", "schema", ".", "get_uri", "(", ")", "]", "=", "schema", ".", "get_schema", "(", ")", "schema", "=", "self", ".", "get_schema", "(", ")", "return", "jsonschema", ".", "RefResolver", ".", "from_schema", "(", "schema", ",", "store", "=", "store", ")" ]
58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d
test
Schema.validate
Validate an object against the schema. This function just passes if the schema matches the object. If the object does not match the schema, a ValidationException is raised. This error allows debugging.
generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py
def validate(self, object): """Validate an object against the schema. This function just passes if the schema matches the object. If the object does not match the schema, a ValidationException is raised. This error allows debugging. """ resolver=self.get_resolver() jsonschema.validate(object, self.get_schema(), resolver=resolver)
def validate(self, object): """Validate an object against the schema. This function just passes if the schema matches the object. If the object does not match the schema, a ValidationException is raised. This error allows debugging. """ resolver=self.get_resolver() jsonschema.validate(object, self.get_schema(), resolver=resolver)
[ "Validate", "an", "object", "against", "the", "schema", "." ]
schul-cloud/resources-api-v1
python
https://github.com/schul-cloud/resources-api-v1/blob/58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d/generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py#L57-L65
[ "def", "validate", "(", "self", ",", "object", ")", ":", "resolver", "=", "self", ".", "get_resolver", "(", ")", "jsonschema", ".", "validate", "(", "object", ",", "self", ".", "get_schema", "(", ")", ",", "resolver", "=", "resolver", ")" ]
58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d
test
Schema.get_valid_examples
Return a list of valid examples for the given schema.
generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py
def get_valid_examples(self): """Return a list of valid examples for the given schema.""" path = os.path.join(self._get_schema_folder(), "examples", "valid") return list(_get_json_content_from_folder(path))
def get_valid_examples(self): """Return a list of valid examples for the given schema.""" path = os.path.join(self._get_schema_folder(), "examples", "valid") return list(_get_json_content_from_folder(path))
[ "Return", "a", "list", "of", "valid", "examples", "for", "the", "given", "schema", "." ]
schul-cloud/resources-api-v1
python
https://github.com/schul-cloud/resources-api-v1/blob/58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d/generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py#L75-L78
[ "def", "get_valid_examples", "(", "self", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_get_schema_folder", "(", ")", ",", "\"examples\"", ",", "\"valid\"", ")", "return", "list", "(", "_get_json_content_from_folder", "(", "path", ")", ")" ]
58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d
test
Schema.get_invalid_examples
Return a list of examples which violate the schema.
generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py
def get_invalid_examples(self): """Return a list of examples which violate the schema.""" path = os.path.join(self._get_schema_folder(), "examples", "invalid") return list(_get_json_content_from_folder(path))
def get_invalid_examples(self): """Return a list of examples which violate the schema.""" path = os.path.join(self._get_schema_folder(), "examples", "invalid") return list(_get_json_content_from_folder(path))
[ "Return", "a", "list", "of", "examples", "which", "violate", "the", "schema", "." ]
schul-cloud/resources-api-v1
python
https://github.com/schul-cloud/resources-api-v1/blob/58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d/generators/python_client/schul_cloud_resources_api_v1/schema/__init__.py#L80-L83
[ "def", "get_invalid_examples", "(", "self", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_get_schema_folder", "(", ")", ",", "\"examples\"", ",", "\"invalid\"", ")", "return", "list", "(", "_get_json_content_from_folder", "(", "path", ")", ")" ]
58b2d7ba13669fa013ef81c0ffcffbf6b3fdb52d
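Putting the Schema helpers together, a hedged sketch of a self-test that uses only the methods shown in these records; the package import path is inferred from the file path of the module.

    import jsonschema
    from schul_cloud_resources_api_v1.schema import get_schemas  # assumed import path

    for name, schema in get_schemas().items():
        for example in schema.get_valid_examples():
            schema.validate(example)              # must pass silently
        for example in schema.get_invalid_examples():
            try:
                schema.validate(example)
            except jsonschema.ValidationError:
                pass                              # a violation is the expected outcome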
test
OneDriveHTTPClient.request
Make synchronous HTTP request. Can be overidden to use different http module (e.g. urllib2, twisted, etc).
onedrive/api_v5.py
def request( self, url, method='get', data=None, files=None, raw=False, raw_all=False, headers=dict(), raise_for=dict(), session=None ): '''Make synchronous HTTP request. Can be overidden to use different http module (e.g. urllib2, twisted, etc).''' try: import requests # import here to avoid dependency on the module except ImportError as exc: exc.args = ( 'Unable to find/import "requests" module.' ' Please make sure that it is installed, e.g. by running "pip install requests" command.' '\nFor more info, visit: http://docs.python-requests.org/en/latest/user/install/',) raise exc if not self._requests_setup_done: patched_session = self._requests_setup( requests, **(self.request_adapter_settings or dict()) ) if patched_session is not None: self._requests_session = patched_session if session is None: session = getattr(self, '_requests_session', None) if not session: session = self._requests_session = requests.session() elif not session: session = requests method = method.lower() kwz = (self._requests_base_keywords or dict()).copy() kwz.update(self.request_extra_keywords or dict()) kwz, func = dict(), ft.partial(session.request, method.upper(), **kwz) kwz_headers = (self.request_base_headers or dict()).copy() kwz_headers.update(headers) if data is not None: if method in ['post', 'put']: if all(hasattr(data, k) for k in ['seek', 'read']): # Force chunked encoding for files, as uploads hang otherwise # See https://github.com/mk-fg/python-onedrive/issues/30 for details data.seek(0) kwz['data'] = iter(ft.partial(data.read, 200 * 2**10), b'') else: kwz['data'] = data else: kwz['data'] = json.dumps(data) kwz_headers.setdefault('Content-Type', 'application/json') if files is not None: # requests-2+ doesn't seem to add default content-type header for k, file_tuple in files.iteritems(): if len(file_tuple) == 2: files[k] = tuple(file_tuple) + ('application/octet-stream',) # Rewind is necessary because request can be repeated due to auth failure file_tuple[1].seek(0) kwz['files'] = files if kwz_headers: kwz['headers'] = kwz_headers code = res = None try: res = func(url, **kwz) # log.debug('Response headers: %s', res.headers) code = res.status_code if code == requests.codes.no_content: return if code != requests.codes.ok: res.raise_for_status() except requests.RequestException as err: message = b'{0} [type: {1}, repr: {0!r}]'.format(err, type(err)) if (res and getattr(res, 'text', None)) is not None: # "res" with non-200 code can be falsy message = res.text try: message = json.loads(message) except: message = '{}: {!r}'.format(str(err), message)[:300] else: msg_err, msg_data = message.pop('error', None), message if msg_err: message = '{}: {}'.format(msg_err.get('code', err), msg_err.get('message', msg_err)) if msg_data: message = '{} (data: {})'.format(message, msg_data) raise raise_for.get(code, ProtocolError)(code, message) if raw: res = res.content elif raw_all: res = code, dict(res.headers.items()), res.content else: res = json.loads(res.text) return res
def request( self, url, method='get', data=None, files=None, raw=False, raw_all=False, headers=dict(), raise_for=dict(), session=None ): '''Make synchronous HTTP request. Can be overidden to use different http module (e.g. urllib2, twisted, etc).''' try: import requests # import here to avoid dependency on the module except ImportError as exc: exc.args = ( 'Unable to find/import "requests" module.' ' Please make sure that it is installed, e.g. by running "pip install requests" command.' '\nFor more info, visit: http://docs.python-requests.org/en/latest/user/install/',) raise exc if not self._requests_setup_done: patched_session = self._requests_setup( requests, **(self.request_adapter_settings or dict()) ) if patched_session is not None: self._requests_session = patched_session if session is None: session = getattr(self, '_requests_session', None) if not session: session = self._requests_session = requests.session() elif not session: session = requests method = method.lower() kwz = (self._requests_base_keywords or dict()).copy() kwz.update(self.request_extra_keywords or dict()) kwz, func = dict(), ft.partial(session.request, method.upper(), **kwz) kwz_headers = (self.request_base_headers or dict()).copy() kwz_headers.update(headers) if data is not None: if method in ['post', 'put']: if all(hasattr(data, k) for k in ['seek', 'read']): # Force chunked encoding for files, as uploads hang otherwise # See https://github.com/mk-fg/python-onedrive/issues/30 for details data.seek(0) kwz['data'] = iter(ft.partial(data.read, 200 * 2**10), b'') else: kwz['data'] = data else: kwz['data'] = json.dumps(data) kwz_headers.setdefault('Content-Type', 'application/json') if files is not None: # requests-2+ doesn't seem to add default content-type header for k, file_tuple in files.iteritems(): if len(file_tuple) == 2: files[k] = tuple(file_tuple) + ('application/octet-stream',) # Rewind is necessary because request can be repeated due to auth failure file_tuple[1].seek(0) kwz['files'] = files if kwz_headers: kwz['headers'] = kwz_headers code = res = None try: res = func(url, **kwz) # log.debug('Response headers: %s', res.headers) code = res.status_code if code == requests.codes.no_content: return if code != requests.codes.ok: res.raise_for_status() except requests.RequestException as err: message = b'{0} [type: {1}, repr: {0!r}]'.format(err, type(err)) if (res and getattr(res, 'text', None)) is not None: # "res" with non-200 code can be falsy message = res.text try: message = json.loads(message) except: message = '{}: {!r}'.format(str(err), message)[:300] else: msg_err, msg_data = message.pop('error', None), message if msg_err: message = '{}: {}'.format(msg_err.get('code', err), msg_err.get('message', msg_err)) if msg_data: message = '{} (data: {})'.format(message, msg_data) raise raise_for.get(code, ProtocolError)(code, message) if raw: res = res.content elif raw_all: res = code, dict(res.headers.items()), res.content else: res = json.loads(res.text) return res
[ "Make", "synchronous", "HTTP", "request", ".", "Can", "be", "overidden", "to", "use", "different", "http", "module", "(", "e", ".", "g", ".", "urllib2", "twisted", "etc", ")", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L148-L217
[ "def", "request", "(", "self", ",", "url", ",", "method", "=", "'get'", ",", "data", "=", "None", ",", "files", "=", "None", ",", "raw", "=", "False", ",", "raw_all", "=", "False", ",", "headers", "=", "dict", "(", ")", ",", "raise_for", "=", "dict", "(", ")", ",", "session", "=", "None", ")", ":", "try", ":", "import", "requests", "# import here to avoid dependency on the module", "except", "ImportError", "as", "exc", ":", "exc", ".", "args", "=", "(", "'Unable to find/import \"requests\" module.'", "' Please make sure that it is installed, e.g. by running \"pip install requests\" command.'", "'\\nFor more info, visit: http://docs.python-requests.org/en/latest/user/install/'", ",", ")", "raise", "exc", "if", "not", "self", ".", "_requests_setup_done", ":", "patched_session", "=", "self", ".", "_requests_setup", "(", "requests", ",", "*", "*", "(", "self", ".", "request_adapter_settings", "or", "dict", "(", ")", ")", ")", "if", "patched_session", "is", "not", "None", ":", "self", ".", "_requests_session", "=", "patched_session", "if", "session", "is", "None", ":", "session", "=", "getattr", "(", "self", ",", "'_requests_session'", ",", "None", ")", "if", "not", "session", ":", "session", "=", "self", ".", "_requests_session", "=", "requests", ".", "session", "(", ")", "elif", "not", "session", ":", "session", "=", "requests", "method", "=", "method", ".", "lower", "(", ")", "kwz", "=", "(", "self", ".", "_requests_base_keywords", "or", "dict", "(", ")", ")", ".", "copy", "(", ")", "kwz", ".", "update", "(", "self", ".", "request_extra_keywords", "or", "dict", "(", ")", ")", "kwz", ",", "func", "=", "dict", "(", ")", ",", "ft", ".", "partial", "(", "session", ".", "request", ",", "method", ".", "upper", "(", ")", ",", "*", "*", "kwz", ")", "kwz_headers", "=", "(", "self", ".", "request_base_headers", "or", "dict", "(", ")", ")", ".", "copy", "(", ")", "kwz_headers", ".", "update", "(", "headers", ")", "if", "data", "is", "not", "None", ":", "if", "method", "in", "[", "'post'", ",", "'put'", "]", ":", "if", "all", "(", "hasattr", "(", "data", ",", "k", ")", "for", "k", "in", "[", "'seek'", ",", "'read'", "]", ")", ":", "# Force chunked encoding for files, as uploads hang otherwise", "# See https://github.com/mk-fg/python-onedrive/issues/30 for details", "data", ".", "seek", "(", "0", ")", "kwz", "[", "'data'", "]", "=", "iter", "(", "ft", ".", "partial", "(", "data", ".", "read", ",", "200", "*", "2", "**", "10", ")", ",", "b''", ")", "else", ":", "kwz", "[", "'data'", "]", "=", "data", "else", ":", "kwz", "[", "'data'", "]", "=", "json", ".", "dumps", "(", "data", ")", "kwz_headers", ".", "setdefault", "(", "'Content-Type'", ",", "'application/json'", ")", "if", "files", "is", "not", "None", ":", "# requests-2+ doesn't seem to add default content-type header", "for", "k", ",", "file_tuple", "in", "files", ".", "iteritems", "(", ")", ":", "if", "len", "(", "file_tuple", ")", "==", "2", ":", "files", "[", "k", "]", "=", "tuple", "(", "file_tuple", ")", "+", "(", "'application/octet-stream'", ",", ")", "# Rewind is necessary because request can be repeated due to auth failure", "file_tuple", "[", "1", "]", ".", "seek", "(", "0", ")", "kwz", "[", "'files'", "]", "=", "files", "if", "kwz_headers", ":", "kwz", "[", "'headers'", "]", "=", "kwz_headers", "code", "=", "res", "=", "None", "try", ":", "res", "=", "func", "(", "url", ",", "*", "*", "kwz", ")", "# log.debug('Response headers: %s', res.headers)", "code", "=", "res", ".", "status_code", "if", "code", "==", "requests", ".", "codes", ".", 
"no_content", ":", "return", "if", "code", "!=", "requests", ".", "codes", ".", "ok", ":", "res", ".", "raise_for_status", "(", ")", "except", "requests", ".", "RequestException", "as", "err", ":", "message", "=", "b'{0} [type: {1}, repr: {0!r}]'", ".", "format", "(", "err", ",", "type", "(", "err", ")", ")", "if", "(", "res", "and", "getattr", "(", "res", ",", "'text'", ",", "None", ")", ")", "is", "not", "None", ":", "# \"res\" with non-200 code can be falsy", "message", "=", "res", ".", "text", "try", ":", "message", "=", "json", ".", "loads", "(", "message", ")", "except", ":", "message", "=", "'{}: {!r}'", ".", "format", "(", "str", "(", "err", ")", ",", "message", ")", "[", ":", "300", "]", "else", ":", "msg_err", ",", "msg_data", "=", "message", ".", "pop", "(", "'error'", ",", "None", ")", ",", "message", "if", "msg_err", ":", "message", "=", "'{}: {}'", ".", "format", "(", "msg_err", ".", "get", "(", "'code'", ",", "err", ")", ",", "msg_err", ".", "get", "(", "'message'", ",", "msg_err", ")", ")", "if", "msg_data", ":", "message", "=", "'{} (data: {})'", ".", "format", "(", "message", ",", "msg_data", ")", "raise", "raise_for", ".", "get", "(", "code", ",", "ProtocolError", ")", "(", "code", ",", "message", ")", "if", "raw", ":", "res", "=", "res", ".", "content", "elif", "raw_all", ":", "res", "=", "code", ",", "dict", "(", "res", ".", "headers", ".", "items", "(", ")", ")", ",", "res", ".", "content", "else", ":", "res", "=", "json", ".", "loads", "(", "res", ".", "text", ")", "return", "res" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
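One detail worth isolating from request() above is the chunked-read idiom it uses to stream file uploads (functools is imported as ft in the original module); shown here on an in-memory file object:

    import functools as ft
    import io

    data = io.BytesIO(b'x' * (500 * 2 ** 10))                  # a 500 KiB file-like object
    chunks = iter(ft.partial(data.read, 200 * 2 ** 10), b'')   # read 200 KiB at a time until EOF
    print([len(c) for c in chunks])                            # [204800, 204800, 102400]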
test
OneDriveAuth.auth_user_get_url
Build authorization URL for User Agent.
onedrive/api_v5.py
def auth_user_get_url(self, scope=None): 'Build authorization URL for User Agent.' if not self.client_id: raise AuthMissingError('No client_id specified') return '{}?{}'.format(self.auth_url_user, urllib.urlencode(dict( client_id=self.client_id, scope=' '.join(scope or self.auth_scope), response_type='code', redirect_uri=self.auth_redirect_uri )))
def auth_user_get_url(self, scope=None): 'Build authorization URL for User Agent.' if not self.client_id: raise AuthMissingError('No client_id specified') return '{}?{}'.format(self.auth_url_user, urllib.urlencode(dict( client_id=self.client_id, scope=' '.join(scope or self.auth_scope), response_type='code', redirect_uri=self.auth_redirect_uri )))
[ "Build", "authorization", "URL", "for", "User", "Agent", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L253-L258
[ "def", "auth_user_get_url", "(", "self", ",", "scope", "=", "None", ")", ":", "if", "not", "self", ".", "client_id", ":", "raise", "AuthMissingError", "(", "'No client_id specified'", ")", "return", "'{}?{}'", ".", "format", "(", "self", ".", "auth_url_user", ",", "urllib", ".", "urlencode", "(", "dict", "(", "client_id", "=", "self", ".", "client_id", ",", "scope", "=", "' '", ".", "join", "(", "scope", "or", "self", ".", "auth_scope", ")", ",", "response_type", "=", "'code'", ",", "redirect_uri", "=", "self", ".", "auth_redirect_uri", ")", ")", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
test
OneDriveAuth.auth_user_process_url
Process tokens and errors from redirect_uri.
onedrive/api_v5.py
def auth_user_process_url(self, url): 'Process tokens and errors from redirect_uri.' url = urlparse.urlparse(url) url_qs = dict(it.chain.from_iterable( urlparse.parse_qsl(v) for v in [url.query, url.fragment] )) if url_qs.get('error'): raise APIAuthError( '{} :: {}'.format(url_qs['error'], url_qs.get('error_description')) ) self.auth_code = url_qs['code'] return self.auth_code
def auth_user_process_url(self, url): 'Process tokens and errors from redirect_uri.' url = urlparse.urlparse(url) url_qs = dict(it.chain.from_iterable( urlparse.parse_qsl(v) for v in [url.query, url.fragment] )) if url_qs.get('error'): raise APIAuthError( '{} :: {}'.format(url_qs['error'], url_qs.get('error_description')) ) self.auth_code = url_qs['code'] return self.auth_code
[ "Process", "tokens", "and", "errors", "from", "redirect_uri", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L260-L269
[ "def", "auth_user_process_url", "(", "self", ",", "url", ")", ":", "url", "=", "urlparse", ".", "urlparse", "(", "url", ")", "url_qs", "=", "dict", "(", "it", ".", "chain", ".", "from_iterable", "(", "urlparse", ".", "parse_qsl", "(", "v", ")", "for", "v", "in", "[", "url", ".", "query", ",", "url", ".", "fragment", "]", ")", ")", "if", "url_qs", ".", "get", "(", "'error'", ")", ":", "raise", "APIAuthError", "(", "'{} :: {}'", ".", "format", "(", "url_qs", "[", "'error'", "]", ",", "url_qs", ".", "get", "(", "'error_description'", ")", ")", ")", "self", ".", "auth_code", "=", "url_qs", "[", "'code'", "]", "return", "self", ".", "auth_code" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
test
OneDriveAuth.auth_get_token
Refresh or acquire access_token.
onedrive/api_v5.py
def auth_get_token(self, check_scope=True): 'Refresh or acquire access_token.' res = self.auth_access_data_raw = self._auth_token_request() return self._auth_token_process(res, check_scope=check_scope)
def auth_get_token(self, check_scope=True): 'Refresh or acquire access_token.' res = self.auth_access_data_raw = self._auth_token_request() return self._auth_token_process(res, check_scope=check_scope)
[ "Refresh", "or", "acquire", "access_token", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L271-L274
[ "def", "auth_get_token", "(", "self", ",", "check_scope", "=", "True", ")", ":", "res", "=", "self", ".", "auth_access_data_raw", "=", "self", ".", "_auth_token_request", "(", ")", "return", "self", ".", "_auth_token_process", "(", "res", ",", "check_scope", "=", "check_scope", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
test
OneDriveAPIWrapper.get_user_id
Returns "id" of a OneDrive user.
onedrive/api_v5.py
def get_user_id(self): 'Returns "id" of a OneDrive user.' if self._user_id is None: self._user_id = self.get_user_data()['id'] return self._user_id
def get_user_id(self): 'Returns "id" of a OneDrive user.' if self._user_id is None: self._user_id = self.get_user_data()['id'] return self._user_id
[ "Returns", "id", "of", "a", "OneDrive", "user", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L409-L413
[ "def", "get_user_id", "(", "self", ")", ":", "if", "self", ".", "_user_id", "is", "None", ":", "self", ".", "_user_id", "=", "self", ".", "get_user_data", "(", ")", "[", "'id'", "]", "return", "self", ".", "_user_id" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
test
OneDriveAPIWrapper.listdir
Get OneDrive object representing list of objects in a folder.
onedrive/api_v5.py
def listdir(self, folder_id='me/skydrive', limit=None, offset=None): 'Get OneDrive object representing list of objects in a folder.' return self(self._api_url_join(folder_id, 'files'), dict(limit=limit, offset=offset))
def listdir(self, folder_id='me/skydrive', limit=None, offset=None): 'Get OneDrive object representing list of objects in a folder.' return self(self._api_url_join(folder_id, 'files'), dict(limit=limit, offset=offset))
[ "Get", "OneDrive", "object", "representing", "list", "of", "objects", "in", "a", "folder", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L415-L417
[ "def", "listdir", "(", "self", ",", "folder_id", "=", "'me/skydrive'", ",", "limit", "=", "None", ",", "offset", "=", "None", ")", ":", "return", "self", "(", "self", ".", "_api_url_join", "(", "folder_id", ",", "'files'", ")", ",", "dict", "(", "limit", "=", "limit", ",", "offset", "=", "offset", ")", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
test
OneDriveAPIWrapper.get
Download and return a file object or a specified byte_range from it. See HTTP Range header (rfc2616) for possible byte_range formats, Examples: "0-499" - byte offsets 0-499 (inclusive), "-500" - final 500 bytes.
onedrive/api_v5.py
def get(self, obj_id, byte_range=None): '''Download and return a file object or a specified byte_range from it. See HTTP Range header (rfc2616) for possible byte_range formats, Examples: "0-499" - byte offsets 0-499 (inclusive), "-500" - final 500 bytes.''' kwz = dict() if byte_range: kwz['headers'] = dict(Range='bytes={}'.format(byte_range)) return self(self._api_url_join(obj_id, 'content'), dict(download='true'), raw=True, **kwz)
def get(self, obj_id, byte_range=None): '''Download and return a file object or a specified byte_range from it. See HTTP Range header (rfc2616) for possible byte_range formats, Examples: "0-499" - byte offsets 0-499 (inclusive), "-500" - final 500 bytes.''' kwz = dict() if byte_range: kwz['headers'] = dict(Range='bytes={}'.format(byte_range)) return self(self._api_url_join(obj_id, 'content'), dict(download='true'), raw=True, **kwz)
[ "Download", "and", "return", "a", "file", "object", "or", "a", "specified", "byte_range", "from", "it", ".", "See", "HTTP", "Range", "header", "(", "rfc2616", ")", "for", "possible", "byte_range", "formats", "Examples", ":", "0", "-", "499", "-", "byte", "offsets", "0", "-", "499", "(", "inclusive", ")", "-", "500", "-", "final", "500", "bytes", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L425-L431
[ "def", "get", "(", "self", ",", "obj_id", ",", "byte_range", "=", "None", ")", ":", "kwz", "=", "dict", "(", ")", "if", "byte_range", ":", "kwz", "[", "'headers'", "]", "=", "dict", "(", "Range", "=", "'bytes={}'", ".", "format", "(", "byte_range", ")", ")", "return", "self", "(", "self", ".", "_api_url_join", "(", "obj_id", ",", "'content'", ")", ",", "dict", "(", "download", "=", "'true'", ")", ",", "raw", "=", "True", ",", "*", "*", "kwz", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
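A short sketch of range downloads with the get method above, assuming an authenticated `api` instance and a placeholder file id; the byte_range strings follow the rfc2616 Range formats quoted in the docstring.

file_id = 'file.0123456789abcdef.0123456789abcdef!114'   # placeholder object id
head = api.get(file_id, byte_range='0-499')               # first 500 bytes only
tail = api.get(file_id, byte_range='-500')                # final 500 bytes
blob = api.get(file_id)                                    # whole file contents (raw response body)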
test
OneDriveAPIWrapper.put
Upload a file (object), possibly overwriting (default behavior) a file with the same "name" attribute, if it exists. First argument can be either path to a local file or tuple of "(name, file)", where "file" can be either a file-like object or just a string of bytes. overwrite option can be set to False to allow two identically-named files or "ChooseNewName" to let OneDrive derive some similar unique name. Behavior of this option mimics underlying API. downsize is a true/false API flag, similar to overwrite. bits_api_fallback can be either True/False or an integer (number of bytes), and determines whether method will fall back to using BITS API (as implemented by "put_bits" method) for large files. Default "True" (bool) value will use non-BITS file size limit (api_put_max_bytes, ~100 MiB) as a fallback threshold, passing False will force using single-request uploads.
onedrive/api_v5.py
def put( self, path_or_tuple, folder_id='me/skydrive', overwrite=None, downsize=None, bits_api_fallback=True ): '''Upload a file (object), possibly overwriting (default behavior) a file with the same "name" attribute, if it exists. First argument can be either path to a local file or tuple of "(name, file)", where "file" can be either a file-like object or just a string of bytes. overwrite option can be set to False to allow two identically-named files or "ChooseNewName" to let OneDrive derive some similar unique name. Behavior of this option mimics underlying API. downsize is a true/false API flag, similar to overwrite. bits_api_fallback can be either True/False or an integer (number of bytes), and determines whether method will fall back to using BITS API (as implemented by "put_bits" method) for large files. Default "True" (bool) value will use non-BITS file size limit (api_put_max_bytes, ~100 MiB) as a fallback threshold, passing False will force using single-request uploads.''' api_overwrite = self._translate_api_flag(overwrite, 'overwrite', ['ChooseNewName']) api_downsize = self._translate_api_flag(downsize, 'downsize') name, src = self._process_upload_source(path_or_tuple) if not isinstance(bits_api_fallback, (int, float, long)): bits_api_fallback = bool(bits_api_fallback) if bits_api_fallback is not False: if bits_api_fallback is True: bits_api_fallback = self.api_put_max_bytes src.seek(0, os.SEEK_END) if src.tell() >= bits_api_fallback: if bits_api_fallback > 0: # not really a "fallback" in this case log.info( 'Falling-back to using BITS API due to file size (%.1f MiB > %.1f MiB)', *((float(v) / 2**20) for v in [src.tell(), bits_api_fallback]) ) if overwrite is not None and api_overwrite != 'true': raise NoAPISupportError( 'Passed "overwrite" flag (value: {!r})' ' is not supported by the BITS API (always "true" there)'.format(overwrite) ) if downsize is not None: log.info( 'Passed "downsize" flag (value: %r) will not' ' be used with BITS API, as it is not supported there', downsize ) file_id = self.put_bits(path_or_tuple, folder_id=folder_id) # XXX: overwrite/downsize return self.info(file_id) # PUT seem to have better support for unicode # filenames and is recommended in the API docs, see #19. # return self( self._api_url_join(folder_id, 'files'), # dict(overwrite=api_overwrite, downsize_photo_uploads=api_downsize), # method='post', files=dict(file=(name, src)) ) return self( self._api_url_join(folder_id, 'files', name), dict(overwrite=api_overwrite, downsize_photo_uploads=api_downsize), data=src, method='put', auth_header=True )
def put( self, path_or_tuple, folder_id='me/skydrive', overwrite=None, downsize=None, bits_api_fallback=True ): '''Upload a file (object), possibly overwriting (default behavior) a file with the same "name" attribute, if it exists. First argument can be either path to a local file or tuple of "(name, file)", where "file" can be either a file-like object or just a string of bytes. overwrite option can be set to False to allow two identically-named files or "ChooseNewName" to let OneDrive derive some similar unique name. Behavior of this option mimics underlying API. downsize is a true/false API flag, similar to overwrite. bits_api_fallback can be either True/False or an integer (number of bytes), and determines whether method will fall back to using BITS API (as implemented by "put_bits" method) for large files. Default "True" (bool) value will use non-BITS file size limit (api_put_max_bytes, ~100 MiB) as a fallback threshold, passing False will force using single-request uploads.''' api_overwrite = self._translate_api_flag(overwrite, 'overwrite', ['ChooseNewName']) api_downsize = self._translate_api_flag(downsize, 'downsize') name, src = self._process_upload_source(path_or_tuple) if not isinstance(bits_api_fallback, (int, float, long)): bits_api_fallback = bool(bits_api_fallback) if bits_api_fallback is not False: if bits_api_fallback is True: bits_api_fallback = self.api_put_max_bytes src.seek(0, os.SEEK_END) if src.tell() >= bits_api_fallback: if bits_api_fallback > 0: # not really a "fallback" in this case log.info( 'Falling-back to using BITS API due to file size (%.1f MiB > %.1f MiB)', *((float(v) / 2**20) for v in [src.tell(), bits_api_fallback]) ) if overwrite is not None and api_overwrite != 'true': raise NoAPISupportError( 'Passed "overwrite" flag (value: {!r})' ' is not supported by the BITS API (always "true" there)'.format(overwrite) ) if downsize is not None: log.info( 'Passed "downsize" flag (value: %r) will not' ' be used with BITS API, as it is not supported there', downsize ) file_id = self.put_bits(path_or_tuple, folder_id=folder_id) # XXX: overwrite/downsize return self.info(file_id) # PUT seem to have better support for unicode # filenames and is recommended in the API docs, see #19. # return self( self._api_url_join(folder_id, 'files'), # dict(overwrite=api_overwrite, downsize_photo_uploads=api_downsize), # method='post', files=dict(file=(name, src)) ) return self( self._api_url_join(folder_id, 'files', name), dict(overwrite=api_overwrite, downsize_photo_uploads=api_downsize), data=src, method='put', auth_header=True )
[ "Upload", "a", "file", "(", "object", ")", "possibly", "overwriting", "(", "default", "behavior", ")", "a", "file", "with", "the", "same", "name", "attribute", "if", "it", "exists", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L433-L483
[ "def", "put", "(", "self", ",", "path_or_tuple", ",", "folder_id", "=", "'me/skydrive'", ",", "overwrite", "=", "None", ",", "downsize", "=", "None", ",", "bits_api_fallback", "=", "True", ")", ":", "api_overwrite", "=", "self", ".", "_translate_api_flag", "(", "overwrite", ",", "'overwrite'", ",", "[", "'ChooseNewName'", "]", ")", "api_downsize", "=", "self", ".", "_translate_api_flag", "(", "downsize", ",", "'downsize'", ")", "name", ",", "src", "=", "self", ".", "_process_upload_source", "(", "path_or_tuple", ")", "if", "not", "isinstance", "(", "bits_api_fallback", ",", "(", "int", ",", "float", ",", "long", ")", ")", ":", "bits_api_fallback", "=", "bool", "(", "bits_api_fallback", ")", "if", "bits_api_fallback", "is", "not", "False", ":", "if", "bits_api_fallback", "is", "True", ":", "bits_api_fallback", "=", "self", ".", "api_put_max_bytes", "src", ".", "seek", "(", "0", ",", "os", ".", "SEEK_END", ")", "if", "src", ".", "tell", "(", ")", ">=", "bits_api_fallback", ":", "if", "bits_api_fallback", ">", "0", ":", "# not really a \"fallback\" in this case", "log", ".", "info", "(", "'Falling-back to using BITS API due to file size (%.1f MiB > %.1f MiB)'", ",", "*", "(", "(", "float", "(", "v", ")", "/", "2", "**", "20", ")", "for", "v", "in", "[", "src", ".", "tell", "(", ")", ",", "bits_api_fallback", "]", ")", ")", "if", "overwrite", "is", "not", "None", "and", "api_overwrite", "!=", "'true'", ":", "raise", "NoAPISupportError", "(", "'Passed \"overwrite\" flag (value: {!r})'", "' is not supported by the BITS API (always \"true\" there)'", ".", "format", "(", "overwrite", ")", ")", "if", "downsize", "is", "not", "None", ":", "log", ".", "info", "(", "'Passed \"downsize\" flag (value: %r) will not'", "' be used with BITS API, as it is not supported there'", ",", "downsize", ")", "file_id", "=", "self", ".", "put_bits", "(", "path_or_tuple", ",", "folder_id", "=", "folder_id", ")", "# XXX: overwrite/downsize", "return", "self", ".", "info", "(", "file_id", ")", "# PUT seem to have better support for unicode", "# filenames and is recommended in the API docs, see #19.", "# return self( self._api_url_join(folder_id, 'files'),", "# \tdict(overwrite=api_overwrite, downsize_photo_uploads=api_downsize),", "# \tmethod='post', files=dict(file=(name, src)) )", "return", "self", "(", "self", ".", "_api_url_join", "(", "folder_id", ",", "'files'", ",", "name", ")", ",", "dict", "(", "overwrite", "=", "api_overwrite", ",", "downsize_photo_uploads", "=", "api_downsize", ")", ",", "data", "=", "src", ",", "method", "=", "'put'", ",", "auth_header", "=", "True", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
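A hedged sketch of the two upload paths that put chooses between, assuming an authenticated `api`; the returned metadata is assumed to carry an 'id' field, as the method's own BITS branch (self.info(file_id)) suggests.

info = api.put('/tmp/report.pdf')                              # single-request upload into me/skydrive
info = api.put(('notes.txt', 'hello world\n'),
    bits_api_fallback=False)                                   # in-memory source, force the single-request path
info = api.put('/tmp/big.iso', bits_api_fallback=10 * 2**20)   # use the BITS API for anything at or above 10 MiB
print(info['id'])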
test
OneDriveAPIWrapper.put_bits
Upload a file (object) using BITS API (via several http requests), possibly overwriting (default behavior) a file with the same "name" attribute, if it exists. Unlike "put" method, uploads to "folder_path" (instead of folder_id) are supported here. Either folder path or id can be specified, but not both. Passed "chunk_callback" function (if any) will be called after each uploaded chunk with keyword parameters corresponding to upload state and BITS session info required to resume it, if necessary. Returns id of the uploaded file, as returned by the API if raw_id=True is passed, otherwise in a consistent (with other calls) "file.{user_id}.{file_id}" format (default).
onedrive/api_v5.py
def put_bits( self, path_or_tuple, folder_id=None, folder_path=None, frag_bytes=None, raw_id=False, chunk_callback=None ): '''Upload a file (object) using BITS API (via several http requests), possibly overwriting (default behavior) a file with the same "name" attribute, if it exists. Unlike "put" method, uploads to "folder_path" (instead of folder_id) are supported here. Either folder path or id can be specified, but not both. Passed "chunk_callback" function (if any) will be called after each uploaded chunk with keyword parameters corresponding to upload state and BITS session info required to resume it, if necessary. Returns id of the uploaded file, as returned by the API if raw_id=True is passed, otherwise in a consistent (with other calls) "file.{user_id}.{file_id}" format (default).''' # XXX: overwrite/downsize are not documented/supported here (yet?) name, src = self._process_upload_source(path_or_tuple) if folder_id is not None and folder_path is not None: raise ValueError('Either "folder_id" or "folder_path" can be specified, but not both.') if folder_id is None and folder_path is None: folder_id = 'me/skydrive' if folder_id and re.search(r'^me(/.*)$', folder_id): folder_id = self.info(folder_id)['id'] if not frag_bytes: frag_bytes = self.api_bits_default_frag_bytes user_id = self.get_user_id() if folder_id: # workaround for API-ids inconsistency between BITS and regular API match = re.search( r'^(?i)folder.[a-f0-9]+.' '(?P<user_id>[a-f0-9]+(?P<folder_n>!\d+)?)$', folder_id ) if match and not match.group('folder_n'): # root folder is a special case and can't seem to be accessed by id folder_id, folder_path = None, '' else: if not match: raise ValueError('Failed to process folder_id for BITS API: {!r}'.format(folder_id)) folder_id = match.group('user_id') if folder_id: url = self.api_bits_url_by_id.format(folder_id=folder_id, user_id=user_id, filename=name) else: url = self.api_bits_url_by_path.format( folder_id=folder_id, user_id=user_id, file_path=ujoin(folder_path, name).lstrip('/') ) code, headers, body = self( url, method='post', auth_header=True, raw_all=True, headers={ 'X-Http-Method-Override': 'BITS_POST', 'BITS-Packet-Type': 'Create-Session', 'BITS-Supported-Protocols': self.api_bits_protocol_id }) h = lambda k,hs=dict((k.lower(), v) for k,v in headers.viewitems()): hs.get(k, '') checks = [ code == 201, h('bits-packet-type').lower() == 'ack', h('bits-protocol').lower() == self.api_bits_protocol_id.lower(), h('bits-session-id') ] if not all(checks): raise ProtocolError(code, 'Invalid BITS Create-Session response', headers, body, checks) bits_sid = h('bits-session-id') src.seek(0, os.SEEK_END) c, src_len = 0, src.tell() cn = src_len / frag_bytes if frag_bytes * cn != src_len: cn += 1 src.seek(0) for n in xrange(1, cn+1): log.debug( 'Uploading BITS fragment' ' %s / %s (max-size: %.2f MiB)', n, cn, frag_bytes / float(2**20) ) frag = BITSFragment(src, frag_bytes) c1 = c + frag_bytes self( url, method='post', raw=True, data=frag, headers={ 'X-Http-Method-Override': 'BITS_POST', 'BITS-Packet-Type': 'Fragment', 'BITS-Session-Id': bits_sid, 'Content-Range': 'bytes {}-{}/{}'.format(c, min(c1, src_len)-1, src_len) }) c = c1 if chunk_callback: chunk_callback( bytes_transferred=c, bytes_total=src_len, chunks_transferred=n, chunks_total=cn, bits_session_id=bits_sid ) if self.api_bits_auth_refresh_before_commit_hack: # As per #39 and comments under the gist with the spec, # apparently this trick fixes occasional http-5XX errors from the API self.auth_get_token() code, headers, body = self( url, method='post', auth_header=True, raw_all=True, headers={ 'X-Http-Method-Override': 'BITS_POST', 'BITS-Packet-Type': 'Close-Session', 'BITS-Session-Id': bits_sid }) h = lambda k,hs=dict((k.lower(), v) for k,v in headers.viewitems()): hs.get(k, '') checks = [code in [200, 201], h('bits-packet-type').lower() == 'ack' ] # int(h('bits-received-content-range') or 0) == src_len -- documented, but missing # h('bits-session-id') == bits_sid -- documented, but missing if not all(checks): raise ProtocolError(code, 'Invalid BITS Close-Session response', headers, body, checks) # Workaround for API-ids inconsistency between BITS and regular API file_id = h('x-resource-id') if not raw_id: file_id = 'file.{}.{}'.format(user_id, file_id) return file_id
def put_bits( self, path_or_tuple, folder_id=None, folder_path=None, frag_bytes=None, raw_id=False, chunk_callback=None ): '''Upload a file (object) using BITS API (via several http requests), possibly overwriting (default behavior) a file with the same "name" attribute, if it exists. Unlike "put" method, uploads to "folder_path" (instead of folder_id) are supported here. Either folder path or id can be specified, but not both. Passed "chunk_callback" function (if any) will be called after each uploaded chunk with keyword parameters corresponding to upload state and BITS session info required to resume it, if necessary. Returns id of the uploaded file, as returned by the API if raw_id=True is passed, otherwise in a consistent (with other calls) "file.{user_id}.{file_id}" format (default).''' # XXX: overwrite/downsize are not documented/supported here (yet?) name, src = self._process_upload_source(path_or_tuple) if folder_id is not None and folder_path is not None: raise ValueError('Either "folder_id" or "folder_path" can be specified, but not both.') if folder_id is None and folder_path is None: folder_id = 'me/skydrive' if folder_id and re.search(r'^me(/.*)$', folder_id): folder_id = self.info(folder_id)['id'] if not frag_bytes: frag_bytes = self.api_bits_default_frag_bytes user_id = self.get_user_id() if folder_id: # workaround for API-ids inconsistency between BITS and regular API match = re.search( r'^(?i)folder.[a-f0-9]+.' '(?P<user_id>[a-f0-9]+(?P<folder_n>!\d+)?)$', folder_id ) if match and not match.group('folder_n'): # root folder is a special case and can't seem to be accessed by id folder_id, folder_path = None, '' else: if not match: raise ValueError('Failed to process folder_id for BITS API: {!r}'.format(folder_id)) folder_id = match.group('user_id') if folder_id: url = self.api_bits_url_by_id.format(folder_id=folder_id, user_id=user_id, filename=name) else: url = self.api_bits_url_by_path.format( folder_id=folder_id, user_id=user_id, file_path=ujoin(folder_path, name).lstrip('/') ) code, headers, body = self( url, method='post', auth_header=True, raw_all=True, headers={ 'X-Http-Method-Override': 'BITS_POST', 'BITS-Packet-Type': 'Create-Session', 'BITS-Supported-Protocols': self.api_bits_protocol_id }) h = lambda k,hs=dict((k.lower(), v) for k,v in headers.viewitems()): hs.get(k, '') checks = [ code == 201, h('bits-packet-type').lower() == 'ack', h('bits-protocol').lower() == self.api_bits_protocol_id.lower(), h('bits-session-id') ] if not all(checks): raise ProtocolError(code, 'Invalid BITS Create-Session response', headers, body, checks) bits_sid = h('bits-session-id') src.seek(0, os.SEEK_END) c, src_len = 0, src.tell() cn = src_len / frag_bytes if frag_bytes * cn != src_len: cn += 1 src.seek(0) for n in xrange(1, cn+1): log.debug( 'Uploading BITS fragment' ' %s / %s (max-size: %.2f MiB)', n, cn, frag_bytes / float(2**20) ) frag = BITSFragment(src, frag_bytes) c1 = c + frag_bytes self( url, method='post', raw=True, data=frag, headers={ 'X-Http-Method-Override': 'BITS_POST', 'BITS-Packet-Type': 'Fragment', 'BITS-Session-Id': bits_sid, 'Content-Range': 'bytes {}-{}/{}'.format(c, min(c1, src_len)-1, src_len) }) c = c1 if chunk_callback: chunk_callback( bytes_transferred=c, bytes_total=src_len, chunks_transferred=n, chunks_total=cn, bits_session_id=bits_sid ) if self.api_bits_auth_refresh_before_commit_hack: # As per #39 and comments under the gist with the spec, # apparently this trick fixes occasional http-5XX errors from the API self.auth_get_token() code, headers, body = self( url, method='post', auth_header=True, raw_all=True, headers={ 'X-Http-Method-Override': 'BITS_POST', 'BITS-Packet-Type': 'Close-Session', 'BITS-Session-Id': bits_sid }) h = lambda k,hs=dict((k.lower(), v) for k,v in headers.viewitems()): hs.get(k, '') checks = [code in [200, 201], h('bits-packet-type').lower() == 'ack' ] # int(h('bits-received-content-range') or 0) == src_len -- documented, but missing # h('bits-session-id') == bits_sid -- documented, but missing if not all(checks): raise ProtocolError(code, 'Invalid BITS Close-Session response', headers, body, checks) # Workaround for API-ids inconsistency between BITS and regular API file_id = h('x-resource-id') if not raw_id: file_id = 'file.{}.{}'.format(user_id, file_id) return file_id
[ "Upload", "a", "file", "(", "object", ")", "using", "BITS", "API", "(", "via", "several", "http", "requests", ")", "possibly", "overwriting", "(", "default", "behavior", ")", "a", "file", "with", "the", "same", "name", "attribute", "if", "it", "exists", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L485-L590
[ "def", "put_bits", "(", "self", ",", "path_or_tuple", ",", "folder_id", "=", "None", ",", "folder_path", "=", "None", ",", "frag_bytes", "=", "None", ",", "raw_id", "=", "False", ",", "chunk_callback", "=", "None", ")", ":", "# XXX: overwrite/downsize are not documented/supported here (yet?)", "name", ",", "src", "=", "self", ".", "_process_upload_source", "(", "path_or_tuple", ")", "if", "folder_id", "is", "not", "None", "and", "folder_path", "is", "not", "None", ":", "raise", "ValueError", "(", "'Either \"folder_id\" or \"folder_path\" can be specified, but not both.'", ")", "if", "folder_id", "is", "None", "and", "folder_path", "is", "None", ":", "folder_id", "=", "'me/skydrive'", "if", "folder_id", "and", "re", ".", "search", "(", "r'^me(/.*)$'", ",", "folder_id", ")", ":", "folder_id", "=", "self", ".", "info", "(", "folder_id", ")", "[", "'id'", "]", "if", "not", "frag_bytes", ":", "frag_bytes", "=", "self", ".", "api_bits_default_frag_bytes", "user_id", "=", "self", ".", "get_user_id", "(", ")", "if", "folder_id", ":", "# workaround for API-ids inconsistency between BITS and regular API", "match", "=", "re", ".", "search", "(", "r'^(?i)folder.[a-f0-9]+.'", "'(?P<user_id>[a-f0-9]+(?P<folder_n>!\\d+)?)$'", ",", "folder_id", ")", "if", "match", "and", "not", "match", ".", "group", "(", "'folder_n'", ")", ":", "# root folder is a special case and can't seem to be accessed by id", "folder_id", ",", "folder_path", "=", "None", ",", "''", "else", ":", "if", "not", "match", ":", "raise", "ValueError", "(", "'Failed to process folder_id for BITS API: {!r}'", ".", "format", "(", "folder_id", ")", ")", "folder_id", "=", "match", ".", "group", "(", "'user_id'", ")", "if", "folder_id", ":", "url", "=", "self", ".", "api_bits_url_by_id", ".", "format", "(", "folder_id", "=", "folder_id", ",", "user_id", "=", "user_id", ",", "filename", "=", "name", ")", "else", ":", "url", "=", "self", ".", "api_bits_url_by_path", ".", "format", "(", "folder_id", "=", "folder_id", ",", "user_id", "=", "user_id", ",", "file_path", "=", "ujoin", "(", "folder_path", ",", "name", ")", ".", "lstrip", "(", "'/'", ")", ")", "code", ",", "headers", ",", "body", "=", "self", "(", "url", ",", "method", "=", "'post'", ",", "auth_header", "=", "True", ",", "raw_all", "=", "True", ",", "headers", "=", "{", "'X-Http-Method-Override'", ":", "'BITS_POST'", ",", "'BITS-Packet-Type'", ":", "'Create-Session'", ",", "'BITS-Supported-Protocols'", ":", "self", ".", "api_bits_protocol_id", "}", ")", "h", "=", "lambda", "k", ",", "hs", "=", "dict", "(", "(", "k", ".", "lower", "(", ")", ",", "v", ")", "for", "k", ",", "v", "in", "headers", ".", "viewitems", "(", ")", ")", ":", "hs", ".", "get", "(", "k", ",", "''", ")", "checks", "=", "[", "code", "==", "201", ",", "h", "(", "'bits-packet-type'", ")", ".", "lower", "(", ")", "==", "'ack'", ",", "h", "(", "'bits-protocol'", ")", ".", "lower", "(", ")", "==", "self", ".", "api_bits_protocol_id", ".", "lower", "(", ")", ",", "h", "(", "'bits-session-id'", ")", "]", "if", "not", "all", "(", "checks", ")", ":", "raise", "ProtocolError", "(", "code", ",", "'Invalid BITS Create-Session response'", ",", "headers", ",", "body", ",", "checks", ")", "bits_sid", "=", "h", "(", "'bits-session-id'", ")", "src", ".", "seek", "(", "0", ",", "os", ".", "SEEK_END", ")", "c", ",", "src_len", "=", "0", ",", "src", ".", "tell", "(", ")", "cn", "=", "src_len", "/", "frag_bytes", "if", "frag_bytes", "*", "cn", "!=", "src_len", ":", "cn", "+=", "1", "src", ".", "seek", "(", "0", ")", "for", "n", "in", "xrange", "(", "1", ",", "cn", "+", "1", ")", ":", "log", ".", "debug", "(", "'Uploading BITS fragment'", "' %s / %s (max-size: %.2f MiB)'", ",", "n", ",", "cn", ",", "frag_bytes", "/", "float", "(", "2", "**", "20", ")", ")", "frag", "=", "BITSFragment", "(", "src", ",", "frag_bytes", ")", "c1", "=", "c", "+", "frag_bytes", "self", "(", "url", ",", "method", "=", "'post'", ",", "raw", "=", "True", ",", "data", "=", "frag", ",", "headers", "=", "{", "'X-Http-Method-Override'", ":", "'BITS_POST'", ",", "'BITS-Packet-Type'", ":", "'Fragment'", ",", "'BITS-Session-Id'", ":", "bits_sid", ",", "'Content-Range'", ":", "'bytes {}-{}/{}'", ".", "format", "(", "c", ",", "min", "(", "c1", ",", "src_len", ")", "-", "1", ",", "src_len", ")", "}", ")", "c", "=", "c1", "if", "chunk_callback", ":", "chunk_callback", "(", "bytes_transferred", "=", "c", ",", "bytes_total", "=", "src_len", ",", "chunks_transferred", "=", "n", ",", "chunks_total", "=", "cn", ",", "bits_session_id", "=", "bits_sid", ")", "if", "self", ".", "api_bits_auth_refresh_before_commit_hack", ":", "# As per #39 and comments under the gist with the spec,", "# apparently this trick fixes occasional http-5XX errors from the API", "self", ".", "auth_get_token", "(", ")", "code", ",", "headers", ",", "body", "=", "self", "(", "url", ",", "method", "=", "'post'", ",", "auth_header", "=", "True", ",", "raw_all", "=", "True", ",", "headers", "=", "{", "'X-Http-Method-Override'", ":", "'BITS_POST'", ",", "'BITS-Packet-Type'", ":", "'Close-Session'", ",", "'BITS-Session-Id'", ":", "bits_sid", "}", ")", "h", "=", "lambda", "k", ",", "hs", "=", "dict", "(", "(", "k", ".", "lower", "(", ")", ",", "v", ")", "for", "k", ",", "v", "in", "headers", ".", "viewitems", "(", ")", ")", ":", "hs", ".", "get", "(", "k", ",", "''", ")", "checks", "=", "[", "code", "in", "[", "200", ",", "201", "]", ",", "h", "(", "'bits-packet-type'", ")", ".", "lower", "(", ")", "==", "'ack'", "]", "# int(h('bits-received-content-range') or 0) == src_len -- documented, but missing", "# h('bits-session-id') == bits_sid -- documented, but missing", "if", "not", "all", "(", "checks", ")", ":", "raise", "ProtocolError", "(", "code", ",", "'Invalid BITS Close-Session response'", ",", "headers", ",", "body", ",", "checks", ")", "# Workaround for API-ids inconsistency between BITS and regular API", "file_id", "=", "h", "(", "'x-resource-id'", ")", "if", "not", "raw_id", ":", "file_id", "=", "'file.{}.{}'", ".", "format", "(", "user_id", ",", "file_id", ")", "return", "file_id" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
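A sketch of a chunked BITS upload with a progress callback, assuming an authenticated `api`; the callback keyword names below are exactly the ones put_bits passes after each fragment.

def progress(bytes_transferred, bytes_total, chunks_transferred, chunks_total, bits_session_id):
    print('chunk {}/{}, {}/{} bytes (session {})'.format(
        chunks_transferred, chunks_total, bytes_transferred, bytes_total, bits_session_id))

file_id = api.put_bits('/tmp/big.iso',
    folder_path='backups/isos',     # path-based destination, which put() does not support
    frag_bytes=10 * 2**20,          # 10 MiB fragments instead of api_bits_default_frag_bytes
    chunk_callback=progress)
print(file_id)                      # "file.{user_id}.{file_id}" form unless raw_id=True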
test
OneDriveAPIWrapper.mkdir
Create a folder with a specified "name" attribute. folder_id allows to specify a parent folder. metadata mapping may contain additional folder properties to pass to an API.
onedrive/api_v5.py
def mkdir(self, name=None, folder_id='me/skydrive', metadata=dict()): '''Create a folder with a specified "name" attribute. folder_id allows to specify a parent folder. metadata mapping may contain additional folder properties to pass to an API.''' metadata = metadata.copy() if name: metadata['name'] = name return self(folder_id, data=metadata, method='post', auth_header=True)
def mkdir(self, name=None, folder_id='me/skydrive', metadata=dict()): '''Create a folder with a specified "name" attribute. folder_id allows to specify a parent folder. metadata mapping may contain additional folder properties to pass to an API.''' metadata = metadata.copy() if name: metadata['name'] = name return self(folder_id, data=metadata, method='post', auth_header=True)
[ "Create", "a", "folder", "with", "a", "specified", "name", "attribute", ".", "folder_id", "allows", "to", "specify", "a", "parent", "folder", ".", "metadata", "mapping", "may", "contain", "additional", "folder", "properties", "to", "pass", "to", "an", "API", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L592-L598
[ "def", "mkdir", "(", "self", ",", "name", "=", "None", ",", "folder_id", "=", "'me/skydrive'", ",", "metadata", "=", "dict", "(", ")", ")", ":", "metadata", "=", "metadata", ".", "copy", "(", ")", "if", "name", ":", "metadata", "[", "'name'", "]", "=", "name", "return", "self", "(", "folder_id", ",", "data", "=", "metadata", ",", "method", "=", "'post'", ",", "auth_header", "=", "True", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
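A minimal sketch of creating nested folders with mkdir above, assuming an authenticated `api`, that the response includes the new folder's 'id', and that 'description' is one of the folder properties the API accepts via the metadata mapping.

photos = api.mkdir('photos')                       # created under me/skydrive by default
trip = api.mkdir('trip-2015', folder_id=photos['id'],
    metadata=dict(description='Vacation shots'))   # 'description' assumed to be an accepted property
print(trip['id'])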
test
OneDriveAPIWrapper.info_update
Update metadata of a specified object. See http://msdn.microsoft.com/en-us/library/live/hh243648.aspx for the list of RW keys for each object type.
onedrive/api_v5.py
def info_update(self, obj_id, data): '''Update metadata of a specified object. See http://msdn.microsoft.com/en-us/library/live/hh243648.aspx for the list of RW keys for each object type.''' return self(obj_id, method='put', data=data, auth_header=True)
def info_update(self, obj_id, data): '''Update metadata of a specified object. See http://msdn.microsoft.com/en-us/library/live/hh243648.aspx for the list of RW keys for each object type.''' return self(obj_id, method='put', data=data, auth_header=True)
[ "Update", "metadata", "of", "a", "specified", "object", ".", "See", "http", ":", "//", "msdn", ".", "microsoft", ".", "com", "/", "en", "-", "us", "/", "library", "/", "live", "/", "hh243648", ".", "aspx", "for", "the", "list", "of", "RW", "keys", "for", "each", "object", "type", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L604-L608
[ "def", "info_update", "(", "self", ",", "obj_id", ",", "data", ")", ":", "return", "self", "(", "obj_id", ",", "method", "=", "'put'", ",", "data", "=", "data", ",", "auth_header", "=", "True", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
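A sketch of renaming an object via info_update above, assuming an authenticated `api`, a placeholder object id, and that 'name' is among the read-write keys on the MSDN page the docstring points to.

file_id = 'file.0123456789abcdef.0123456789abcdef!120'   # placeholder object id
api.info_update(file_id, dict(name='renamed.txt'))        # 'name' assumed writable for file objects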
test
OneDriveAPIWrapper.link
Return a preauthenticated (usable by anyone) link to a specified object. Object will be considered "shared" by OneDrive, even if link is never actually used. link_type can be either "embed" (returns html), "shared_read_link" or "shared_edit_link".
onedrive/api_v5.py
def link(self, obj_id, link_type='shared_read_link'): '''Return a preauthenticated (usable by anyone) link to a specified object. Object will be considered "shared" by OneDrive, even if link is never actually used. link_type can be either "embed" (returns html), "shared_read_link" or "shared_edit_link".''' assert link_type in ['embed', 'shared_read_link', 'shared_edit_link'] return self(self._api_url_join(obj_id, link_type), method='get')
def link(self, obj_id, link_type='shared_read_link'): '''Return a preauthenticated (usable by anyone) link to a specified object. Object will be considered "shared" by OneDrive, even if link is never actually used. link_type can be either "embed" (returns html), "shared_read_link" or "shared_edit_link".''' assert link_type in ['embed', 'shared_read_link', 'shared_edit_link'] return self(self._api_url_join(obj_id, link_type), method='get')
[ "Return", "a", "preauthenticated", "(", "usable", "by", "anyone", ")", "link", "to", "a", "specified", "object", ".", "Object", "will", "be", "considered", "shared", "by", "OneDrive", "even", "if", "link", "is", "never", "actually", "used", ".", "link_type", "can", "be", "either", "embed", "(", "returns", "html", ")", "shared_read_link", "or", "shared_edit_link", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L610-L616
[ "def", "link", "(", "self", ",", "obj_id", ",", "link_type", "=", "'shared_read_link'", ")", ":", "assert", "link_type", "in", "[", "'embed'", ",", "'shared_read_link'", ",", "'shared_edit_link'", "]", "return", "self", "(", "self", ".", "_api_url_join", "(", "obj_id", ",", "link_type", ")", ",", "method", "=", "'get'", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
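A sketch of generating share links with the link method above, assuming an authenticated `api`; the exact field holding the URL or html in the returned mapping is not shown in this record, so the response is simply printed for inspection.

file_id = 'file.0123456789abcdef.0123456789abcdef!114'   # placeholder object id
resp = api.link(file_id)                                  # default shared_read_link; marks the object as shared
print(resp)                                               # inspect the mapping for the actual link field
html = api.link(file_id, link_type='embed')               # embeddable html variant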
test
OneDriveAPIWrapper.copy
Copy specified file (object) to a folder with a given ID. Well-known folder names (like "me/skydrive") don't seem to work here. Folders cannot be copied; this is an API limitation.
onedrive/api_v5.py
def copy(self, obj_id, folder_id, move=False): '''Copy specified file (object) to a folder with a given ID. Well-known folder names (like "me/skydrive") don't seem to work here. Folders cannot be copied; this is an API limitation.''' return self( obj_id, method='copy' if not move else 'move', data=dict(destination=folder_id), auth_header=True )
def copy(self, obj_id, folder_id, move=False): '''Copy specified file (object) to a folder with a given ID. Well-known folder names (like "me/skydrive") don't seem to work here. Folders cannot be copied; this is an API limitation.''' return self( obj_id, method='copy' if not move else 'move', data=dict(destination=folder_id), auth_header=True )
[ "Copy", "specified", "file", "(", "object", ")", "to", "a", "folder", "with", "a", "given", "ID", ".", "Well", "-", "known", "folder", "names", "(", "like", "me", "/", "skydrive", ")", "don", "t", "seem", "to", "work", "here", ".", "Folders", "cannot", "be", "copied", ";", "this", "is", "an", "API", "limitation", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L618-L625
[ "def", "copy", "(", "self", ",", "obj_id", ",", "folder_id", ",", "move", "=", "False", ")", ":", "return", "self", "(", "obj_id", ",", "method", "=", "'copy'", "if", "not", "move", "else", "'move'", ",", "data", "=", "dict", "(", "destination", "=", "folder_id", ")", ",", "auth_header", "=", "True", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
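A sketch of copy/move through the wrapper above, assuming an authenticated `api` and concrete folder ids, since the docstring warns that well-known names like "me/skydrive" do not work here (the higher-level OneDriveAPI.copy further below resolves them first).

src_id = 'file.0123456789abcdef.0123456789abcdef!131'        # placeholder ids
dst_id = 'folder.0123456789abcdef.0123456789abcdef!200'
api.copy(src_id, dst_id)               # plain copy, the source object stays in place
api.copy(src_id, dst_id, move=True)    # same call with move semantics, as move() does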
test
OneDriveAPIWrapper.move
Move specified file (object) to a folder. Note that folders cannot be moved, this is an API limitation.
onedrive/api_v5.py
def move(self, obj_id, folder_id): '''Move specified file (object) to a folder. Note that folders cannot be moved, this is an API limitation.''' return self.copy(obj_id, folder_id, move=True)
def move(self, obj_id, folder_id): '''Move specified file (object) to a folder. Note that folders cannot be moved, this is an API limitation.''' return self.copy(obj_id, folder_id, move=True)
[ "Move", "specified", "file", "(", "object", ")", "to", "a", "folder", ".", "Note", "that", "folders", "cannot", "be", "moved", "this", "is", "an", "API", "limitation", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L627-L630
[ "def", "move", "(", "self", ",", "obj_id", ",", "folder_id", ")", ":", "return", "self", ".", "copy", "(", "obj_id", ",", "folder_id", ",", "move", "=", "True", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
test
OneDriveAPIWrapper.comment_add
Add comment message to a specified object.
onedrive/api_v5.py
def comment_add(self, obj_id, message): 'Add comment message to a specified object.' return self( self._api_url_join(obj_id, 'comments'), method='post', data=dict(message=message), auth_header=True )
def comment_add(self, obj_id, message): 'Add comment message to a specified object.' return self( self._api_url_join(obj_id, 'comments'), method='post', data=dict(message=message), auth_header=True )
[ "Add", "comment", "message", "to", "a", "specified", "object", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L636-L639
[ "def", "comment_add", "(", "self", ",", "obj_id", ",", "message", ")", ":", "return", "self", "(", "self", ".", "_api_url_join", "(", "obj_id", ",", "'comments'", ")", ",", "method", "=", "'post'", ",", "data", "=", "dict", "(", "message", "=", "message", ")", ",", "auth_header", "=", "True", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
test
OneDriveAPI.resolve_path
Return id (or metadata) of an object, specified by chain (iterable or fs-style path string) of "name" attributes of its ancestors, or raises DoesNotExists error. Requires many calls to resolve each name in path, so use with care. root_id parameter allows to specify path relative to some folder_id (default: me/skydrive).
onedrive/api_v5.py
def resolve_path(self, path, root_id='me/skydrive', objects=False, listdir_limit=500): '''Return id (or metadata) of an object, specified by chain (iterable or fs-style path string) of "name" attributes of its ancestors, or raises DoesNotExists error. Requires many calls to resolve each name in path, so use with care. root_id parameter allows to specify path relative to some folder_id (default: me/skydrive).''' if path: if isinstance(path, types.StringTypes): if not path.startswith('me/skydrive'): # Split path by both kinds of slashes path = filter(None, it.chain.from_iterable(p.split('\\') for p in path.split('/'))) else: root_id, path = path, None if path: try: for i, name in enumerate(path): offset = None while True: obj_list = self.listdir(root_id, offset=offset, limit=listdir_limit) try: root_id = dict(it.imap(op.itemgetter('name', 'id'), obj_list))[name] except KeyError: if len(obj_list) < listdir_limit: raise # assuming that it's the last page offset = (offset or 0) + listdir_limit else: break except (KeyError, ProtocolError) as err: if isinstance(err, ProtocolError) and err.code != 404: raise raise DoesNotExists(root_id, path[i:]) return root_id if not objects else self.info(root_id)
def resolve_path(self, path, root_id='me/skydrive', objects=False, listdir_limit=500): '''Return id (or metadata) of an object, specified by chain (iterable or fs-style path string) of "name" attributes of its ancestors, or raises DoesNotExists error. Requires many calls to resolve each name in path, so use with care. root_id parameter allows to specify path relative to some folder_id (default: me/skydrive).''' if path: if isinstance(path, types.StringTypes): if not path.startswith('me/skydrive'): # Split path by both kinds of slashes path = filter(None, it.chain.from_iterable(p.split('\\') for p in path.split('/'))) else: root_id, path = path, None if path: try: for i, name in enumerate(path): offset = None while True: obj_list = self.listdir(root_id, offset=offset, limit=listdir_limit) try: root_id = dict(it.imap(op.itemgetter('name', 'id'), obj_list))[name] except KeyError: if len(obj_list) < listdir_limit: raise # assuming that it's the last page offset = (offset or 0) + listdir_limit else: break except (KeyError, ProtocolError) as err: if isinstance(err, ProtocolError) and err.code != 404: raise raise DoesNotExists(root_id, path[i:]) return root_id if not objects else self.info(root_id)
[ "Return", "id", "(", "or", "metadata", ")", "of", "an", "object", "specified", "by", "chain", "(", "iterable", "or", "fs", "-", "style", "path", "string", ")", "of", "name", "attributes", "of", "its", "ancestors", "or", "raises", "DoesNotExists", "error", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L651-L679
[ "def", "resolve_path", "(", "self", ",", "path", ",", "root_id", "=", "'me/skydrive'", ",", "objects", "=", "False", ",", "listdir_limit", "=", "500", ")", ":", "if", "path", ":", "if", "isinstance", "(", "path", ",", "types", ".", "StringTypes", ")", ":", "if", "not", "path", ".", "startswith", "(", "'me/skydrive'", ")", ":", "# Split path by both kinds of slashes", "path", "=", "filter", "(", "None", ",", "it", ".", "chain", ".", "from_iterable", "(", "p", ".", "split", "(", "'\\\\'", ")", "for", "p", "in", "path", ".", "split", "(", "'/'", ")", ")", ")", "else", ":", "root_id", ",", "path", "=", "path", ",", "None", "if", "path", ":", "try", ":", "for", "i", ",", "name", "in", "enumerate", "(", "path", ")", ":", "offset", "=", "None", "while", "True", ":", "obj_list", "=", "self", ".", "listdir", "(", "root_id", ",", "offset", "=", "offset", ",", "limit", "=", "listdir_limit", ")", "try", ":", "root_id", "=", "dict", "(", "it", ".", "imap", "(", "op", ".", "itemgetter", "(", "'name'", ",", "'id'", ")", ",", "obj_list", ")", ")", "[", "name", "]", "except", "KeyError", ":", "if", "len", "(", "obj_list", ")", "<", "listdir_limit", ":", "raise", "# assuming that it's the last page", "offset", "=", "(", "offset", "or", "0", ")", "+", "listdir_limit", "else", ":", "break", "except", "(", "KeyError", ",", "ProtocolError", ")", "as", "err", ":", "if", "isinstance", "(", "err", ",", "ProtocolError", ")", "and", "err", ".", "code", "!=", "404", ":", "raise", "raise", "DoesNotExists", "(", "root_id", ",", "path", "[", "i", ":", "]", ")", "return", "root_id", "if", "not", "objects", "else", "self", ".", "info", "(", "root_id", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
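A sketch of path-based lookups with resolve_path above, assuming an authenticated `api` and that DoesNotExists is importable from the same onedrive.api_v5 module as the API class.

from onedrive.api_v5 import DoesNotExists   # assumed import location

try:
    file_id = api.resolve_path('docs/reports/2015/summary.pdf')   # forward or back slashes both work
    meta = api.resolve_path('docs/reports', objects=True)          # full metadata instead of a bare id
    print(file_id)
    print(meta['type'])                                            # e.g. 'folder' for a resolved directory
except DoesNotExists as err:
    print('missing path component(s): {!r}'.format(err))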
test
OneDriveAPI.listdir
Return a list of objects in the specified folder_id. limit is passed to the API, so might be used as optimization. type_filter can be set to type (str) or sequence of object types to return, post-api-call processing.
onedrive/api_v5.py
def listdir(self, folder_id='me/skydrive', type_filter=None, limit=None, offset=None): '''Return a list of objects in the specified folder_id. limit is passed to the API, so might be used as optimization. type_filter can be set to type (str) or sequence of object types to return, post-api-call processing.''' lst = super(OneDriveAPI, self)\ .listdir(folder_id=folder_id, limit=limit, offset=offset)['data'] if type_filter: if isinstance(type_filter, types.StringTypes): type_filter = {type_filter} lst = list(obj for obj in lst if obj['type'] in type_filter) return lst
def listdir(self, folder_id='me/skydrive', type_filter=None, limit=None, offset=None): '''Return a list of objects in the specified folder_id. limit is passed to the API, so might be used as optimization. type_filter can be set to type (str) or sequence of object types to return, post-api-call processing.''' lst = super(OneDriveAPI, self)\ .listdir(folder_id=folder_id, limit=limit, offset=offset)['data'] if type_filter: if isinstance(type_filter, types.StringTypes): type_filter = {type_filter} lst = list(obj for obj in lst if obj['type'] in type_filter) return lst
[ "Return", "a", "list", "of", "objects", "in", "the", "specified", "folder_id", ".", "limit", "is", "passed", "to", "the", "API", "so", "might", "be", "used", "as", "optimization", ".", "type_filter", "can", "be", "set", "to", "type", "(", "str", ")", "or", "sequence", "of", "object", "types", "to", "return", "post", "-", "api", "-", "call", "processing", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L685-L695
[ "def", "listdir", "(", "self", ",", "folder_id", "=", "'me/skydrive'", ",", "type_filter", "=", "None", ",", "limit", "=", "None", ",", "offset", "=", "None", ")", ":", "lst", "=", "super", "(", "OneDriveAPI", ",", "self", ")", ".", "listdir", "(", "folder_id", "=", "folder_id", ",", "limit", "=", "limit", ",", "offset", "=", "offset", ")", "[", "'data'", "]", "if", "type_filter", ":", "if", "isinstance", "(", "type_filter", ",", "types", ".", "StringTypes", ")", ":", "type_filter", "=", "{", "type_filter", "}", "lst", "=", "list", "(", "obj", "for", "obj", "in", "lst", "if", "obj", "[", "'type'", "]", "in", "type_filter", ")", "return", "lst" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
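A sketch of the post-call filtering done by this higher-level listdir, assuming an authenticated `api`; 'folder' and 'file' are used below as assumed object type names (matching the id prefixes seen elsewhere in the module), and passing a sequence is equally valid.

folders = api.listdir(type_filter='folder')          # keep only folder objects
some = api.listdir(type_filter=('folder', 'file'))   # any sequence of type names works too
for obj in folders:
    print(obj['name'])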
test
OneDriveAPI.copy
Copy specified file (object) to a folder. Note that folders cannot be copied, this is an API limitation.
onedrive/api_v5.py
def copy(self, obj_id, folder_id, move=False): '''Copy specified file (object) to a folder. Note that folders cannot be copied, this is an API limitation.''' if folder_id.startswith('me/skydrive'): log.info( 'Special folder names (like "me/skydrive") dont' ' seem to work with copy/move operations, resolving it to id' ) folder_id = self.info(folder_id)['id'] return super(OneDriveAPI, self).copy(obj_id, folder_id, move=move)
def copy(self, obj_id, folder_id, move=False): '''Copy specified file (object) to a folder. Note that folders cannot be copied, this is an API limitation.''' if folder_id.startswith('me/skydrive'): log.info( 'Special folder names (like "me/skydrive") dont' ' seem to work with copy/move operations, resolving it to id' ) folder_id = self.info(folder_id)['id'] return super(OneDriveAPI, self).copy(obj_id, folder_id, move=move)
[ "Copy", "specified", "file", "(", "object", ")", "to", "a", "folder", ".", "Note", "that", "folders", "cannot", "be", "copied", "this", "is", "an", "API", "limitation", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/api_v5.py#L697-L704
[ "def", "copy", "(", "self", ",", "obj_id", ",", "folder_id", ",", "move", "=", "False", ")", ":", "if", "folder_id", ".", "startswith", "(", "'me/skydrive'", ")", ":", "log", ".", "info", "(", "'Special folder names (like \"me/skydrive\") dont'", "' seem to work with copy/move operations, resolving it to id'", ")", "folder_id", "=", "self", ".", "info", "(", "folder_id", ")", "[", "'id'", "]", "return", "super", "(", "OneDriveAPI", ",", "self", ")", ".", "copy", "(", "obj_id", ",", "folder_id", ",", "move", "=", "move", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
test
ConfigMixin.from_conf
Initialize instance from YAML configuration file, writing updates (only to keys, specified by "conf_update_keys") back to it.
onedrive/conf.py
def from_conf(cls, path=None, **overrides): '''Initialize instance from YAML configuration file, writing updates (only to keys, specified by "conf_update_keys") back to it.''' from onedrive import portalocker import yaml if path is None: path = cls.conf_path_default log.debug('Using default state-file path: %r', path) path = os.path.expanduser(path) with open(path, 'rb') as src: portalocker.lock(src, portalocker.LOCK_SH) yaml_str = src.read() portalocker.unlock(src) conf = yaml.safe_load(yaml_str) conf.setdefault('conf_save', path) conf_cls = dict() for ns, keys in cls.conf_update_keys.viewitems(): for k in keys: try: v = conf.get(ns, dict()).get(k) except AttributeError: if not cls.conf_raise_structure_errors: raise raise KeyError(( 'Unable to get value for configuration parameter' ' "{k}" in section "{ns}", check configuration file (path: {path}) syntax' ' near the aforementioned section/value.' ).format(ns=ns, k=k, path=path)) if v is not None: conf_cls['{}_{}'.format(ns, k)] = conf[ns][k] conf_cls.update(overrides) # Hack to work around YAML parsing client_id of e.g. 000123 as an octal int if isinstance(conf.get('client', dict()).get('id'), (int, long)): log.warn( 'Detected client_id being parsed as an integer (as per yaml), trying to un-mangle it.' ' If requests will still fail afterwards, please replace it in the configuration file (path: %r),' ' also putting single or double quotes (either one should work) around the value.', path ) cid = conf['client']['id'] if not re.search(r'\b(0*)?{:d}\b'.format(cid), yaml_str)\ and re.search(r'\b(0*)?{:o}\b'.format(cid), yaml_str): cid = int('{:0}'.format(cid)) conf['client']['id'] = '{:016d}'.format(cid) self = cls(**conf_cls) self.conf_save = conf['conf_save'] return self
def from_conf(cls, path=None, **overrides): '''Initialize instance from YAML configuration file, writing updates (only to keys, specified by "conf_update_keys") back to it.''' from onedrive import portalocker import yaml if path is None: path = cls.conf_path_default log.debug('Using default state-file path: %r', path) path = os.path.expanduser(path) with open(path, 'rb') as src: portalocker.lock(src, portalocker.LOCK_SH) yaml_str = src.read() portalocker.unlock(src) conf = yaml.safe_load(yaml_str) conf.setdefault('conf_save', path) conf_cls = dict() for ns, keys in cls.conf_update_keys.viewitems(): for k in keys: try: v = conf.get(ns, dict()).get(k) except AttributeError: if not cls.conf_raise_structure_errors: raise raise KeyError(( 'Unable to get value for configuration parameter' ' "{k}" in section "{ns}", check configuration file (path: {path}) syntax' ' near the aforementioned section/value.' ).format(ns=ns, k=k, path=path)) if v is not None: conf_cls['{}_{}'.format(ns, k)] = conf[ns][k] conf_cls.update(overrides) # Hack to work around YAML parsing client_id of e.g. 000123 as an octal int if isinstance(conf.get('client', dict()).get('id'), (int, long)): log.warn( 'Detected client_id being parsed as an integer (as per yaml), trying to un-mangle it.' ' If requests will still fail afterwards, please replace it in the configuration file (path: %r),' ' also putting single or double quotes (either one should work) around the value.', path ) cid = conf['client']['id'] if not re.search(r'\b(0*)?{:d}\b'.format(cid), yaml_str)\ and re.search(r'\b(0*)?{:o}\b'.format(cid), yaml_str): cid = int('{:0}'.format(cid)) conf['client']['id'] = '{:016d}'.format(cid) self = cls(**conf_cls) self.conf_save = conf['conf_save'] return self
[ "Initialize", "instance", "from", "YAML", "configuration", "file", "writing", "updates", "(", "only", "to", "keys", "specified", "by", "conf_update_keys", ")", "back", "to", "it", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/conf.py#L36-L80
[ "def", "from_conf", "(", "cls", ",", "path", "=", "None", ",", "*", "*", "overrides", ")", ":", "from", "onedrive", "import", "portalocker", "import", "yaml", "if", "path", "is", "None", ":", "path", "=", "cls", ".", "conf_path_default", "log", ".", "debug", "(", "'Using default state-file path: %r'", ",", "path", ")", "path", "=", "os", ".", "path", ".", "expanduser", "(", "path", ")", "with", "open", "(", "path", ",", "'rb'", ")", "as", "src", ":", "portalocker", ".", "lock", "(", "src", ",", "portalocker", ".", "LOCK_SH", ")", "yaml_str", "=", "src", ".", "read", "(", ")", "portalocker", ".", "unlock", "(", "src", ")", "conf", "=", "yaml", ".", "safe_load", "(", "yaml_str", ")", "conf", ".", "setdefault", "(", "'conf_save'", ",", "path", ")", "conf_cls", "=", "dict", "(", ")", "for", "ns", ",", "keys", "in", "cls", ".", "conf_update_keys", ".", "viewitems", "(", ")", ":", "for", "k", "in", "keys", ":", "try", ":", "v", "=", "conf", ".", "get", "(", "ns", ",", "dict", "(", ")", ")", ".", "get", "(", "k", ")", "except", "AttributeError", ":", "if", "not", "cls", ".", "conf_raise_structure_errors", ":", "raise", "raise", "KeyError", "(", "(", "'Unable to get value for configuration parameter'", "' \"{k}\" in section \"{ns}\", check configuration file (path: {path}) syntax'", "' near the aforementioned section/value.'", ")", ".", "format", "(", "ns", "=", "ns", ",", "k", "=", "k", ",", "path", "=", "path", ")", ")", "if", "v", "is", "not", "None", ":", "conf_cls", "[", "'{}_{}'", ".", "format", "(", "ns", ",", "k", ")", "]", "=", "conf", "[", "ns", "]", "[", "k", "]", "conf_cls", ".", "update", "(", "overrides", ")", "# Hack to work around YAML parsing client_id of e.g. 000123 as an octal int", "if", "isinstance", "(", "conf", ".", "get", "(", "'client'", ",", "dict", "(", ")", ")", ".", "get", "(", "'id'", ")", ",", "(", "int", ",", "long", ")", ")", ":", "log", ".", "warn", "(", "'Detected client_id being parsed as an integer (as per yaml), trying to un-mangle it.'", "' If requests will still fail afterwards, please replace it in the configuration file (path: %r),'", "' also putting single or double quotes (either one should work) around the value.'", ",", "path", ")", "cid", "=", "conf", "[", "'client'", "]", "[", "'id'", "]", "if", "not", "re", ".", "search", "(", "r'\\b(0*)?{:d}\\b'", ".", "format", "(", "cid", ")", ",", "yaml_str", ")", "and", "re", ".", "search", "(", "r'\\b(0*)?{:o}\\b'", ".", "format", "(", "cid", ")", ",", "yaml_str", ")", ":", "cid", "=", "int", "(", "'{:0}'", ".", "format", "(", "cid", ")", ")", "conf", "[", "'client'", "]", "[", "'id'", "]", "=", "'{:016d}'", ".", "format", "(", "cid", ")", "self", "=", "cls", "(", "*", "*", "conf_cls", ")", "self", ".", "conf_save", "=", "conf", "[", "'conf_save'", "]", "return", "self" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
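A sketch of bootstrapping a client from the YAML config handled by from_conf above; the concrete class combining ConfigMixin with OneDriveAPI is not shown in this section, so the class name and config path below are placeholders, and the override keyword follows the "{section}_{key}" naming the method assembles (the 'client'/'id' pair is the one the code itself references).

from onedrive import api_v5   # module layout assumed from the paths above

# Placeholder name for whichever api_v5 class mixes ConfigMixin into OneDriveAPI.
api = api_v5.PersistentOneDriveAPI.from_conf('~/.lcrc')
# Per-call override of a config value, named as "{section}_{key}":
api = api_v5.PersistentOneDriveAPI.from_conf('~/.lcrc', client_id='00000000AB12CD34')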
test
decode_obj
Convert or dump object to unicode.
onedrive/cli_tool.py
def decode_obj(obj, force=False): 'Convert or dump object to unicode.' if isinstance(obj, unicode): return obj elif isinstance(obj, bytes): if force_encoding is not None: return obj.decode(force_encoding) if chardet: enc_guess = chardet.detect(obj) if enc_guess['confidence'] > 0.7: return obj.decode(enc_guess['encoding']) return obj.decode('utf-8') else: return obj if not force else repr(obj)
def decode_obj(obj, force=False): 'Convert or dump object to unicode.' if isinstance(obj, unicode): return obj elif isinstance(obj, bytes): if force_encoding is not None: return obj.decode(force_encoding) if chardet: enc_guess = chardet.detect(obj) if enc_guess['confidence'] > 0.7: return obj.decode(enc_guess['encoding']) return obj.decode('utf-8') else: return obj if not force else repr(obj)
[ "Convert", "or", "dump", "object", "to", "unicode", "." ]
mk-fg/python-onedrive
python
https://github.com/mk-fg/python-onedrive/blob/74d3f6605b0e8a9031a2aab8092f551293ffb533/onedrive/cli_tool.py#L63-L74
[ "def", "decode_obj", "(", "obj", ",", "force", "=", "False", ")", ":", "if", "isinstance", "(", "obj", ",", "unicode", ")", ":", "return", "obj", "elif", "isinstance", "(", "obj", ",", "bytes", ")", ":", "if", "force_encoding", "is", "not", "None", ":", "return", "obj", ".", "decode", "(", "force_encoding", ")", "if", "chardet", ":", "enc_guess", "=", "chardet", ".", "detect", "(", "obj", ")", "if", "enc_guess", "[", "'confidence'", "]", ">", "0.7", ":", "return", "obj", ".", "decode", "(", "enc_guess", "[", "'encoding'", "]", ")", "return", "obj", ".", "decode", "(", "'utf-8'", ")", "else", ":", "return", "obj", "if", "not", "force", "else", "repr", "(", "obj", ")" ]
74d3f6605b0e8a9031a2aab8092f551293ffb533
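A tiny sketch of decode_obj above; it lives in the CLI module, so the import below assumes onedrive.cli_tool can be imported without side effects, and the result depends on the module-level force_encoding/chardet globals.

from onedrive.cli_tool import decode_obj   # assumed importable as-is

print(decode_obj(b'caf\xc3\xa9'))        # bytes get decoded (forced encoding, chardet guess, or utf-8)
print(decode_obj(u'already unicode'))    # unicode objects pass through unchanged
print(decode_obj(12345, force=True))     # other objects are only repr()'d when force=True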
test
set_drop_target
Recursively create and set the drop target for obj and its children
gui/tools/toolbox.py
def set_drop_target(obj, root, designer, inspector): "Recursively create and set the drop target for obj and its children" if obj._meta.container: dt = ToolBoxDropTarget(obj, root, designer=designer, inspector=inspector) obj.drop_target = dt for child in obj: set_drop_target(child, root, designer, inspector)
def set_drop_target(obj, root, designer, inspector): "Recursively create and set the drop target for obj and its children" if obj._meta.container: dt = ToolBoxDropTarget(obj, root, designer=designer, inspector=inspector) obj.drop_target = dt for child in obj: set_drop_target(child, root, designer, inspector)
[ "Recursively", "create", "and", "set", "the", "drop", "target", "for", "obj", "and", "its", "children" ]
reingart/gui2py
python
https://github.com/reingart/gui2py/blob/aca0a05f6fcde55c94ad7cc058671a06608b01a4/gui/tools/toolbox.py#L227-L234
[ "def", "set_drop_target", "(", "obj", ",", "root", ",", "designer", ",", "inspector", ")", ":", "if", "obj", ".", "_meta", ".", "container", ":", "dt", "=", "ToolBoxDropTarget", "(", "obj", ",", "root", ",", "designer", "=", "designer", ",", "inspector", "=", "inspector", ")", "obj", ".", "drop_target", "=", "dt", "for", "child", "in", "obj", ":", "set_drop_target", "(", "child", ",", "root", ",", "designer", ",", "inspector", ")" ]
aca0a05f6fcde55c94ad7cc058671a06608b01a4
test
ToolBox.tool_click
Event handler tool selection (just add to default handler)
gui/tools/toolbox.py
def tool_click(self, evt): "Event handler tool selection (just add to default handler)" # get the control ctrl = self.menu_ctrl_map[evt.GetId()] # create the control on the parent: if self.inspector.selected_obj: # find the first parent drop target parent = self.inspector.selected_obj while parent.drop_target is None and parent.get_parent(): parent = parent.get_parent() # create the new object obj = ctrl(parent, name="%s_%s" % (ctrl._meta.name.lower(), wx.NewId()), pos=(0, 0), designer=self.designer) # associate the object with the toolbox: if obj._meta.container: dt = ToolBoxDropTarget(obj, self.inspector.root_obj, designer=self.designer, inspector=self.inspector) obj.drop_target = dt # fix width and height if default is not visible w, h = obj.size if w <= 10: obj.width = 100 if h <= 10: obj.height = 20 # update the object at the inspector (to show the new control) if self.inspector: self.inspector.load_object(self.inspector.root_obj) # refresh tree self.inspector.inspect(obj)
def tool_click(self, evt): "Event handler tool selection (just add to default handler)" # get the control ctrl = self.menu_ctrl_map[evt.GetId()] # create the control on the parent: if self.inspector.selected_obj: # find the first parent drop target parent = self.inspector.selected_obj while parent.drop_target is None and parent.get_parent(): parent = parent.get_parent() # create the new object obj = ctrl(parent, name="%s_%s" % (ctrl._meta.name.lower(), wx.NewId()), pos=(0, 0), designer=self.designer) # associate the object with the toolbox: if obj._meta.container: dt = ToolBoxDropTarget(obj, self.inspector.root_obj, designer=self.designer, inspector=self.inspector) obj.drop_target = dt # fix width and height if default is not visible w, h = obj.size if w <= 10: obj.width = 100 if h <= 10: obj.height = 20 # update the object at the inspector (to show the new control) if self.inspector: self.inspector.load_object(self.inspector.root_obj) # refresh tree self.inspector.inspect(obj)
[ "Event", "handler", "tool", "selection", "(", "just", "add", "to", "default", "handler", ")" ]
reingart/gui2py
python
https://github.com/reingart/gui2py/blob/aca0a05f6fcde55c94ad7cc058671a06608b01a4/gui/tools/toolbox.py#L67-L98
[ "def", "tool_click", "(", "self", ",", "evt", ")", ":", "# get the control", "ctrl", "=", "self", ".", "menu_ctrl_map", "[", "evt", ".", "GetId", "(", ")", "]", "# create the control on the parent:", "if", "self", ".", "inspector", ".", "selected_obj", ":", "# find the first parent drop target", "parent", "=", "self", ".", "inspector", ".", "selected_obj", "while", "parent", ".", "drop_target", "is", "None", "and", "parent", ".", "get_parent", "(", ")", ":", "parent", "=", "parent", ".", "get_parent", "(", ")", "# create the new object", "obj", "=", "ctrl", "(", "parent", ",", "name", "=", "\"%s_%s\"", "%", "(", "ctrl", ".", "_meta", ".", "name", ".", "lower", "(", ")", ",", "wx", ".", "NewId", "(", ")", ")", ",", "pos", "=", "(", "0", ",", "0", ")", ",", "designer", "=", "self", ".", "designer", ")", "# associate the object with the toolbox:", "if", "obj", ".", "_meta", ".", "container", ":", "dt", "=", "ToolBoxDropTarget", "(", "obj", ",", "self", ".", "inspector", ".", "root_obj", ",", "designer", "=", "self", ".", "designer", ",", "inspector", "=", "self", ".", "inspector", ")", "obj", ".", "drop_target", "=", "dt", "# fix width and height if default is not visible", "w", ",", "h", "=", "obj", ".", "size", "if", "w", "<=", "10", ":", "obj", ".", "width", "=", "100", "if", "h", "<=", "10", ":", "obj", ".", "height", "=", "20", "# update the object at the inspector (to show the new control)", "if", "self", ".", "inspector", ":", "self", ".", "inspector", ".", "load_object", "(", "self", ".", "inspector", ".", "root_obj", ")", "# refresh tree", "self", ".", "inspector", ".", "inspect", "(", "obj", ")" ]
aca0a05f6fcde55c94ad7cc058671a06608b01a4
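The parent search in tool_click (walking up from the selected object until something with a drop_target is found) is the piece most worth seeing in isolation. The sketch below uses a hypothetical Node class as a stand-in for a gui2py component tree; it is not gui2py code, only a minimal model of the same loop.

class Node:
    "Hypothetical stand-in for a gui2py component (only the attributes the loop needs)"
    def __init__(self, name, parent=None, drop_target=None):
        self.name = name
        self.parent = parent
        self.drop_target = drop_target

    def get_parent(self):
        return self.parent


def find_drop_parent(obj):
    "Walk up from obj until a component that accepts drops is found (same loop as tool_click)"
    parent = obj
    while parent.drop_target is None and parent.get_parent():
        parent = parent.get_parent()
    return parent


# usage: a window that accepts drops, containing a plain label
root = Node("window", drop_target="dummy target")
label = Node("label1", parent=root)
assert find_drop_parent(label) is root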
test
ToolBox.start_drag_opperation
Event handler for drag&drop functionality
gui/tools/toolbox.py
def start_drag_opperation(self, evt): "Event handler for drag&drop functionality" # get the control ctrl = self.menu_ctrl_map[evt.GetToolId()] # create our own data format and use it in a custom data object ldata = wx.CustomDataObject("gui") ldata.SetData(ctrl._meta.name) # only strings are allowed! # Also create a Bitmap version of the drawing bmp = ctrl._image.GetBitmap() # Now make a data object for the bitmap and also a composite # data object holding both of the others. bdata = wx.BitmapDataObject(bmp) data = wx.DataObjectComposite() data.Add(ldata) data.Add(bdata) # And finally, create the drop source and begin the drag # and drop opperation dropSource = wx.DropSource(self) dropSource.SetData(data) if DEBUG: print("Begining DragDrop\n") result = dropSource.DoDragDrop(wx.Drag_AllowMove) if DEBUG: print("DragDrop completed: %d\n" % result) if result == wx.DragMove: if DEBUG: print "dragmove!" self.Refresh()
def start_drag_opperation(self, evt): "Event handler for drag&drop functionality" # get the control ctrl = self.menu_ctrl_map[evt.GetToolId()] # create our own data format and use it in a custom data object ldata = wx.CustomDataObject("gui") ldata.SetData(ctrl._meta.name) # only strings are allowed! # Also create a Bitmap version of the drawing bmp = ctrl._image.GetBitmap() # Now make a data object for the bitmap and also a composite # data object holding both of the others. bdata = wx.BitmapDataObject(bmp) data = wx.DataObjectComposite() data.Add(ldata) data.Add(bdata) # And finally, create the drop source and begin the drag # and drop opperation dropSource = wx.DropSource(self) dropSource.SetData(data) if DEBUG: print("Begining DragDrop\n") result = dropSource.DoDragDrop(wx.Drag_AllowMove) if DEBUG: print("DragDrop completed: %d\n" % result) if result == wx.DragMove: if DEBUG: print "dragmove!" self.Refresh()
[ "Event", "handler", "for", "drag&drop", "functionality" ]
reingart/gui2py
python
https://github.com/reingart/gui2py/blob/aca0a05f6fcde55c94ad7cc058671a06608b01a4/gui/tools/toolbox.py#L101-L131
[ "def", "start_drag_opperation", "(", "self", ",", "evt", ")", ":", "# get the control", "ctrl", "=", "self", ".", "menu_ctrl_map", "[", "evt", ".", "GetToolId", "(", ")", "]", "# create our own data format and use it in a custom data object", "ldata", "=", "wx", ".", "CustomDataObject", "(", "\"gui\"", ")", "ldata", ".", "SetData", "(", "ctrl", ".", "_meta", ".", "name", ")", "# only strings are allowed!", "# Also create a Bitmap version of the drawing", "bmp", "=", "ctrl", ".", "_image", ".", "GetBitmap", "(", ")", "# Now make a data object for the bitmap and also a composite", "# data object holding both of the others.", "bdata", "=", "wx", ".", "BitmapDataObject", "(", "bmp", ")", "data", "=", "wx", ".", "DataObjectComposite", "(", ")", "data", ".", "Add", "(", "ldata", ")", "data", ".", "Add", "(", "bdata", ")", "# And finally, create the drop source and begin the drag", "# and drop opperation", "dropSource", "=", "wx", ".", "DropSource", "(", "self", ")", "dropSource", ".", "SetData", "(", "data", ")", "if", "DEBUG", ":", "print", "(", "\"Begining DragDrop\\n\"", ")", "result", "=", "dropSource", ".", "DoDragDrop", "(", "wx", ".", "Drag_AllowMove", ")", "if", "DEBUG", ":", "print", "(", "\"DragDrop completed: %d\\n\"", "%", "result", ")", "if", "result", "==", "wx", ".", "DragMove", ":", "if", "DEBUG", ":", "print", "\"dragmove!\"", "self", ".", "Refresh", "(", ")" ]
aca0a05f6fcde55c94ad7cc058671a06608b01a4
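The composite-data drag pattern above can be reduced to a small helper. This is only a sketch for a wxPython classic environment: source is assumed to be a live wx.Window, and the "gui" format string simply mirrors the custom format used by the toolbox.

import wx

def begin_toolbox_drag(source, control_name, bitmap):
    "Package a custom-format string plus a bitmap and start a drag from source"
    ldata = wx.CustomDataObject("gui")   # same custom format name as the toolbox
    ldata.SetData(control_name)          # wx classic accepts a plain string here
    bdata = wx.BitmapDataObject(bitmap)
    data = wx.DataObjectComposite()      # carry both payloads in a single drag
    data.Add(ldata)
    data.Add(bdata)
    drop_source = wx.DropSource(source)
    drop_source.SetData(data)
    # returns a wx.DragResult (wx.DragMove when the target accepted a move)
    return drop_source.DoDragDrop(wx.Drag_AllowMove)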
test
ToolBox.set_default_tlw
track default top level window for toolbox menu default action
gui/tools/toolbox.py
def set_default_tlw(self, tlw, designer, inspector): "track default top level window for toolbox menu default action" self.designer = designer self.inspector = inspector
def set_default_tlw(self, tlw, designer, inspector): "track default top level window for toolbox menu default action" self.designer = designer self.inspector = inspector
[ "track", "default", "top", "level", "window", "for", "toolbox", "menu", "default", "action" ]
reingart/gui2py
python
https://github.com/reingart/gui2py/blob/aca0a05f6fcde55c94ad7cc058671a06608b01a4/gui/tools/toolbox.py#L133-L136
[ "def", "set_default_tlw", "(", "self", ",", "tlw", ",", "designer", ",", "inspector", ")", ":", "self", ".", "designer", "=", "designer", "self", ".", "inspector", "=", "inspector" ]
aca0a05f6fcde55c94ad7cc058671a06608b01a4
test
ToolBoxDropTarget.copy
Return a copy of the drop target (to avoid wx problems on rebuild)
gui/tools/toolbox.py
def copy(self): "Return a copy of the drop target (to avoid wx problems on rebuild)" return ToolBoxDropTarget(self.dv, self.root, self.designer, self.inspector)
def copy(self): "Return a copy of the drop target (to avoid wx problems on rebuild)" return ToolBoxDropTarget(self.dv, self.root, self.designer, self.inspector)
[ "Return", "a", "copy", "of", "the", "drop", "target", "(", "to", "avoid", "wx", "problems", "on", "rebuild", ")" ]
reingart/gui2py
python
https://github.com/reingart/gui2py/blob/aca0a05f6fcde55c94ad7cc058671a06608b01a4/gui/tools/toolbox.py#L219-L222
[ "def", "copy", "(", "self", ")", ":", "return", "ToolBoxDropTarget", "(", "self", ".", "dv", ",", "self", ".", "root", ",", "self", ".", "designer", ",", "self", ".", "inspector", ")" ]
aca0a05f6fcde55c94ad7cc058671a06608b01a4
test
inspect
Open the inspector windows for a given object
gui/__init__.py
def inspect(obj): "Open the inspector windows for a given object" from gui.tools.inspector import InspectorTool inspector = InspectorTool() inspector.show(obj) return inspector
def inspect(obj): "Open the inspector windows for a given object" from gui.tools.inspector import InspectorTool inspector = InspectorTool() inspector.show(obj) return inspector
[ "Open", "the", "inspector", "windows", "for", "a", "given", "object" ]
reingart/gui2py
python
https://github.com/reingart/gui2py/blob/aca0a05f6fcde55c94ad7cc058671a06608b01a4/gui/__init__.py#L59-L64
[ "def", "inspect", "(", "obj", ")", ":", "from", "gui", ".", "tools", ".", "inspector", "import", "InspectorTool", "inspector", "=", "InspectorTool", "(", ")", "inspector", ".", "show", "(", "obj", ")", "return", "inspector" ]
aca0a05f6fcde55c94ad7cc058671a06608b01a4
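A usage sketch for inspect(): the Window constructor arguments and the main_loop() call are assumptions about the rest of the gui2py API, not something shown in this record.

import gui

w = gui.Window(title="demo", name="demo_win")   # assumed constructor arguments
w.show()
inspector = gui.inspect(w)   # opens the inspector windows for the live object
gui.main_loop()              # assumed event-loop entry point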
test
shell
Open a shell
gui/__init__.py
def shell(): "Open a shell" from gui.tools.debug import Shell shell = Shell() shell.show() return shell
def shell(): "Open a shell" from gui.tools.debug import Shell shell = Shell() shell.show() return shell
[ "Open", "a", "shell" ]
reingart/gui2py
python
https://github.com/reingart/gui2py/blob/aca0a05f6fcde55c94ad7cc058671a06608b01a4/gui/__init__.py#L66-L71
[ "def", "shell", "(", ")", ":", "from", "gui", ".", "tools", ".", "debug", "import", "Shell", "shell", "=", "Shell", "(", ")", "shell", ".", "show", "(", ")", "return", "shell" ]
aca0a05f6fcde55c94ad7cc058671a06608b01a4
test
migrate_window
Take a pythoncard background resource and convert to a gui2py window
gui/tools/migrate.py
def migrate_window(bg): "Take a pythoncard background resource and convert to a gui2py window" ret = {} for k, v in bg.items(): if k == 'type': v = WIN_MAP[v]._meta.name elif k == 'menubar': menus = v['menus'] v = [migrate_control(menu) for menu in menus] elif k == 'components': v = [migrate_control(comp) for comp in v] else: k = SPEC_MAP['Widget'].get(k, k) ret[k] = v return ret
def migrate_window(bg): "Take a pythoncard background resource and convert to a gui2py window" ret = {} for k, v in bg.items(): if k == 'type': v = WIN_MAP[v]._meta.name elif k == 'menubar': menus = v['menus'] v = [migrate_control(menu) for menu in menus] elif k == 'components': v = [migrate_control(comp) for comp in v] else: k = SPEC_MAP['Widget'].get(k, k) ret[k] = v return ret
[ "Take", "a", "pythoncard", "background", "resource", "and", "convert", "to", "a", "gui2py", "window" ]
reingart/gui2py
python
https://github.com/reingart/gui2py/blob/aca0a05f6fcde55c94ad7cc058671a06608b01a4/gui/tools/migrate.py#L149-L163
[ "def", "migrate_window", "(", "bg", ")", ":", "ret", "=", "{", "}", "for", "k", ",", "v", "in", "bg", ".", "items", "(", ")", ":", "if", "k", "==", "'type'", ":", "v", "=", "WIN_MAP", "[", "v", "]", ".", "_meta", ".", "name", "elif", "k", "==", "'menubar'", ":", "menus", "=", "v", "[", "'menus'", "]", "v", "=", "[", "migrate_control", "(", "menu", ")", "for", "menu", "in", "menus", "]", "elif", "k", "==", "'components'", ":", "v", "=", "[", "migrate_control", "(", "comp", ")", "for", "comp", "in", "v", "]", "else", ":", "k", "=", "SPEC_MAP", "[", "'Widget'", "]", ".", "get", "(", "k", ",", "k", ")", "ret", "[", "k", "]", "=", "v", "return", "ret" ]
aca0a05f6fcde55c94ad7cc058671a06608b01a4
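For orientation, migrate_window expects something shaped like a PythonCard background resource. The dictionary below is only an approximation: the keys 'type', 'menubar' and 'components' come from the code above, the remaining keys are invented, and the actual output depends on WIN_MAP and SPEC_MAP, which are not part of this record.

pythoncard_background = {
    'type': 'Background',        # looked up in WIN_MAP to pick the gui2py window type
    'name': 'bgMain',
    'title': 'Converted window',
    'size': (400, 300),
    'menubar': {'menus': []},    # each menu would be passed through migrate_control
    'components': [              # child controls, also passed through migrate_control
        {'type': 'Button', 'name': 'btnOk', 'label': 'OK', 'position': (10, 10)},
    ],
}
# window_spec = migrate_window(pythoncard_background)   # requires the real WIN_MAP/SPEC_MAP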
test
migrate_control
Take a pythoncard component resource and convert to a gui2py control

gui/tools/migrate.py
def migrate_control(comp):
    "Take a pythoncard component resource and convert to a gui2py control"
    ret = {}
    for k, v in comp.items():
        if k == 'type':
            v = CTRL_MAP[v]._meta.name
        elif k == 'menubar':
            pass
        elif k == 'components':
            v = [migrate_control(comp) for comp in v]
        else:
            k = SPEC_MAP['Widget'].get(k, k)
            if comp['type'] in SPEC_MAP:
                k = SPEC_MAP[comp['type']].get(k, k)
            if k == 'font':
                v = migrate_font(v)
        ret[k] = v
    return ret
def migrate_control(comp):
    "Take a pythoncard component resource and convert to a gui2py control"
    ret = {}
    for k, v in comp.items():
        if k == 'type':
            v = CTRL_MAP[v]._meta.name
        elif k == 'menubar':
            pass
        elif k == 'components':
            v = [migrate_control(comp) for comp in v]
        else:
            k = SPEC_MAP['Widget'].get(k, k)
            if comp['type'] in SPEC_MAP:
                k = SPEC_MAP[comp['type']].get(k, k)
            if k == 'font':
                v = migrate_font(v)
        ret[k] = v
    return ret
[ "Take", "a", "pythoncard", "component", "resource", "and", "convert", "to", "a", "gui2py", "control" ]
reingart/gui2py
python
https://github.com/reingart/gui2py/blob/aca0a05f6fcde55c94ad7cc058671a06608b01a4/gui/tools/migrate.py#L166-L183
[ "def", "migrate_control", "(", "comp", ")", ":", "ret", "=", "{", "}", "for", "k", ",", "v", "in", "comp", ".", "items", "(", ")", ":", "if", "k", "==", "'type'", ":", "v", "=", "CTRL_MAP", "[", "v", "]", ".", "_meta", ".", "name", "elif", "k", "==", "'menubar'", ":", "pass", "elif", "k", "==", "'components'", ":", "v", "=", "[", "migrate_control", "(", "comp", ")", "for", "comp", "in", "v", "]", "else", ":", "k", "=", "SPEC_MAP", "[", "'Widget'", "]", ".", "get", "(", "k", ",", "k", ")", "if", "comp", "[", "'type'", "]", "in", "SPEC_MAP", ":", "k", "=", "SPEC_MAP", "[", "comp", "[", "'type'", "]", "]", ".", "get", "(", "k", ",", "k", ")", "if", "k", "==", "'font'", ":", "v", "=", "migrate_font", "(", "v", ")", "ret", "[", "k", "]", "=", "v", "return", "ret" ]
aca0a05f6fcde55c94ad7cc058671a06608b01a4
test
migrate_font
Convert PythonCard font description to gui2py style
gui/tools/migrate.py
def migrate_font(font): "Convert PythonCard font description to gui2py style" if 'faceName' in font: font['face'] = font.pop('faceName') if 'family' in font and font['family'] == 'sansSerif': font['family'] = 'sans serif' return font
def migrate_font(font): "Convert PythonCard font description to gui2py style" if 'faceName' in font: font['face'] = font.pop('faceName') if 'family' in font and font['family'] == 'sansSerif': font['family'] = 'sans serif' return font
[ "Convert", "PythonCard", "font", "description", "to", "gui2py", "style" ]
reingart/gui2py
python
https://github.com/reingart/gui2py/blob/aca0a05f6fcde55c94ad7cc058671a06608b01a4/gui/tools/migrate.py#L186-L192
[ "def", "migrate_font", "(", "font", ")", ":", "if", "'faceName'", "in", "font", ":", "font", "[", "'face'", "]", "=", "font", ".", "pop", "(", "'faceName'", ")", "if", "'family'", "in", "font", "and", "font", "[", "'family'", "]", "==", "'sansSerif'", ":", "font", "[", "'family'", "]", "=", "'sans serif'", "return", "font" ]
aca0a05f6fcde55c94ad7cc058671a06608b01a4
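migrate_font is self-contained, so its behaviour can be pinned down exactly; the expected output below follows directly from the code above.

from gui.tools.migrate import migrate_font

old_font = {'faceName': 'Arial', 'family': 'sansSerif', 'size': 10}
new_font = migrate_font(old_font)
# new_font == {'face': 'Arial', 'family': 'sans serif', 'size': 10}
# note: the dict is updated in place (pop/assignment), so old_font is new_font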
test
HtmlBox.load_page
Loads HTML page from location and then displays it
gui/controls/htmlbox.py
def load_page(self, location): "Loads HTML page from location and then displays it" if not location: self.wx_obj.SetPage("") else: self.wx_obj.LoadPage(location)
def load_page(self, location): "Loads HTML page from location and then displays it" if not location: self.wx_obj.SetPage("") else: self.wx_obj.LoadPage(location)
[ "Loads", "HTML", "page", "from", "location", "and", "then", "displays", "it" ]
reingart/gui2py
python
https://github.com/reingart/gui2py/blob/aca0a05f6fcde55c94ad7cc058671a06608b01a4/gui/controls/htmlbox.py#L67-L72
[ "def", "load_page", "(", "self", ",", "location", ")", ":", "if", "not", "location", ":", "self", ".", "wx_obj", ".", "SetPage", "(", "\"\"", ")", "else", ":", "self", ".", "wx_obj", ".", "LoadPage", "(", "location", ")" ]
aca0a05f6fcde55c94ad7cc058671a06608b01a4
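A usage sketch for load_page, following the parent/name construction pattern seen in tool_click; whether HtmlBox is exported at package level, and its constructor arguments, are assumptions rather than facts from this record.

import gui

win = gui.Window(title="html demo", name="html_win")                 # assumed constructor
html = gui.HtmlBox(parent=win, name="html1", width=400, height=300)  # assumed arguments
html.load_page("http://www.example.com/")   # fetch and render a remote page
html.load_page("")                          # a falsy location clears the view (SetPage(""))
win.show()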
test
PropertyEditorPanel.edit
Programmatically select a (default) property to start editing it
gui/tools/propeditor.py
def edit(self, name=""):
    "Programmatically select a (default) property to start editing it"
    # for more info see DoSelectAndEdit in propgrid.cpp
    for name in (name, "label", "value", "text", "title", "filename", "name"):
        prop = self.pg.GetPropertyByName(name)
        if prop is not None:
            break
    self.Parent.SetFocus()
    self.Parent.Raise()
    self.pg.SetFocus()
    # give time to the ui to show the prop grid and set focus:
    wx.CallLater(250, self.select, prop.GetName())
def edit(self, name=""):
    "Programmatically select a (default) property to start editing it"
    # for more info see DoSelectAndEdit in propgrid.cpp
    for name in (name, "label", "value", "text", "title", "filename", "name"):
        prop = self.pg.GetPropertyByName(name)
        if prop is not None:
            break
    self.Parent.SetFocus()
    self.Parent.Raise()
    self.pg.SetFocus()
    # give time to the ui to show the prop grid and set focus:
    wx.CallLater(250, self.select, prop.GetName())
[ "Programmatically", "select", "a", "(", "default", ")", "property", "to", "start", "editing", "it" ]
reingart/gui2py
python
https://github.com/reingart/gui2py/blob/aca0a05f6fcde55c94ad7cc058671a06608b01a4/gui/tools/propeditor.py#L183-L195
[ "def", "edit", "(", "self", ",", "name", "=", "\"\"", ")", ":", "# for more info see DoSelectAndEdit in propgrid.cpp", "for", "name", "in", "(", "name", ",", "\"label\"", ",", "\"value\"", ",", "\"text\"", ",", "\"title\"", ",", "\"filename\"", ",", "\"name\"", ")", ":", "prop", "=", "self", ".", "pg", ".", "GetPropertyByName", "(", "name", ")", "if", "prop", "is", "not", "None", ":", "break", "self", ".", "Parent", ".", "SetFocus", "(", ")", "self", ".", "Parent", ".", "Raise", "(", ")", "self", ".", "pg", ".", "SetFocus", "(", ")", "# give time to the ui to show the prop grid and set focus:", "wx", ".", "CallLater", "(", "250", ",", "self", ".", "select", ",", "prop", ".", "GetName", "(", ")", ")" ]
aca0a05f6fcde55c94ad7cc058671a06608b01a4
test
PropertyEditorPanel.select
Select a property (and start the editor)
gui/tools/propeditor.py
def select(self, name, flags=0): "Select a property (and start the editor)" # do not call this directly from another window, use edit() instead # // wxPropertyGrid::DoSelectProperty flags (selFlags) -see propgrid.h- wxPG_SEL_FOCUS=0x0001 # Focuses to created editor wxPG_SEL_FORCE=0x0002 # Forces deletion and recreation of editor flags |= wxPG_SEL_FOCUS # | wxPG_SEL_FORCE prop = self.pg.GetPropertyByName(name) self.pg.SelectProperty(prop, flags) if DEBUG: print "selected!", prop
def select(self, name, flags=0): "Select a property (and start the editor)" # do not call this directly from another window, use edit() instead # // wxPropertyGrid::DoSelectProperty flags (selFlags) -see propgrid.h- wxPG_SEL_FOCUS=0x0001 # Focuses to created editor wxPG_SEL_FORCE=0x0002 # Forces deletion and recreation of editor flags |= wxPG_SEL_FOCUS # | wxPG_SEL_FORCE prop = self.pg.GetPropertyByName(name) self.pg.SelectProperty(prop, flags) if DEBUG: print "selected!", prop
[ "Select", "a", "property", "(", "and", "start", "the", "editor", ")" ]
reingart/gui2py
python
https://github.com/reingart/gui2py/blob/aca0a05f6fcde55c94ad7cc058671a06608b01a4/gui/tools/propeditor.py#L197-L206
[ "def", "select", "(", "self", ",", "name", ",", "flags", "=", "0", ")", ":", "# do not call this directly from another window, use edit() instead", "# // wxPropertyGrid::DoSelectProperty flags (selFlags) -see propgrid.h-", "wxPG_SEL_FOCUS", "=", "0x0001", "# Focuses to created editor", "wxPG_SEL_FORCE", "=", "0x0002", "# Forces deletion and recreation of editor", "flags", "|=", "wxPG_SEL_FOCUS", "# | wxPG_SEL_FORCE", "prop", "=", "self", ".", "pg", ".", "GetPropertyByName", "(", "name", ")", "self", ".", "pg", ".", "SelectProperty", "(", "prop", ",", "flags", ")", "if", "DEBUG", ":", "print", "\"selected!\"", ",", "prop" ]
aca0a05f6fcde55c94ad7cc058671a06608b01a4
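The flag handling in select() is easy to misread; the constants below mirror the values hard-coded above (taken from propgrid.h), and the arithmetic shows what edit() effectively requests.

wxPG_SEL_FOCUS = 0x0001   # focus the created editor
wxPG_SEL_FORCE = 0x0002   # force deletion and recreation of the editor

flags = 0
flags |= wxPG_SEL_FOCUS                 # select() always ORs this in before SelectProperty
print(hex(flags))                       # 0x1
print(hex(flags | wxPG_SEL_FORCE))      # 0x3, if editor recreation were also wanted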