| commit (stringlengths 40-40) | old_file (stringlengths 4-118) | new_file (stringlengths 4-118) | old_contents (stringlengths 0-2.94k) | new_contents (stringlengths 1-4.43k) | subject (stringlengths 15-444) | message (stringlengths 16-3.45k) | lang (stringclasses 1 value) | license (stringclasses 13 values) | repos (stringlengths 5-43.2k) | prompt (stringlengths 17-4.58k) | response (stringlengths 1-4.43k) | prompt_tagged (stringlengths 58-4.62k) | response_tagged (stringlengths 1-4.43k) | text (stringlengths 132-7.29k) | text_tagged (stringlengths 173-7.33k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
df4e3d8a1db1b9195b70d95eb4fdcd45a7ca4b23
|
util/device_profile_data.py
|
util/device_profile_data.py
|
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
"""Parse device output to extract LLVM profile data.
The profile data obtained from the device is raw. Thus, it must be indexed
before it can be used to generate coverage reports.
Typical usage:
stty -F /dev/ttyUSB0 icanon
cat /dev/ttyUSB0 | ./device_profile_data.py foo.profraw
llvm-profdata merge -sparse foo.profraw -o foo.profdata
llvm-cov show OBJECT_FILE -instr-profile=foo.profdata
"""
import argparse
import zlib
import re
import sys
def extract_profile_data(device_output_file):
"""Parse device output to extract LLVM profile data.
This function returns the LLVM profile data as a byte array after
verifying its length and checksum.
Args:
device_output_file: File that contains the device output.
Returns:
LLVM profile data.
Raises:
ValueError: If LLVM profile data cannot be detected in the device
output or its length or checksum is incorrect.
"""
lines = device_output_file.read().decode('utf-8', 'ignore').splitlines()
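    # Search from the end of the output: with the reversed scan and the
    # break below, the most recent profile dump in the log is the one used.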
for i, line in zip(reversed(range(len(lines))), reversed(lines)):
match = re.match(
r"""
LLVM\ profile\ data
\ \(length:\ (?P<length>\d*),
\ CRC32:\ (?P<checksum>[0-9A-F]*)\):
""", line, re.VERBOSE)
if match:
exp_length = int(match.group('length'))
exp_checksum = match.group('checksum')
byte_array = bytes.fromhex(lines[i + 1])
break
# Check if output has LLVM profile data
if not match:
raise ValueError(
'Could not detect the LLVM profile data in device output.')
# Check length
act_length = len(byte_array)
if act_length != exp_length:
        raise ValueError('Length check failed! '
                         f'Expected: {exp_length}, actual: {act_length}.')
# Check checksum
act_checksum = zlib.crc32(byte_array).to_bytes(4,
byteorder='little',
signed=False).hex().upper()
if act_checksum != exp_checksum:
        raise ValueError('Checksum check failed! '
                         f'Expected: {exp_checksum}, actual: {act_checksum}.')
return byte_array
def main():
"""Parses command line arguments and extracts the profile data from device
output."""
argparser = argparse.ArgumentParser(
description='Extract LLVM profile data from device output.')
argparser.add_argument(dest='output_file',
type=argparse.FileType('wb'),
default=sys.stdout,
help='output file for writing LLVM profile data')
argparser.add_argument('--input_file',
type=argparse.FileType('rb'),
default=sys.stdin.buffer,
help='device output')
args = argparser.parse_args()
args.output_file.write(extract_profile_data(args.input_file))
if __name__ == '__main__':
main()
|
Implement a script for extracting profile data from device output
|
[util] Implement a script for extracting profile data from device output
This change introduces a python script that extracts profile data from
device output.
Signed-off-by: Alphan Ulusoy <23b245cc5a07aacf75a9db847b24c67dee1707bf@google.com>
|
Python
|
apache-2.0
|
lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan
|
[util] Implement a script for extracting profile data from device output
This change introduces a python script that extracts profile data from
device output.
Signed-off-by: Alphan Ulusoy <23b245cc5a07aacf75a9db847b24c67dee1707bf@google.com>
|
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
"""Parse device output to extract LLVM profile data.
The profile data obtained from the device is raw. Thus, it must be indexed
before it can be used to generate coverage reports.
Typical usage:
stty -F /dev/ttyUSB0 icanon
cat /dev/ttyUSB0 | ./device_profile_data.py foo.profraw
llvm-profdata merge -sparse foo.profraw -o foo.profdata
llvm-cov show OBJECT_FILE -instr-profile=foo.profdata
"""
import argparse
import zlib
import re
import sys
def extract_profile_data(device_output_file):
"""Parse device output to extract LLVM profile data.
This function returns the LLVM profile data as a byte array after
verifying its length and checksum.
Args:
device_output_file: File that contains the device output.
Returns:
LLVM profile data.
Raises:
ValueError: If LLVM profile data cannot be detected in the device
output or its length or checksum is incorrect.
"""
lines = device_output_file.read().decode('utf-8', 'ignore').splitlines()
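    # Search from the end of the output: with the reversed scan and the
    # break below, the most recent profile dump in the log is the one used.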
for i, line in zip(reversed(range(len(lines))), reversed(lines)):
match = re.match(
r"""
LLVM\ profile\ data
\ \(length:\ (?P<length>\d*),
\ CRC32:\ (?P<checksum>[0-9A-F]*)\):
""", line, re.VERBOSE)
if match:
exp_length = int(match.group('length'))
exp_checksum = match.group('checksum')
byte_array = bytes.fromhex(lines[i + 1])
break
# Check if output has LLVM profile data
if not match:
raise ValueError(
'Could not detect the LLVM profile data in device output.')
# Check length
act_length = len(byte_array)
if act_length != exp_length:
        raise ValueError('Length check failed! '
                         f'Expected: {exp_length}, actual: {act_length}.')
# Check checksum
act_checksum = zlib.crc32(byte_array).to_bytes(4,
byteorder='little',
signed=False).hex().upper()
if act_checksum != exp_checksum:
        raise ValueError('Checksum check failed! '
                         f'Expected: {exp_checksum}, actual: {act_checksum}.')
return byte_array
def main():
"""Parses command line arguments and extracts the profile data from device
output."""
argparser = argparse.ArgumentParser(
description='Extract LLVM profile data from device output.')
argparser.add_argument(dest='output_file',
type=argparse.FileType('wb'),
default=sys.stdout,
help='output file for writing LLVM profile data')
argparser.add_argument('--input_file',
type=argparse.FileType('rb'),
default=sys.stdin.buffer,
help='device output')
args = argparser.parse_args()
args.output_file.write(extract_profile_data(args.input_file))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>[util] Implement a script for extracting profile data from device output
This change introduces a python script that extracts profile data from
device output.
Signed-off-by: Alphan Ulusoy <23b245cc5a07aacf75a9db847b24c67dee1707bf@google.com><commit_after>
|
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
"""Parse device output to extract LLVM profile data.
The profile data obtained from the device is raw. Thus, it must be indexed
before it can be used to generate coverage reports.
Typical usage:
stty -F /dev/ttyUSB0 icanon
cat /dev/ttyUSB0 | ./device_profile_data.py foo.profraw
llvm-profdata merge -sparse foo.profraw -o foo.profdata
llvm-cov show OBJECT_FILE -instr-profile=foo.profdata
"""
import argparse
import zlib
import re
import sys
def extract_profile_data(device_output_file):
"""Parse device output to extract LLVM profile data.
This function returns the LLVM profile data as a byte array after
verifying its length and checksum.
Args:
device_output_file: File that contains the device output.
Returns:
LLVM profile data.
Raises:
ValueError: If LLVM profile data cannot be detected in the device
output or its length or checksum is incorrect.
"""
lines = device_output_file.read().decode('utf-8', 'ignore').splitlines()
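    # Search from the end of the output: with the reversed scan and the
    # break below, the most recent profile dump in the log is the one used.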
for i, line in zip(reversed(range(len(lines))), reversed(lines)):
match = re.match(
r"""
LLVM\ profile\ data
\ \(length:\ (?P<length>\d*),
\ CRC32:\ (?P<checksum>[0-9A-F]*)\):
""", line, re.VERBOSE)
if match:
exp_length = int(match.group('length'))
exp_checksum = match.group('checksum')
byte_array = bytes.fromhex(lines[i + 1])
break
# Check if output has LLVM profile data
if not match:
raise ValueError(
'Could not detect the LLVM profile data in device output.')
# Check length
act_length = len(byte_array)
if act_length != exp_length:
        raise ValueError('Length check failed! '
                         f'Expected: {exp_length}, actual: {act_length}.')
# Check checksum
act_checksum = zlib.crc32(byte_array).to_bytes(4,
byteorder='little',
signed=False).hex().upper()
if act_checksum != exp_checksum:
        raise ValueError('Checksum check failed! '
                         f'Expected: {exp_checksum}, actual: {act_checksum}.')
return byte_array
def main():
"""Parses command line arguments and extracts the profile data from device
output."""
argparser = argparse.ArgumentParser(
description='Extract LLVM profile data from device output.')
argparser.add_argument(dest='output_file',
type=argparse.FileType('wb'),
default=sys.stdout,
help='output file for writing LLVM profile data')
argparser.add_argument('--input_file',
type=argparse.FileType('rb'),
default=sys.stdin.buffer,
help='device output')
args = argparser.parse_args()
args.output_file.write(extract_profile_data(args.input_file))
if __name__ == '__main__':
main()
|
[util] Implement a script for extracting profile data from device output
This change introduces a python script that extracts profile data from
device output.
Signed-off-by: Alphan Ulusoy <23b245cc5a07aacf75a9db847b24c67dee1707bf@google.com>#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
"""Parse device output to extract LLVM profile data.
The profile data obtained from the device is raw. Thus, it must be indexed
before it can be used to generate coverage reports.
Typical usage:
stty -F /dev/ttyUSB0 icanon
cat /dev/ttyUSB0 | ./device_profile_data.py foo.profraw
llvm-profdata merge -sparse foo.profraw -o foo.profdata
llvm-cov show OBJECT_FILE -instr-profile=foo.profdata
"""
import argparse
import zlib
import re
import sys
def extract_profile_data(device_output_file):
"""Parse device output to extract LLVM profile data.
This function returns the LLVM profile data as a byte array after
verifying its length and checksum.
Args:
device_output_file: File that contains the device output.
Returns:
LLVM profile data.
Raises:
ValueError: If LLVM profile data cannot be detected in the device
output or its length or checksum is incorrect.
"""
lines = device_output_file.read().decode('utf-8', 'ignore').splitlines()
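    # Search from the end of the output: with the reversed scan and the
    # break below, the most recent profile dump in the log is the one used.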
for i, line in zip(reversed(range(len(lines))), reversed(lines)):
match = re.match(
r"""
LLVM\ profile\ data
\ \(length:\ (?P<length>\d*),
\ CRC32:\ (?P<checksum>[0-9A-F]*)\):
""", line, re.VERBOSE)
if match:
exp_length = int(match.group('length'))
exp_checksum = match.group('checksum')
byte_array = bytes.fromhex(lines[i + 1])
break
# Check if output has LLVM profile data
if not match:
raise ValueError(
'Could not detect the LLVM profile data in device output.')
# Check length
act_length = len(byte_array)
if act_length != exp_length:
        raise ValueError('Length check failed! '
                         f'Expected: {exp_length}, actual: {act_length}.')
# Check checksum
act_checksum = zlib.crc32(byte_array).to_bytes(4,
byteorder='little',
signed=False).hex().upper()
if act_checksum != exp_checksum:
        raise ValueError('Checksum check failed! '
                         f'Expected: {exp_checksum}, actual: {act_checksum}.')
return byte_array
def main():
"""Parses command line arguments and extracts the profile data from device
output."""
argparser = argparse.ArgumentParser(
description='Extract LLVM profile data from device output.')
argparser.add_argument(dest='output_file',
type=argparse.FileType('wb'),
default=sys.stdout,
help='output file for writing LLVM profile data')
argparser.add_argument('--input_file',
type=argparse.FileType('rb'),
default=sys.stdin.buffer,
help='device output')
args = argparser.parse_args()
args.output_file.write(extract_profile_data(args.input_file))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>[util] Implement a script for extracting profile data from device output
This change introduces a python script that extracts profile data from
device output.
Signed-off-by: Alphan Ulusoy <23b245cc5a07aacf75a9db847b24c67dee1707bf@google.com><commit_after>#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
"""Parse device output to extract LLVM profile data.
The profile data obtained from the device is raw. Thus, it must be indexed
before it can be used to generate coverage reports.
Typical usage:
stty -F /dev/ttyUSB0 icanon
cat /dev/ttyUSB0 | ./device_profile_data.py foo.profraw
llvm-profdata merge -sparse foo.profraw -o foo.profdata
llvm-cov show OBJECT_FILE -instr-profile=foo.profdata
"""
import argparse
import zlib
import re
import sys
def extract_profile_data(device_output_file):
"""Parse device output to extract LLVM profile data.
This function returns the LLVM profile data as a byte array after
verifying its length and checksum.
Args:
device_output_file: File that contains the device output.
Returns:
LLVM profile data.
Raises:
ValueError: If LLVM profile data cannot be detected in the device
output or its length or checksum is incorrect.
"""
lines = device_output_file.read().decode('utf-8', 'ignore').splitlines()
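    # Search from the end of the output: with the reversed scan and the
    # break below, the most recent profile dump in the log is the one used.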
for i, line in zip(reversed(range(len(lines))), reversed(lines)):
match = re.match(
r"""
LLVM\ profile\ data
\ \(length:\ (?P<length>\d*),
\ CRC32:\ (?P<checksum>[0-9A-F]*)\):
""", line, re.VERBOSE)
if match:
exp_length = int(match.group('length'))
exp_checksum = match.group('checksum')
byte_array = bytes.fromhex(lines[i + 1])
break
# Check if output has LLVM profile data
if not match:
raise ValueError(
'Could not detect the LLVM profile data in device output.')
# Check length
act_length = len(byte_array)
if act_length != exp_length:
        raise ValueError('Length check failed! '
                         f'Expected: {exp_length}, actual: {act_length}.')
# Check checksum
act_checksum = zlib.crc32(byte_array).to_bytes(4,
byteorder='little',
signed=False).hex().upper()
if act_checksum != exp_checksum:
        raise ValueError('Checksum check failed! '
                         f'Expected: {exp_checksum}, actual: {act_checksum}.')
return byte_array
def main():
"""Parses command line arguments and extracts the profile data from device
output."""
argparser = argparse.ArgumentParser(
description='Extract LLVM profile data from device output.')
argparser.add_argument(dest='output_file',
type=argparse.FileType('wb'),
default=sys.stdout,
help='output file for writing LLVM profile data')
argparser.add_argument('--input_file',
type=argparse.FileType('rb'),
default=sys.stdin.buffer,
help='device output')
args = argparser.parse_args()
args.output_file.write(extract_profile_data(args.input_file))
if __name__ == '__main__':
main()
|
|
dbc6e2b41ca6afc759e7bdad102e40fc7404d7de
|
Wrappers/Dials/SpaceGroup.py
|
Wrappers/Dials/SpaceGroup.py
|
from __future__ import absolute_import, division, print_function
import os
from xia2.Driver.DriverFactory import DriverFactory
from xia2.Handlers.Streams import Chatter, Debug
def DialsSpaceGroup(DriverType=None):
"""A factory for DialsSpaceGroupWrapper classes."""
DriverInstance = DriverFactory.Driver(DriverType)
class DialsSpaceGroupWrapper(DriverInstance.__class__):
"""A wrapper for dials.space_group"""
def __init__(self):
# generic things
super(DialsSpaceGroupWrapper, self).__init__()
self.set_executable("dials.space_group")
# clear all the header junk
self.reset()
self._experiments_filename = None
self._reflections_filename = None
self._symmetrized_experiments = None
def set_experiments_filename(self, experiments_filename):
self._experiments_filename = experiments_filename
def get_experiments_filename(self):
return self._experiments_filename
def set_reflections_filename(self, reflections_filename):
self._reflections_filename = reflections_filename
def get_reflections_filename(self):
return self._reflections_filename
def set_symmetrized_experiments(self, filepath):
self._symmetrized_experiments = filepath
def get_symmetrized_experiments(self):
return self._symmetrized_experiments
def run(self):
"""Run dials.space_group"""
self.clear_command_line()
assert self._experiments_filename
assert self._reflections_filename
self.add_command_line(self._reflections_filename)
self.add_command_line(self._experiments_filename)
if not self._symmetrized_experiments:
self._symmetrized_experiments = os.path.join(
self.get_working_directory(),
"%i_symmetrized.expt" % self.get_xpid(),
)
self.add_command_line(
"output.experiments=%s" % self._symmetrized_experiments
)
self.start()
self.close_wait()
# check for errors
try:
self.check_for_errors()
except Exception:
Chatter.write(
"dials.space_group failed, see log file for more details:\n %s"
% self.get_log_file()
)
raise
Debug.write("dials.space_group status: OK")
return DialsSpaceGroupWrapper()
|
Add missing wrapper for dials.space_group
|
Add missing wrapper for dials.space_group
|
Python
|
bsd-3-clause
|
xia2/xia2,xia2/xia2
|
Add missing wrapper for dials.space_group
|
from __future__ import absolute_import, division, print_function
import os
from xia2.Driver.DriverFactory import DriverFactory
from xia2.Handlers.Streams import Chatter, Debug
def DialsSpaceGroup(DriverType=None):
"""A factory for DialsSpaceGroupWrapper classes."""
DriverInstance = DriverFactory.Driver(DriverType)
class DialsSpaceGroupWrapper(DriverInstance.__class__):
"""A wrapper for dials.space_group"""
def __init__(self):
# generic things
super(DialsSpaceGroupWrapper, self).__init__()
self.set_executable("dials.space_group")
# clear all the header junk
self.reset()
self._experiments_filename = None
self._reflections_filename = None
self._symmetrized_experiments = None
def set_experiments_filename(self, experiments_filename):
self._experiments_filename = experiments_filename
def get_experiments_filename(self):
return self._experiments_filename
def set_reflections_filename(self, reflections_filename):
self._reflections_filename = reflections_filename
def get_reflections_filename(self):
return self._reflections_filename
def set_symmetrized_experiments(self, filepath):
self._symmetrized_experiments = filepath
def get_symmetrized_experiments(self):
return self._symmetrized_experiments
def run(self):
"""Run dials.space_group"""
self.clear_command_line()
assert self._experiments_filename
assert self._reflections_filename
self.add_command_line(self._reflections_filename)
self.add_command_line(self._experiments_filename)
if not self._symmetrized_experiments:
self._symmetrized_experiments = os.path.join(
self.get_working_directory(),
"%i_symmetrized.expt" % self.get_xpid(),
)
self.add_command_line(
"output.experiments=%s" % self._symmetrized_experiments
)
self.start()
self.close_wait()
# check for errors
try:
self.check_for_errors()
except Exception:
Chatter.write(
"dials.space_group failed, see log file for more details:\n %s"
% self.get_log_file()
)
raise
Debug.write("dials.space_group status: OK")
return DialsSpaceGroupWrapper()
|
<commit_before><commit_msg>Add missing wrapper for dials.space_group<commit_after>
|
from __future__ import absolute_import, division, print_function
import os
from xia2.Driver.DriverFactory import DriverFactory
from xia2.Handlers.Streams import Chatter, Debug
def DialsSpaceGroup(DriverType=None):
"""A factory for DialsSpaceGroupWrapper classes."""
DriverInstance = DriverFactory.Driver(DriverType)
class DialsSpaceGroupWrapper(DriverInstance.__class__):
"""A wrapper for dials.space_group"""
def __init__(self):
# generic things
super(DialsSpaceGroupWrapper, self).__init__()
self.set_executable("dials.space_group")
# clear all the header junk
self.reset()
self._experiments_filename = None
self._reflections_filename = None
self._symmetrized_experiments = None
def set_experiments_filename(self, experiments_filename):
self._experiments_filename = experiments_filename
def get_experiments_filename(self):
return self._experiments_filename
def set_reflections_filename(self, reflections_filename):
self._reflections_filename = reflections_filename
def get_reflections_filename(self):
return self._reflections_filename
def set_symmetrized_experiments(self, filepath):
self._symmetrized_experiments = filepath
def get_symmetrized_experiments(self):
return self._symmetrized_experiments
def run(self):
"""Run dials.space_group"""
self.clear_command_line()
assert self._experiments_filename
assert self._reflections_filename
self.add_command_line(self._reflections_filename)
self.add_command_line(self._experiments_filename)
if not self._symmetrized_experiments:
self._symmetrized_experiments = os.path.join(
self.get_working_directory(),
"%i_symmetrized.expt" % self.get_xpid(),
)
self.add_command_line(
"output.experiments=%s" % self._symmetrized_experiments
)
self.start()
self.close_wait()
# check for errors
try:
self.check_for_errors()
except Exception:
Chatter.write(
"dials.space_group failed, see log file for more details:\n %s"
% self.get_log_file()
)
raise
Debug.write("dials.space_group status: OK")
return DialsSpaceGroupWrapper()
|
Add missing wrapper for dials.space_groupfrom __future__ import absolute_import, division, print_function
import os
from xia2.Driver.DriverFactory import DriverFactory
from xia2.Handlers.Streams import Chatter, Debug
def DialsSpaceGroup(DriverType=None):
"""A factory for DialsSpaceGroupWrapper classes."""
DriverInstance = DriverFactory.Driver(DriverType)
class DialsSpaceGroupWrapper(DriverInstance.__class__):
"""A wrapper for dials.space_group"""
def __init__(self):
# generic things
super(DialsSpaceGroupWrapper, self).__init__()
self.set_executable("dials.space_group")
# clear all the header junk
self.reset()
self._experiments_filename = None
self._reflections_filename = None
self._symmetrized_experiments = None
def set_experiments_filename(self, experiments_filename):
self._experiments_filename = experiments_filename
def get_experiments_filename(self):
return self._experiments_filename
def set_reflections_filename(self, reflections_filename):
self._reflections_filename = reflections_filename
def get_reflections_filename(self):
return self._reflections_filename
def set_symmetrized_experiments(self, filepath):
self._symmetrized_experiments = filepath
def get_symmetrized_experiments(self):
return self._symmetrized_experiments
def run(self):
"""Run dials.space_group"""
self.clear_command_line()
assert self._experiments_filename
assert self._reflections_filename
self.add_command_line(self._reflections_filename)
self.add_command_line(self._experiments_filename)
if not self._symmetrized_experiments:
self._symmetrized_experiments = os.path.join(
self.get_working_directory(),
"%i_symmetrized.expt" % self.get_xpid(),
)
self.add_command_line(
"output.experiments=%s" % self._symmetrized_experiments
)
self.start()
self.close_wait()
# check for errors
try:
self.check_for_errors()
except Exception:
Chatter.write(
"dials.space_group failed, see log file for more details:\n %s"
% self.get_log_file()
)
raise
Debug.write("dials.space_group status: OK")
return DialsSpaceGroupWrapper()
|
<commit_before><commit_msg>Add missing wrapper for dials.space_group<commit_after>from __future__ import absolute_import, division, print_function
import os
from xia2.Driver.DriverFactory import DriverFactory
from xia2.Handlers.Streams import Chatter, Debug
def DialsSpaceGroup(DriverType=None):
"""A factory for DialsSpaceGroupWrapper classes."""
DriverInstance = DriverFactory.Driver(DriverType)
class DialsSpaceGroupWrapper(DriverInstance.__class__):
"""A wrapper for dials.space_group"""
def __init__(self):
# generic things
super(DialsSpaceGroupWrapper, self).__init__()
self.set_executable("dials.space_group")
# clear all the header junk
self.reset()
self._experiments_filename = None
self._reflections_filename = None
self._symmetrized_experiments = None
def set_experiments_filename(self, experiments_filename):
self._experiments_filename = experiments_filename
def get_experiments_filename(self):
return self._experiments_filename
def set_reflections_filename(self, reflections_filename):
self._reflections_filename = reflections_filename
def get_reflections_filename(self):
return self._reflections_filename
def set_symmetrized_experiments(self, filepath):
self._symmetrized_experiments = filepath
def get_symmetrized_experiments(self):
return self._symmetrized_experiments
def run(self):
"""Run dials.space_group"""
self.clear_command_line()
assert self._experiments_filename
assert self._reflections_filename
self.add_command_line(self._reflections_filename)
self.add_command_line(self._experiments_filename)
if not self._symmetrized_experiments:
self._symmetrized_experiments = os.path.join(
self.get_working_directory(),
"%i_symmetrized.expt" % self.get_xpid(),
)
self.add_command_line(
"output.experiments=%s" % self._symmetrized_experiments
)
self.start()
self.close_wait()
# check for errors
try:
self.check_for_errors()
except Exception:
Chatter.write(
"dials.space_group failed, see log file for more details:\n %s"
% self.get_log_file()
)
raise
Debug.write("dials.space_group status: OK")
return DialsSpaceGroupWrapper()
|
|
a948e35ddc1121d26a851fe1f06032115302c038
|
visuals_EM/plotly_arrows.py
|
visuals_EM/plotly_arrows.py
|
import numpy as np
import plotly.graph_objs as go
def p2c(r, theta, phi):
"""Convert polar unit vector to cartesians"""
return [r * np.sin(theta) * np.cos(phi),
r * np.sin(theta) * np.sin(phi),
r * np.cos(theta)]
class Arrow:
def __init__(self, theta, phi, out, width=5, color='rgb(0,0,0)'):
"""
Args:
theta (float) - radians [0, π]
phi (float) - radians [0, 2π]
out (bool) - True if outgoing, False if incoming (to the origin)
width (int) - line thickness
color (hex/rgb) - line color
"""
self.theta = theta
self.phi = phi
self.out = out
self.width = width
self.color = color
wing_length, wing_angle = self._find_wing_coord()
shaft_xyz = p2c(1., self.theta, self.phi)
wings_xyz = [p2c(wing_length, self.theta + wing_angle, self.phi),
p2c(wing_length, self.theta - wing_angle, self.phi)]
self.shaft = go.Scatter3d(
x=[0, shaft_xyz[0]],
y=[0, shaft_xyz[1]],
z=[0, shaft_xyz[2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.wing1 = go.Scatter3d(
x=[shaft_xyz[0] / 2., wings_xyz[0][0]],
y=[shaft_xyz[1] / 2., wings_xyz[0][1]],
z=[shaft_xyz[2] / 2., wings_xyz[0][2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.wing2 = go.Scatter3d(
x=[shaft_xyz[0] / 2., wings_xyz[1][0]],
y=[shaft_xyz[1] / 2., wings_xyz[1][1]],
z=[shaft_xyz[2] / 2., wings_xyz[1][2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.data = [self.shaft, self.wing1, self.wing2]
def _find_wing_coord(self):
"""Finds polar coordinates of arrowhead wing ends"""
frac = 0.1
r = 0.5
sin45 = np.sin(np.pi / 4.)
if self.out == True:
d = r - frac * sin45
elif self.out == False:
d = r + frac * sin45
else:
raise TypeError("arg: out must be True or False")
a = np.sqrt(frac**2 * sin45**2 + d**2)
alpha = np.arccos(d / a)
return [a, alpha]
|
Convert arrows notebook to importable py file
|
Convert arrows notebook to importable py file
|
Python
|
mit
|
cydcowley/Imperial-Visualizations,cydcowley/Imperial-Visualizations,cydcowley/Imperial-Visualizations,cydcowley/Imperial-Visualizations
|
Convert arrows notebook to importable py file
|
import numpy as np
import plotly.graph_objs as go
def p2c(r, theta, phi):
"""Convert polar unit vector to cartesians"""
return [r * np.sin(theta) * np.cos(phi),
r * np.sin(theta) * np.sin(phi),
r * np.cos(theta)]
class Arrow:
def __init__(self, theta, phi, out, width=5, color='rgb(0,0,0)'):
"""
Args:
theta (float) - radians [0, π]
phi (float) - radians [0, 2π]
out (bool) - True if outgoing, False if incoming (to the origin)
width (int) - line thickness
color (hex/rgb) - line color
"""
self.theta = theta
self.phi = phi
self.out = out
self.width = width
self.color = color
wing_length, wing_angle = self._find_wing_coord()
shaft_xyz = p2c(1., self.theta, self.phi)
wings_xyz = [p2c(wing_length, self.theta + wing_angle, self.phi),
p2c(wing_length, self.theta - wing_angle, self.phi)]
self.shaft = go.Scatter3d(
x=[0, shaft_xyz[0]],
y=[0, shaft_xyz[1]],
z=[0, shaft_xyz[2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.wing1 = go.Scatter3d(
x=[shaft_xyz[0] / 2., wings_xyz[0][0]],
y=[shaft_xyz[1] / 2., wings_xyz[0][1]],
z=[shaft_xyz[2] / 2., wings_xyz[0][2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.wing2 = go.Scatter3d(
x=[shaft_xyz[0] / 2., wings_xyz[1][0]],
y=[shaft_xyz[1] / 2., wings_xyz[1][1]],
z=[shaft_xyz[2] / 2., wings_xyz[1][2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.data = [self.shaft, self.wing1, self.wing2]
def _find_wing_coord(self):
"""Finds polar coordinates of arrowhead wing ends"""
frac = 0.1
r = 0.5
sin45 = np.sin(np.pi / 4.)
if self.out == True:
d = r - frac * sin45
elif self.out == False:
d = r + frac * sin45
else:
raise TypeError("arg: out must be True or False")
a = np.sqrt(frac**2 * sin45**2 + d**2)
alpha = np.arccos(d / a)
return [a, alpha]
|
<commit_before><commit_msg>Convert arrows notebook to importable py file<commit_after>
|
import numpy as np
import plotly.graph_objs as go
def p2c(r, theta, phi):
"""Convert polar unit vector to cartesians"""
return [r * np.sin(theta) * np.cos(phi),
r * np.sin(theta) * np.sin(phi),
r * np.cos(theta)]
class Arrow:
def __init__(self, theta, phi, out, width=5, color='rgb(0,0,0)'):
"""
Args:
theta (float) - radians [0, π]
phi (float) - radians [0, 2π]
out (bool) - True if outgoing, False if incoming (to the origin)
width (int) - line thickness
color (hex/rgb) - line color
"""
self.theta = theta
self.phi = phi
self.out = out
self.width = width
self.color = color
wing_length, wing_angle = self._find_wing_coord()
shaft_xyz = p2c(1., self.theta, self.phi)
wings_xyz = [p2c(wing_length, self.theta + wing_angle, self.phi),
p2c(wing_length, self.theta - wing_angle, self.phi)]
self.shaft = go.Scatter3d(
x=[0, shaft_xyz[0]],
y=[0, shaft_xyz[1]],
z=[0, shaft_xyz[2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.wing1 = go.Scatter3d(
x=[shaft_xyz[0] / 2., wings_xyz[0][0]],
y=[shaft_xyz[1] / 2., wings_xyz[0][1]],
z=[shaft_xyz[2] / 2., wings_xyz[0][2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.wing2 = go.Scatter3d(
x=[shaft_xyz[0] / 2., wings_xyz[1][0]],
y=[shaft_xyz[1] / 2., wings_xyz[1][1]],
z=[shaft_xyz[2] / 2., wings_xyz[1][2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.data = [self.shaft, self.wing1, self.wing2]
def _find_wing_coord(self):
"""Finds polar coordinates of arrowhead wing ends"""
frac = 0.1
r = 0.5
sin45 = np.sin(np.pi / 4.)
if self.out == True:
d = r - frac * sin45
elif self.out == False:
d = r + frac * sin45
else:
raise TypeError("arg: out must be True or False")
a = np.sqrt(frac**2 * sin45**2 + d**2)
alpha = np.arccos(d / a)
return [a, alpha]
|
Convert arrows notebook to importable py fileimport numpy as np
import plotly.graph_objs as go
def p2c(r, theta, phi):
"""Convert polar unit vector to cartesians"""
return [r * np.sin(theta) * np.cos(phi),
r * np.sin(theta) * np.sin(phi),
r * np.cos(theta)]
class Arrow:
def __init__(self, theta, phi, out, width=5, color='rgb(0,0,0)'):
"""
Args:
theta (float) - radians [0, π]
phi (float) - radians [0, 2π]
out (bool) - True if outgoing, False if incoming (to the origin)
width (int) - line thickness
color (hex/rgb) - line color
"""
self.theta = theta
self.phi = phi
self.out = out
self.width = width
self.color = color
wing_length, wing_angle = self._find_wing_coord()
shaft_xyz = p2c(1., self.theta, self.phi)
wings_xyz = [p2c(wing_length, self.theta + wing_angle, self.phi),
p2c(wing_length, self.theta - wing_angle, self.phi)]
self.shaft = go.Scatter3d(
x=[0, shaft_xyz[0]],
y=[0, shaft_xyz[1]],
z=[0, shaft_xyz[2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.wing1 = go.Scatter3d(
x=[shaft_xyz[0] / 2., wings_xyz[0][0]],
y=[shaft_xyz[1] / 2., wings_xyz[0][1]],
z=[shaft_xyz[2] / 2., wings_xyz[0][2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.wing2 = go.Scatter3d(
x=[shaft_xyz[0] / 2., wings_xyz[1][0]],
y=[shaft_xyz[1] / 2., wings_xyz[1][1]],
z=[shaft_xyz[2] / 2., wings_xyz[1][2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.data = [self.shaft, self.wing1, self.wing2]
def _find_wing_coord(self):
"""Finds polar coordinates of arrowhead wing ends"""
frac = 0.1
r = 0.5
sin45 = np.sin(np.pi / 4.)
if self.out == True:
d = r - frac * sin45
elif self.out == False:
d = r + frac * sin45
else:
raise TypeError("arg: out must be True or False")
a = np.sqrt(frac**2 * sin45**2 + d**2)
alpha = np.arccos(d / a)
return [a, alpha]
|
<commit_before><commit_msg>Convert arrows notebook to importable py file<commit_after>import numpy as np
import plotly.graph_objs as go
def p2c(r, theta, phi):
"""Convert polar unit vector to cartesians"""
return [r * np.sin(theta) * np.cos(phi),
r * np.sin(theta) * np.sin(phi),
r * np.cos(theta)]
class Arrow:
def __init__(self, theta, phi, out, width=5, color='rgb(0,0,0)'):
"""
Args:
theta (float) - radians [0, π]
phi (float) - radians [0, 2π]
out (bool) - True if outgoing, False if incoming (to the origin)
width (int) - line thickness
color (hex/rgb) - line color
"""
self.theta = theta
self.phi = phi
self.out = out
self.width = width
self.color = color
wing_length, wing_angle = self._find_wing_coord()
shaft_xyz = p2c(1., self.theta, self.phi)
wings_xyz = [p2c(wing_length, self.theta + wing_angle, self.phi),
p2c(wing_length, self.theta - wing_angle, self.phi)]
self.shaft = go.Scatter3d(
x=[0, shaft_xyz[0]],
y=[0, shaft_xyz[1]],
z=[0, shaft_xyz[2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.wing1 = go.Scatter3d(
x=[shaft_xyz[0] / 2., wings_xyz[0][0]],
y=[shaft_xyz[1] / 2., wings_xyz[0][1]],
z=[shaft_xyz[2] / 2., wings_xyz[0][2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.wing2 = go.Scatter3d(
x=[shaft_xyz[0] / 2., wings_xyz[1][0]],
y=[shaft_xyz[1] / 2., wings_xyz[1][1]],
z=[shaft_xyz[2] / 2., wings_xyz[1][2]],
showlegend=False, mode='lines', line={'width': self.width, 'color': self.color}
)
self.data = [self.shaft, self.wing1, self.wing2]
def _find_wing_coord(self):
"""Finds polar coordinates of arrowhead wing ends"""
frac = 0.1
r = 0.5
sin45 = np.sin(np.pi / 4.)
if self.out == True:
d = r - frac * sin45
elif self.out == False:
d = r + frac * sin45
else:
raise TypeError("arg: out must be True or False")
a = np.sqrt(frac**2 * sin45**2 + d**2)
alpha = np.arccos(d / a)
return [a, alpha]
|
|
f0d8821b42a017e95dc9528e051e7700bfde64e1
|
examples/deploytemplate-cli.py
|
examples/deploytemplate-cli.py
|
# deploytemplate.py
# authenticates using CLI e.g. run this in the Azure Cloud Shell
# takes a deployment template URI and a local parameters file and deploys it
# Arguments: -u templateUri
# -p parameters JSON file
# -l location
# -g existing resource group
# -s subscription
import argparse
import azurerm
from haikunator import Haikunator
import json
import sys
# validate command line arguments
argParser = argparse.ArgumentParser()
argParser.add_argument('--uri', '-u', required=True,
action='store', help='Template URI')
argParser.add_argument('--params', '-p', required=True,
action='store', help='Parameters json file')
argParser.add_argument('--location', '-l', required=True,
action='store', help='Location, e.g. eastus')
argParser.add_argument('--rg', '-g', required=True,
action='store', help='Resource Group name')
argParser.add_argument('--sub', '-s', required=True,
action='store', help='subscription id')
args = argParser.parse_args()
template_uri = args.uri
params = args.params
rgname = args.rg
location = args.location
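# NOTE: location is parsed above but not used by the deployment call below.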
subscription_id = args.sub
# load parameters file
try:
with open(params) as params_file:
param_data = json.load(params_file)
except FileNotFoundError:
print('Error: Expecting ' + params + ' in current folder')
sys.exit()
access_token = azurerm.get_access_token_from_cli()
deployment_name = Haikunator().haikunate()
print('Deployment name:' + deployment_name)
deploy_return = azurerm.deploy_template_uri(
access_token, subscription_id, rgname, deployment_name, template_uri, param_data)
print(json.dumps(deploy_return.json(), sort_keys=False, indent=2, separators=(',', ': ')))
|
Save cloud shell deploy template example
|
Save cloud shell deploy template example
|
Python
|
mit
|
gbowerman/azurerm
|
Save cloud shell deploy template example
|
# deploytemplate.py
# authenticates using CLI e.g. run this in the Azure Cloud Shell
# takes a deployment template URI and a local parameters file and deploys it
# Arguments: -u templateUri
# -p parameters JSON file
# -l location
# -g existing resource group
# -s subscription
import argparse
import azurerm
from haikunator import Haikunator
import json
import sys
# validate command line arguments
argParser = argparse.ArgumentParser()
argParser.add_argument('--uri', '-u', required=True,
action='store', help='Template URI')
argParser.add_argument('--params', '-p', required=True,
action='store', help='Parameters json file')
argParser.add_argument('--location', '-l', required=True,
action='store', help='Location, e.g. eastus')
argParser.add_argument('--rg', '-g', required=True,
action='store', help='Resource Group name')
argParser.add_argument('--sub', '-s', required=True,
action='store', help='subscription id')
args = argParser.parse_args()
template_uri = args.uri
params = args.params
rgname = args.rg
location = args.location
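# NOTE: location is parsed above but not used by the deployment call below.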
subscription_id = args.sub
# load parameters file
try:
with open(params) as params_file:
param_data = json.load(params_file)
except FileNotFoundError:
print('Error: Expecting ' + params + ' in current folder')
sys.exit()
access_token = azurerm.get_access_token_from_cli()
deployment_name = Haikunator().haikunate()
print('Deployment name:' + deployment_name)
deploy_return = azurerm.deploy_template_uri(
access_token, subscription_id, rgname, deployment_name, template_uri, param_data)
print(json.dumps(deploy_return.json(), sort_keys=False, indent=2, separators=(',', ': ')))
|
<commit_before><commit_msg>Save cloud shell deploy template example<commit_after>
|
# deploytemplate.py
# authenticates using CLI e.g. run this in the Azure Cloud Shell
# takes a deployment template URI and a local parameters file and deploys it
# Arguments: -u templateUri
# -p parameters JSON file
# -l location
# -g existing resource group
# -s subscription
import argparse
import azurerm
from haikunator import Haikunator
import json
import sys
# validate command line arguments
argParser = argparse.ArgumentParser()
argParser.add_argument('--uri', '-u', required=True,
action='store', help='Template URI')
argParser.add_argument('--params', '-p', required=True,
action='store', help='Parameters json file')
argParser.add_argument('--location', '-l', required=True,
action='store', help='Location, e.g. eastus')
argParser.add_argument('--rg', '-g', required=True,
action='store', help='Resource Group name')
argParser.add_argument('--sub', '-s', required=True,
action='store', help='subscription id')
args = argParser.parse_args()
template_uri = args.uri
params = args.params
rgname = args.rg
location = args.location
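# NOTE: location is parsed above but not used by the deployment call below.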
subscription_id = args.sub
# load parameters file
try:
with open(params) as params_file:
param_data = json.load(params_file)
except FileNotFoundError:
print('Error: Expecting ' + params + ' in current folder')
sys.exit()
access_token = azurerm.get_access_token_from_cli()
deployment_name = Haikunator().haikunate()
print('Deployment name:' + deployment_name)
deploy_return = azurerm.deploy_template_uri(
access_token, subscription_id, rgname, deployment_name, template_uri, param_data)
print(json.dumps(deploy_return.json(), sort_keys=False, indent=2, separators=(',', ': ')))
|
Save cloud shell deploy template example# deploytemplate.py
# authenticates using CLI e.g. run this in the Azure Cloud Shell
# takes a deployment template URI and a local parameters file and deploys it
# Arguments: -u templateUri
# -p parameters JSON file
# -l location
# -g existing resource group
# -s subscription
import argparse
import azurerm
from haikunator import Haikunator
import json
import sys
# validate command line arguments
argParser = argparse.ArgumentParser()
argParser.add_argument('--uri', '-u', required=True,
action='store', help='Template URI')
argParser.add_argument('--params', '-p', required=True,
action='store', help='Parameters json file')
argParser.add_argument('--location', '-l', required=True,
action='store', help='Location, e.g. eastus')
argParser.add_argument('--rg', '-g', required=True,
action='store', help='Resource Group name')
argParser.add_argument('--sub', '-s', required=True,
action='store', help='subscription id')
args = argParser.parse_args()
template_uri = args.uri
params = args.params
rgname = args.rg
location = args.location
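# NOTE: location is parsed above but not used by the deployment call below.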
subscription_id = args.sub
# load parameters file
try:
with open(params) as params_file:
param_data = json.load(params_file)
except FileNotFoundError:
print('Error: Expecting ' + params + ' in current folder')
sys.exit()
access_token = azurerm.get_access_token_from_cli()
deployment_name = Haikunator().haikunate()
print('Deployment name:' + deployment_name)
deploy_return = azurerm.deploy_template_uri(
access_token, subscription_id, rgname, deployment_name, template_uri, param_data)
print(json.dumps(deploy_return.json(), sort_keys=False, indent=2, separators=(',', ': ')))
|
<commit_before><commit_msg>Save cloud shell deploy template example<commit_after># deploytemplate.py
# authenticates using CLI e.g. run this in the Azure Cloud Shell
# takes a deployment template URI and a local parameters file and deploys it
# Arguments: -u templateUri
# -p parameters JSON file
# -l location
# -g existing resource group
# -s subscription
import argparse
import azurerm
from haikunator import Haikunator
import json
import sys
# validate command line arguments
argParser = argparse.ArgumentParser()
argParser.add_argument('--uri', '-u', required=True,
action='store', help='Template URI')
argParser.add_argument('--params', '-p', required=True,
action='store', help='Parameters json file')
argParser.add_argument('--location', '-l', required=True,
action='store', help='Location, e.g. eastus')
argParser.add_argument('--rg', '-g', required=True,
action='store', help='Resource Group name')
argParser.add_argument('--sub', '-s', required=True,
action='store', help='subscription id')
args = argParser.parse_args()
template_uri = args.uri
params = args.params
rgname = args.rg
location = args.location
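# NOTE: location is parsed above but not used by the deployment call below.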
subscription_id = args.sub
# load parameters file
try:
with open(params) as params_file:
param_data = json.load(params_file)
except FileNotFoundError:
print('Error: Expecting ' + params + ' in current folder')
sys.exit()
access_token = azurerm.get_access_token_from_cli()
deployment_name = Haikunator().haikunate()
print('Deployment name:' + deployment_name)
deploy_return = azurerm.deploy_template_uri(
access_token, subscription_id, rgname, deployment_name, template_uri, param_data)
print(json.dumps(deploy_return.json(), sort_keys=False, indent=2, separators=(',', ': ')))
|
|
604a413173c1699e954bf689ea948f5fcc58c9d6
|
events/migrations/0022_auto_20160229_2111.py
|
events/migrations/0022_auto_20160229_2111.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import mptt.fields
class Migration(migrations.Migration):
dependencies = [
('events', '0021_auto_20160217_1832'),
]
operations = [
migrations.AlterModelManagers(
name='event',
managers=[
],
),
migrations.AlterModelManagers(
name='place',
managers=[
],
),
migrations.AlterField(
model_name='event',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='event',
name='location',
field=models.ForeignKey(blank=True, to='events.Place', null=True, on_delete=django.db.models.deletion.PROTECT),
),
migrations.AlterField(
model_name='event',
name='publisher',
field=models.ForeignKey(verbose_name='Publisher', to='events.Organization', related_name='published_events', on_delete=django.db.models.deletion.PROTECT),
),
migrations.AlterField(
model_name='event',
name='super_event',
field=mptt.fields.TreeForeignKey(blank=True, to='events.Event', related_name='sub_events', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='keyword',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='keywordset',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='organization',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='place',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
]
|
Add a migration for earlier changes
|
Add a migration for earlier changes
|
Python
|
mit
|
tuomas777/linkedevents,aapris/linkedevents,tuomas777/linkedevents,tuomas777/linkedevents,aapris/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,aapris/linkedevents
|
Add a migration for earlier changes
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import mptt.fields
class Migration(migrations.Migration):
dependencies = [
('events', '0021_auto_20160217_1832'),
]
operations = [
migrations.AlterModelManagers(
name='event',
managers=[
],
),
migrations.AlterModelManagers(
name='place',
managers=[
],
),
migrations.AlterField(
model_name='event',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='event',
name='location',
field=models.ForeignKey(blank=True, to='events.Place', null=True, on_delete=django.db.models.deletion.PROTECT),
),
migrations.AlterField(
model_name='event',
name='publisher',
field=models.ForeignKey(verbose_name='Publisher', to='events.Organization', related_name='published_events', on_delete=django.db.models.deletion.PROTECT),
),
migrations.AlterField(
model_name='event',
name='super_event',
field=mptt.fields.TreeForeignKey(blank=True, to='events.Event', related_name='sub_events', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='keyword',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='keywordset',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='organization',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='place',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
]
|
<commit_before><commit_msg>Add a migration for earlier changes<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import mptt.fields
class Migration(migrations.Migration):
dependencies = [
('events', '0021_auto_20160217_1832'),
]
operations = [
migrations.AlterModelManagers(
name='event',
managers=[
],
),
migrations.AlterModelManagers(
name='place',
managers=[
],
),
migrations.AlterField(
model_name='event',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='event',
name='location',
field=models.ForeignKey(blank=True, to='events.Place', null=True, on_delete=django.db.models.deletion.PROTECT),
),
migrations.AlterField(
model_name='event',
name='publisher',
field=models.ForeignKey(verbose_name='Publisher', to='events.Organization', related_name='published_events', on_delete=django.db.models.deletion.PROTECT),
),
migrations.AlterField(
model_name='event',
name='super_event',
field=mptt.fields.TreeForeignKey(blank=True, to='events.Event', related_name='sub_events', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='keyword',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='keywordset',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='organization',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='place',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
]
|
Add a migration for earlier changes# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import mptt.fields
class Migration(migrations.Migration):
dependencies = [
('events', '0021_auto_20160217_1832'),
]
operations = [
migrations.AlterModelManagers(
name='event',
managers=[
],
),
migrations.AlterModelManagers(
name='place',
managers=[
],
),
migrations.AlterField(
model_name='event',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='event',
name='location',
field=models.ForeignKey(blank=True, to='events.Place', null=True, on_delete=django.db.models.deletion.PROTECT),
),
migrations.AlterField(
model_name='event',
name='publisher',
field=models.ForeignKey(verbose_name='Publisher', to='events.Organization', related_name='published_events', on_delete=django.db.models.deletion.PROTECT),
),
migrations.AlterField(
model_name='event',
name='super_event',
field=mptt.fields.TreeForeignKey(blank=True, to='events.Event', related_name='sub_events', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='keyword',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='keywordset',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='organization',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='place',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
]
|
<commit_before><commit_msg>Add a migration for earlier changes<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import mptt.fields
class Migration(migrations.Migration):
dependencies = [
('events', '0021_auto_20160217_1832'),
]
operations = [
migrations.AlterModelManagers(
name='event',
managers=[
],
),
migrations.AlterModelManagers(
name='place',
managers=[
],
),
migrations.AlterField(
model_name='event',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='event',
name='location',
field=models.ForeignKey(blank=True, to='events.Place', null=True, on_delete=django.db.models.deletion.PROTECT),
),
migrations.AlterField(
model_name='event',
name='publisher',
field=models.ForeignKey(verbose_name='Publisher', to='events.Organization', related_name='published_events', on_delete=django.db.models.deletion.PROTECT),
),
migrations.AlterField(
model_name='event',
name='super_event',
field=mptt.fields.TreeForeignKey(blank=True, to='events.Event', related_name='sub_events', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='keyword',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='keywordset',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='organization',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
migrations.AlterField(
model_name='place',
name='image',
field=models.ForeignKey(verbose_name='Image', blank=True, to='events.Image', null=True, on_delete=django.db.models.deletion.SET_NULL),
),
]
|
|
af7a16eb085ae9a436dbd787e487a9228b66652f
|
srw/find_object.py
|
srw/find_object.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import logging
import ds9
logging.basicConfig(
level='DEBUG', format='%(asctime)s|%(name)s|%(levelname)s|%(message)s')
logger = logging.getLogger(__name__)
def main(args):
logger.debug(args)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('filename')
parser.add_argument('-r', '--ra', required=True, type=float)
parser.add_argument('-d', '--dec', required=True, type=float)
main(parser.parse_args())
|
Copy file contents into path
|
Copy file contents into path
|
Python
|
mit
|
mindriot101/srw
|
Copy file contents into path
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import logging
import ds9
logging.basicConfig(
level='DEBUG', format='%(asctime)s|%(name)s|%(levelname)s|%(message)s')
logger = logging.getLogger(__name__)
def main(args):
logger.debug(args)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('filename')
parser.add_argument('-r', '--ra', required=True, type=float)
parser.add_argument('-d', '--dec', required=True, type=float)
main(parser.parse_args())
|
<commit_before><commit_msg>Copy file contents into path<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import logging
import ds9
logging.basicConfig(
level='DEBUG', format='%(asctime)s|%(name)s|%(levelname)s|%(message)s')
logger = logging.getLogger(__name__)
def main(args):
logger.debug(args)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('filename')
parser.add_argument('-r', '--ra', required=True, type=float)
parser.add_argument('-d', '--dec', required=True, type=float)
main(parser.parse_args())
|
Copy file contents into path#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import logging
import ds9
logging.basicConfig(
level='DEBUG', format='%(asctime)s|%(name)s|%(levelname)s|%(message)s')
logger = logging.getLogger(__name__)
def main(args):
logger.debug(args)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('filename')
parser.add_argument('-r', '--ra', required=True, type=float)
parser.add_argument('-d', '--dec', required=True, type=float)
main(parser.parse_args())
|
<commit_before><commit_msg>Copy file contents into path<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import argparse
import logging
import ds9
logging.basicConfig(
level='DEBUG', format='%(asctime)s|%(name)s|%(levelname)s|%(message)s')
logger = logging.getLogger(__name__)
def main(args):
logger.debug(args)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('filename')
parser.add_argument('-r', '--ra', required=True, type=float)
parser.add_argument('-d', '--dec', required=True, type=float)
main(parser.parse_args())
|
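The script takes a file name plus right ascension and declination; the as-yet-unused `ds9` import suggests the object will eventually be marked up in a running DS9 session. A hypothetical invocation (the file name and coordinates below are placeholders, not values from the repository):

python srw/find_object.py image.fits --ra 123.456 --dec -45.678
# logs something like: 2015-01-01 ...|__main__|DEBUG|Namespace(dec=-45.678, filename='image.fits', ra=123.456)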
|
39a65299bd72019cce3691cabd5f2d8afebc61ac
|
suite/ppcbranch.py
|
suite/ppcbranch.py
|
#!/usr/bin/env python
# Capstone by Nguyen Anh Quynh <aquynh@gmail.com>
# PPC Branch testing suite by kratolp
from __future__ import print_function
import sys
from capstone import *
CODE32 = b"\x48\x01\x05\x15" # bl .+0x10514
CODE32 += b"\x4B\xff\xff\xfd" # bl .-0x4
CODE32 += b"\x48\x00\x00\x0c" # b .+0xc
CODE32 += b"\x41\x80\xff\xd8" # blt .-0x28
CODE32 += b"\x40\x80\xff\xec" # bge .-0x14
CODE32 += b"\x41\x84\x01\x6c" # blt cr1, .+0x16c
CODE32 += b"\x41\x82\x00\x10" # beq .+0x10
CODE32 += b"\x40\x82\x00\x08" # bne .+0x8
CODE32 += b"\x40\x95\x00\x94" # ble cr5,.+0x94
CODE32 += b"\x40\x9f\x10\x30" # bns cr7,.+0x1030
CODE32 += b"\x42\x00\xff\xd8" # bdnz .-0x28
CODE32 += b"\x4d\x82\x00\x20" # beqlr
CODE32 += b"\x4e\x80\x00\x20" # blr
_python3 = sys.version_info.major == 3
all_tests = (
(CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, CODE32, "PPC branch instruction decoding", 0),
)
def to_hex(s):
if _python3:
return " ".join("0x{0:02x}".format(c) for c in s) # <-- Python 3 is OK
else:
return " ".join("0x{0:02x}".format(ord(c)) for c in s)
# ## Test cs_disasm_quick()
def test_cs_disasm_quick():
for (arch, mode, code, comment, syntax) in all_tests:
print("Platform: %s" % comment)
print("Code: %s" %(to_hex(code))),
print("Disasm:")
for (addr, size, mnemonic, op_str) in cs_disasm_lite(arch, mode, code, 0x1000):
print("0x%x:\t%s\t%s" % (addr, mnemonic, op_str))
print()
if __name__ == '__main__':
test_cs_disasm_quick()
|
Add ppc branch test suite
|
Add ppc branch test suite
|
Python
|
bsd-3-clause
|
dynm/capstone,bughoho/capstone,pyq881120/capstone,zneak/capstone,capturePointer/capstone,NeilBryant/capstone,dynm/capstone,code4bones/capstone,pyq881120/capstone,pranith/capstone,sigma-random/capstone,xia0pin9/capstone,xia0pin9/capstone,techvoltage/capstone,8l/capstone,krytarowski/capstone,dynm/capstone,bowlofstew/capstone,AmesianX/capstone,pombredanne/capstone,zneak/capstone,sigma-random/capstone,AmesianX/capstone,angelabier1/capstone,techvoltage/capstone,dynm/capstone,pombredanne/capstone,bSr43/capstone,capturePointer/capstone,nplanel/capstone,8l/capstone,krytarowski/capstone,bowlofstew/capstone,sephiroth99/capstone,sephiroth99/capstone,zneak/capstone,nplanel/capstone,bughoho/capstone,bigendiansmalls/capstone,bSr43/capstone,bughoho/capstone,AmesianX/capstone,code4bones/capstone,xia0pin9/capstone,NeilBryant/capstone,zneak/capstone,sigma-random/capstone,07151129/capstone,xia0pin9/capstone,capturePointer/capstone,xia0pin9/capstone,fvrmatteo/capstone,nplanel/capstone,pombredanne/capstone,fvrmatteo/capstone,bowlofstew/capstone,code4bones/capstone,bigendiansmalls/capstone,dynm/capstone,07151129/capstone,sigma-random/capstone,bughoho/capstone,xia0pin9/capstone,AmesianX/capstone,zneak/capstone,bowlofstew/capstone,nplanel/capstone,bigendiansmalls/capstone,angelabier1/capstone,bughoho/capstone,angelabier1/capstone,bughoho/capstone,capturePointer/capstone,sigma-random/capstone,AmesianX/capstone,bigendiansmalls/capstone,bigendiansmalls/capstone,8l/capstone,bowlofstew/capstone,bowlofstew/capstone,bSr43/capstone,angelabier1/capstone,bSr43/capstone,krytarowski/capstone,pombredanne/capstone,NeilBryant/capstone,fvrmatteo/capstone,NeilBryant/capstone,techvoltage/capstone,zuloloxi/capstone,techvoltage/capstone,pombredanne/capstone,07151129/capstone,pranith/capstone,pranith/capstone,pyq881120/capstone,sigma-random/capstone,pombredanne/capstone,zneak/capstone,8l/capstone,sephiroth99/capstone,krytarowski/capstone,bSr43/capstone,zuloloxi/capstone,bughoho/capstone,bSr43/capstone,AmesianX/capstone,nplanel/capstone,07151129/capstone,NeilBryant/capstone,8l/capstone,NeilBryant/capstone,sephiroth99/capstone,zuloloxi/capstone,angelabier1/capstone,angelabier1/capstone,zuloloxi/capstone,nplanel/capstone,zneak/capstone,pranith/capstone,angelabier1/capstone,sephiroth99/capstone,fvrmatteo/capstone,code4bones/capstone,pyq881120/capstone,07151129/capstone,code4bones/capstone,sephiroth99/capstone,07151129/capstone,fvrmatteo/capstone,pranith/capstone,bigendiansmalls/capstone,capturePointer/capstone,pranith/capstone,sigma-random/capstone,dynm/capstone,zuloloxi/capstone,xia0pin9/capstone,pranith/capstone,bigendiansmalls/capstone,krytarowski/capstone,techvoltage/capstone,bSr43/capstone,bowlofstew/capstone,zuloloxi/capstone,krytarowski/capstone,8l/capstone,pyq881120/capstone,zuloloxi/capstone,techvoltage/capstone,pyq881120/capstone,07151129/capstone,nplanel/capstone,fvrmatteo/capstone,8l/capstone,krytarowski/capstone,techvoltage/capstone,NeilBryant/capstone,capturePointer/capstone,capturePointer/capstone,AmesianX/capstone,fvrmatteo/capstone,code4bones/capstone,pyq881120/capstone,sephiroth99/capstone,pombredanne/capstone,dynm/capstone,code4bones/capstone
|
Add ppc branch test suite
|
#!/usr/bin/env python
# Capstone by Nguyen Anh Quynh <aquynh@gmail.com>
# PPC Branch testing suite by kratolp
from __future__ import print_function
import sys
from capstone import *
CODE32 = b"\x48\x01\x05\x15" # bl .+0x10514
CODE32 += b"\x4B\xff\xff\xfd" # bl .-0x4
CODE32 += b"\x48\x00\x00\x0c" # b .+0xc
CODE32 += b"\x41\x80\xff\xd8" # blt .-0x28
CODE32 += b"\x40\x80\xff\xec" # bge .-0x14
CODE32 += b"\x41\x84\x01\x6c" # blt cr1, .+0x16c
CODE32 += b"\x41\x82\x00\x10" # beq .+0x10
CODE32 += b"\x40\x82\x00\x08" # bne .+0x8
CODE32 += b"\x40\x95\x00\x94" # ble cr5,.+0x94
CODE32 += b"\x40\x9f\x10\x30" # bns cr7,.+0x1030
CODE32 += b"\x42\x00\xff\xd8" # bdnz .-0x28
CODE32 += b"\x4d\x82\x00\x20" # beqlr
CODE32 += b"\x4e\x80\x00\x20" # blr
_python3 = sys.version_info.major == 3
all_tests = (
(CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, CODE32, "PPC branch instruction decoding", 0),
)
def to_hex(s):
if _python3:
return " ".join("0x{0:02x}".format(c) for c in s) # <-- Python 3 is OK
else:
return " ".join("0x{0:02x}".format(ord(c)) for c in s)
# ## Test cs_disasm_quick()
def test_cs_disasm_quick():
for (arch, mode, code, comment, syntax) in all_tests:
print("Platform: %s" % comment)
print("Code: %s" %(to_hex(code))),
print("Disasm:")
for (addr, size, mnemonic, op_str) in cs_disasm_lite(arch, mode, code, 0x1000):
print("0x%x:\t%s\t%s" % (addr, mnemonic, op_str))
print()
if __name__ == '__main__':
test_cs_disasm_quick()
|
<commit_before><commit_msg>Add ppc branch test suite<commit_after>
|
#!/usr/bin/env python
# Capstone by Nguyen Anh Quynh <aquynh@gmail.com>
# PPC Branch testing suite by kratolp
from __future__ import print_function
import sys
from capstone import *
CODE32 = b"\x48\x01\x05\x15" # bl .+0x10514
CODE32 += b"\x4B\xff\xff\xfd" # bl .-0x4
CODE32 += b"\x48\x00\x00\x0c" # b .+0xc
CODE32 += b"\x41\x80\xff\xd8" # blt .-0x28
CODE32 += b"\x40\x80\xff\xec" # bge .-0x14
CODE32 += b"\x41\x84\x01\x6c" # blt cr1, .+0x16c
CODE32 += b"\x41\x82\x00\x10" # beq .+0x10
CODE32 += b"\x40\x82\x00\x08" # bne .+0x8
CODE32 += b"\x40\x95\x00\x94" # ble cr5,.+0x94
CODE32 += b"\x40\x9f\x10\x30" # bns cr7,.+0x1030
CODE32 += b"\x42\x00\xff\xd8" # bdnz .-0x28
CODE32 += b"\x4d\x82\x00\x20" # beqlr
CODE32 += b"\x4e\x80\x00\x20" # blr
_python3 = sys.version_info.major == 3
all_tests = (
(CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, CODE32, "PPC branch instruction decoding", 0),
)
def to_hex(s):
if _python3:
return " ".join("0x{0:02x}".format(c) for c in s) # <-- Python 3 is OK
else:
return " ".join("0x{0:02x}".format(ord(c)) for c in s)
# ## Test cs_disasm_quick()
def test_cs_disasm_quick():
for (arch, mode, code, comment, syntax) in all_tests:
print("Platform: %s" % comment)
print("Code: %s" %(to_hex(code))),
print("Disasm:")
for (addr, size, mnemonic, op_str) in cs_disasm_lite(arch, mode, code, 0x1000):
print("0x%x:\t%s\t%s" % (addr, mnemonic, op_str))
print()
if __name__ == '__main__':
test_cs_disasm_quick()
|
Add ppc branch test suite#!/usr/bin/env python
# Capstone by Nguyen Anh Quynh <aquynh@gmail.com>
# PPC Branch testing suite by kratolp
from __future__ import print_function
import sys
from capstone import *
CODE32 = b"\x48\x01\x05\x15" # bl .+0x10514
CODE32 += b"\x4B\xff\xff\xfd" # bl .-0x4
CODE32 += b"\x48\x00\x00\x0c" # b .+0xc
CODE32 += b"\x41\x80\xff\xd8" # blt .-0x28
CODE32 += b"\x40\x80\xff\xec" # bge .-0x14
CODE32 += b"\x41\x84\x01\x6c" # blt cr1, .+0x16c
CODE32 += b"\x41\x82\x00\x10" # beq .+0x10
CODE32 += b"\x40\x82\x00\x08" # bne .+0x8
CODE32 += b"\x40\x95\x00\x94" # ble cr5,.+0x94
CODE32 += b"\x40\x9f\x10\x30" # bns cr7,.+0x1030
CODE32 += b"\x42\x00\xff\xd8" # bdnz .-0x28
CODE32 += b"\x4d\x82\x00\x20" # beqlr
CODE32 += b"\x4e\x80\x00\x20" # blr
_python3 = sys.version_info.major == 3
all_tests = (
(CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, CODE32, "PPC branch instruction decoding", 0),
)
def to_hex(s):
if _python3:
return " ".join("0x{0:02x}".format(c) for c in s) # <-- Python 3 is OK
else:
return " ".join("0x{0:02x}".format(ord(c)) for c in s)
# ## Test cs_disasm_quick()
def test_cs_disasm_quick():
for (arch, mode, code, comment, syntax) in all_tests:
print("Platform: %s" % comment)
print("Code: %s" %(to_hex(code))),
print("Disasm:")
for (addr, size, mnemonic, op_str) in cs_disasm_lite(arch, mode, code, 0x1000):
print("0x%x:\t%s\t%s" % (addr, mnemonic, op_str))
print()
if __name__ == '__main__':
test_cs_disasm_quick()
|
<commit_before><commit_msg>Add ppc branch test suite<commit_after>#!/usr/bin/env python
# Capstone by Nguyen Anh Quynh <aquynh@gmail.com>
# PPC Branch testing suite by kratolp
from __future__ import print_function
import sys
from capstone import *
CODE32 = b"\x48\x01\x05\x15" # bl .+0x10514
CODE32 += b"\x4B\xff\xff\xfd" # bl .-0x4
CODE32 += b"\x48\x00\x00\x0c" # b .+0xc
CODE32 += b"\x41\x80\xff\xd8" # blt .-0x28
CODE32 += b"\x40\x80\xff\xec" # bge .-0x14
CODE32 += b"\x41\x84\x01\x6c" # blt cr1, .+0x16c
CODE32 += b"\x41\x82\x00\x10" # beq .+0x10
CODE32 += b"\x40\x82\x00\x08" # bne .+0x8
CODE32 += b"\x40\x95\x00\x94" # ble cr5,.+0x94
CODE32 += b"\x40\x9f\x10\x30" # bns cr7,.+0x1030
CODE32 += b"\x42\x00\xff\xd8" # bdnz .-0x28
CODE32 += b"\x4d\x82\x00\x20" # beqlr
CODE32 += b"\x4e\x80\x00\x20" # blr
_python3 = sys.version_info.major == 3
all_tests = (
(CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, CODE32, "PPC branch instruction decoding", 0),
)
def to_hex(s):
if _python3:
return " ".join("0x{0:02x}".format(c) for c in s) # <-- Python 3 is OK
else:
return " ".join("0x{0:02x}".format(ord(c)) for c in s)
# ## Test cs_disasm_quick()
def test_cs_disasm_quick():
for (arch, mode, code, comment, syntax) in all_tests:
print("Platform: %s" % comment)
print("Code: %s" %(to_hex(code))),
print("Disasm:")
for (addr, size, mnemonic, op_str) in cs_disasm_lite(arch, mode, code, 0x1000):
print("0x%x:\t%s\t%s" % (addr, mnemonic, op_str))
print()
if __name__ == '__main__':
test_cs_disasm_quick()
|
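The suite above uses the lightweight tuple-based `cs_disasm_lite` API. As a minimal sketch, assuming the same Capstone Python bindings are installed, the identical buffer can also be decoded through the object-oriented `Cs` interface, which yields instruction objects instead of tuples:

from capstone import Cs, CS_ARCH_PPC, CS_MODE_BIG_ENDIAN

md = Cs(CS_ARCH_PPC, CS_MODE_BIG_ENDIAN)
for insn in md.disasm(CODE32, 0x1000):  # CODE32 as defined in the suite above
    print("0x%x:\t%s\t%s" % (insn.address, insn.mnemonic, insn.op_str))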
|
4a561e594aa026ea038f0df4a65bd0438beaa112
|
webapp/apps/staff/views.py
|
webapp/apps/staff/views.py
|
from django.shortcuts import get_object_or_404, render, redirect
from apps.staff.forms import NewUserForm
from django.contrib.auth.models import User
from django.contrib import messages
def index_view(request):
users = User.objects.all()
return render(request, 'staff/users/index.jinja',
{'users': users})
def users_new_view(request):
if request.method == 'POST':
form = NewUserForm(request.POST)
if form.is_valid():
username = form.cleaned_data['username']
email = form.cleaned_data['email']
if User.objects.filter(email=email).exists():
messages.add_message(request, messages.ERROR, 'A user with that email already exists')
else:
User.objects.create_user(
username = form.cleaned_data['username'],
email = email,
password = form.cleaned_data['password'],
last_name = form.cleaned_data['last_name'],
first_name = form.cleaned_data['first_name']
)
messages.add_message(request, messages.SUCCESS, 'User created!')
else:
form = NewUserForm()
messages.add_message(request, messages.INFO, 'This is a message!')
return render(request, 'staff/users/new.jinja', {
'form': form,
})
|
Add list of users to staff/users
|
Add list of users to staff/users
Return a list of users to staff/users index view
|
Python
|
apache-2.0
|
patrickspencer/compass-python,patrickspencer/compass,patrickspencer/compass,patrickspencer/compass-python,patrickspencer/compass-python,patrickspencer/compass-python,patrickspencer/compass-python,patrickspencer/compass,patrickspencer/compass,patrickspencer/compass
|
Add list of users to staff/users
Return a list of users to staff/users index view
|
from django.shortcuts import get_object_or_404, render, redirect
from apps.staff.forms import NewUserForm
from django.contrib.auth.models import User
from django.contrib import messages
def index_view(request):
users = User.objects.all()
return render(request, 'staff/users/index.jinja',
{'users': users})
def users_new_view(request):
if request.method == 'POST':
form = NewUserForm(request.POST)
if form.is_valid():
username = form.cleaned_data['username']
email = form.cleaned_data['email']
if User.objects.filter(email=email).exists():
messages.add_message(request, messages.ERROR, 'A user with that email already exists')
else:
User.objects.create_user(
username = form.cleaned_data['username'],
email = email,
password = form.cleaned_data['password'],
last_name = form.cleaned_data['last_name'],
first_name = form.cleaned_data['first_name']
)
messages.add_message(request, messages.SUCCESS, 'User created!')
else:
form = NewUserForm()
messages.add_message(request, messages.INFO, 'This is a message!')
return render(request, 'staff/users/new.jinja', {
'form': form,
})
|
<commit_before><commit_msg>Add list of users to staff/users
Return a list of users to staff/users index view<commit_after>
|
from django.shortcuts import get_object_or_404, render, redirect
from apps.staff.forms import NewUserForm
from django.contrib.auth.models import User
from django.contrib import messages
def index_view(request):
users = User.objects.all()
return render(request, 'staff/users/index.jinja',
{'users': users})
def users_new_view(request):
if request.method == 'POST':
form = NewUserForm(request.POST)
if form.is_valid():
username = form.cleaned_data['username']
email = form.cleaned_data['email']
if User.objects.filter(email=email).exists():
messages.add_message(request, messages.ERROR, 'A user with that email already exists')
else:
User.objects.create_user(
username = form.cleaned_data['username'],
email = email,
password = form.cleaned_data['password'],
last_name = form.cleaned_data['last_name'],
first_name = form.cleaned_data['first_name']
)
# messages = "User created"
messages.add_message(request, messages.SUCCESS, 'User create!.')
else:
form = NewUserForm()
messages.add_message(request, messages.INFO, 'This is a message!')
return render(request, 'staff/users/new.jinja', {
'form': form,
})
|
Add list of users to staff/users
Return a list of users to staff/users index viewfrom django.shortcuts import get_object_or_404, render, redirect
from apps.staff.forms import NewUserForm
from django.contrib.auth.models import User
from django.contrib import messages
def index_view(request):
users = User.objects.all()
return render(request, 'staff/users/index.jinja',
{'users': users})
def users_new_view(request):
if request.method == 'POST':
form = NewUserForm(request.POST)
if form.is_valid():
username = form.cleaned_data['username']
email = form.cleaned_data['email']
if User.objects.filter(email=email).exists():
messages.add_message(request, messages.ERROR, 'A user with that email already exists')
else:
User.objects.create_user(
username = form.cleaned_data['username'],
email = email,
password = form.cleaned_data['password'],
last_name = form.cleaned_data['last_name'],
first_name = form.cleaned_data['first_name']
)
messages.add_message(request, messages.SUCCESS, 'User created!')
else:
form = NewUserForm()
messages.add_message(request, messages.INFO, 'This is a message!')
return render(request, 'staff/users/new.jinja', {
'form': form,
})
|
<commit_before><commit_msg>Add list of users to staff/users
Return a list of users to staff/users index view<commit_after>from django.shortcuts import get_object_or_404, render, redirect
from apps.staff.forms import NewUserForm
from django.contrib.auth.models import User
from django.contrib import messages
def index_view(request):
users = User.objects.all()
return render(request, 'staff/users/index.jinja',
{'users': users})
def users_new_view(request):
if request.method == 'POST':
form = NewUserForm(request.POST)
if form.is_valid():
username = form.cleaned_data['username']
email = form.cleaned_data['email']
if User.objects.filter(email=email).exists():
messages.add_message(request, messages.ERROR, 'A user with that email already exists')
else:
User.objects.create_user(
username = form.cleaned_data['username'],
email = email,
password = form.cleaned_data['password'],
last_name = form.cleaned_data['last_name'],
first_name = form.cleaned_data['first_name']
)
messages.add_message(request, messages.SUCCESS, 'User created!')
else:
form = NewUserForm()
messages.add_message(request, messages.INFO, 'This is a message!')
return render(request, 'staff/users/new.jinja', {
'form': form,
})
|
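The view depends on `NewUserForm` from `apps.staff.forms`, which is not part of this record. A hypothetical minimal definition, inferred only from the `cleaned_data` keys used in the view (the real form may differ):

from django import forms

class NewUserForm(forms.Form):
    # Hypothetical sketch -- field names inferred from the view's cleaned_data keys.
    username = forms.CharField(max_length=150)
    email = forms.EmailField()
    password = forms.CharField(widget=forms.PasswordInput)
    first_name = forms.CharField(required=False)
    last_name = forms.CharField(required=False)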
|
51c954e49d19ebb5e707ec3cd164d3fd9ba5fbe0
|
pcs/packets/ipv6ext.py
|
pcs/packets/ipv6ext.py
|
# Copyright (C) 2013, Neville-neil Consulting
# All Rights Reserved.
#
# Redistribution And Use In Source And Binary Forms, With Or Without
# Modification, Are Permitted Provided That The Following Conditions Are
# Met:
#
# Redistributions Of Source Code Must Retain The Above Copyright Notice,
# This List Of Conditions And The Following Disclaimer.
#
# Redistributions In Binary Form Must Reproduce The Above Copyright
# Notice, This List Of Conditions And The Following Disclaimer In The
# Documentation And/Or Other Materials Provided With The Distribution.
#
# Neither The Name Of Neville-neil Consulting Nor The Names Of Its
# Contributors May Be Used To Endorse Or Promote Products Derived From
# This Software Without Specific Prior Written Permission.
#
# This Software Is Provided By The Copyright Holders And Contributors
# "As Is" And Any Express Or Implied Warranties, Including, But Not
# Limited To, The Implied Warranties Of Merchantability And Fitness For
# A Particular Purpose Are Disclaimed. In No Event Shall The Copyright
# Owner Or Contributors Be Liable For Any Direct, Indirect, Incidental,
# Special, Exemplary, Or Consequential Damages (Including, But Not
# Limited To, Procurement Of Substitute Goods Or Services; Loss Of Use,
# Data, Or Profits; Or Business Interruption) However Caused And On Any
# Theory Of Liability, Whether In Contract, Strict Liability, Or Tort
# (Including Negligence Or Otherwise) Arising In Any Way Out Of The Use
# Of This Software, Even If Advised Of The Possibility Of Such Damage.
#
# File: $Id:$
#
# Author: Mike Karels
#
# Description: IPv6 routing extension header
import pcs
class rt_ext(pcs.Packet):
""" Routing extension header, type 0 """
_layout = pcs.Layout()
def __init__(self, bytes = None, count = 1, **kv):
next_header = pcs.Field("next_header", 8)
length = pcs.Field("length", 8, default = 2 * count)
type = pcs.Field("type", 8, default = 0)
segments_left = pcs.Field("segments_left", 8, default = count)
reserved = pcs.Field("reserved", 4 * 8, default = 0)
# XXX just define one address for convenience
addr1 = pcs.StringField("addr1", 16 * 8)
pcs.Packet.__init__(self,
[next_header, length, type, segments_left,
reserved, addr1], bytes, **kv)
self.description = "Type 0 Routing header"
|
Add a class that implements the IPv6 routing header.
|
Add a class that implements the IPv6 routing header.
|
Python
|
bsd-3-clause
|
gvnn3/PCS,gvnn3/PCS
|
Add a class that implements the IPv6 routing header.
|
# Copyright (C) 2013, Neville-neil Consulting
# All Rights Reserved.
#
# Redistribution And Use In Source And Binary Forms, With Or Without
# Modification, Are Permitted Provided That The Following Conditions Are
# Met:
#
# Redistributions Of Source Code Must Retain The Above Copyright Notice,
# This List Of Conditions And The Following Disclaimer.
#
# Redistributions In Binary Form Must Reproduce The Above Copyright
# Notice, This List Of Conditions And The Following Disclaimer In The
# Documentation And/Or Other Materials Provided With The Distribution.
#
# Neither The Name Of Neville-neil Consulting Nor The Names Of Its
# Contributors May Be Used To Endorse Or Promote Products Derived From
# This Software Without Specific Prior Written Permission.
#
# This Software Is Provided By The Copyright Holders And Contributors
# "As Is" And Any Express Or Implied Warranties, Including, But Not
# Limited To, The Implied Warranties Of Merchantability And Fitness For
# A Particular Purpose Are Disclaimed. In No Event Shall The Copyright
# Owner Or Contributors Be Liable For Any Direct, Indirect, Incidental,
# Special, Exemplary, Or Consequential Damages (Including, But Not
# Limited To, Procurement Of Substitute Goods Or Services; Loss Of Use,
# Data, Or Profits; Or Business Interruption) However Caused And On Any
# Theory Of Liability, Whether In Contract, Strict Liability, Or Tort
# (Including Negligence Or Otherwise) Arising In Any Way Out Of The Use
# Of This Software, Even If Advised Of The Possibility Of Such Damage.
#
# File: $Id:$
#
# Author: Mike Karels
#
# Description: IPv6 routing extension header
import pcs
class rt_ext(pcs.Packet):
""" Routing extension header, type 0 """
_layout = pcs.Layout()
def __init__(self, bytes = None, count = 1, **kv):
next_header = pcs.Field("next_header", 8)
length = pcs.Field("length", 8, default = 2 * count)
type = pcs.Field("type", 8, default = 0)
segments_left = pcs.Field("segments_left", 8, default = count)
reserved = pcs.Field("reserved", 4 * 8, default = 0)
# XXX just define one address for convenience
addr1 = pcs.StringField("addr1", 16 * 8)
pcs.Packet.__init__(self,
[next_header, length, type, segments_left,
reserved, addr1], bytes, **kv)
self.description = "Type 0 Routing header"
|
<commit_before><commit_msg>Add a class that implements the IPv6 routing header.<commit_after>
|
# Copyright (C) 2013, Neville-neil Consulting
# All Rights Reserved.
#
# Redistribution And Use In Source And Binary Forms, With Or Without
# Modification, Are Permitted Provided That The Following Conditions Are
# Met:
#
# Redistributions Of Source Code Must Retain The Above Copyright Notice,
# This List Of Conditions And The Following Disclaimer.
#
# Redistributions In Binary Form Must Reproduce The Above Copyright
# Notice, This List Of Conditions And The Following Disclaimer In The
# Documentation And/Or Other Materials Provided With The Distribution.
#
# Neither The Name Of Neville-neil Consulting Nor The Names Of Its
# Contributors May Be Used To Endorse Or Promote Products Derived From
# This Software Without Specific Prior Written Permission.
#
# This Software Is Provided By The Copyright Holders And Contributors
# "As Is" And Any Express Or Implied Warranties, Including, But Not
# Limited To, The Implied Warranties Of Merchantability And Fitness For
# A Particular Purpose Are Disclaimed. In No Event Shall The Copyright
# Owner Or Contributors Be Liable For Any Direct, Indirect, Incidental,
# Special, Exemplary, Or Consequential Damages (Including, But Not
# Limited To, Procurement Of Substitute Goods Or Services; Loss Of Use,
# Data, Or Profits; Or Business Interruption) However Caused And On Any
# Theory Of Liability, Whether In Contract, Strict Liability, Or Tort
# (Including Negligence Or Otherwise) Arising In Any Way Out Of The Use
# Of This Software, Even If Advised Of The Possibility Of Such Damage.
#
# File: $Id:$
#
# Author: Mike Karels
#
# Description: IPv6 routing extension header
import pcs
class rt_ext(pcs.Packet):
""" Routing extension header, type 0 """
_layout = pcs.Layout()
def __init__(self, bytes = None, count = 1, **kv):
next_header = pcs.Field("next_header", 8)
length = pcs.Field("length", 8, default = 2 * count)
type = pcs.Field("type", 8, default = 0)
segments_left = pcs.Field("segments_left", 8, default = count)
reserved = pcs.Field("reserved", 4 * 8, default = 0)
# XXX just define one address for convenience
addr1 = pcs.StringField("addr1", 16 * 8)
pcs.Packet.__init__(self,
[next_header, length, type, segments_left,
reserved, addr1], bytes, **kv)
self.description = "Type 0 Routing header"
|
Add a class that implements the IPv6 routing header.# Copyright (C) 2013, Neville-neil Consulting
# All Rights Reserved.
#
# Redistribution And Use In Source And Binary Forms, With Or Without
# Modification, Are Permitted Provided That The Following Conditions Are
# Met:
#
# Redistributions Of Source Code Must Retain The Above Copyright Notice,
# This List Of Conditions And The Following Disclaimer.
#
# Redistributions In Binary Form Must Reproduce The Above Copyright
# Notice, This List Of Conditions And The Following Disclaimer In The
# Documentation And/Or Other Materials Provided With The Distribution.
#
# Neither The Name Of Neville-neil Consulting Nor The Names Of Its
# Contributors May Be Used To Endorse Or Promote Products Derived From
# This Software Without Specific Prior Written Permission.
#
# This Software Is Provided By The Copyright Holders And Contributors
# "As Is" And Any Express Or Implied Warranties, Including, But Not
# Limited To, The Implied Warranties Of Merchantability And Fitness For
# A Particular Purpose Are Disclaimed. In No Event Shall The Copyright
# Owner Or Contributors Be Liable For Any Direct, Indirect, Incidental,
# Special, Exemplary, Or Consequential Damages (Including, But Not
# Limited To, Procurement Of Substitute Goods Or Services; Loss Of Use,
# Data, Or Profits; Or Business Interruption) However Caused And On Any
# Theory Of Liability, Whether In Contract, Strict Liability, Or Tort
# (Including Negligence Or Otherwise) Arising In Any Way Out Of The Use
# Of This Software, Even If Advised Of The Possibility Of Such Damage.
#
# File: $Id:$
#
# Author: Mike Karels
#
# Description: IPv6 routing extension header
import pcs
class rt_ext(pcs.Packet):
""" Routing extension header, type 0 """
_layout = pcs.Layout()
def __init__(self, bytes = None, count = 1, **kv):
next_header = pcs.Field("next_header", 8)
length = pcs.Field("length", 8, default = 2 * count)
type = pcs.Field("type", 8, default = 0)
segments_left = pcs.Field("segments_left", 8, default = count)
reserved = pcs.Field("reserved", 4 * 8, default = 0)
# XXX just define one address for convenience
addr1 = pcs.StringField("addr1", 16 * 8)
pcs.Packet.__init__(self,
[next_header, length, type, segments_left,
reserved, addr1], bytes, **kv)
self.description = "Type 0 Routing header"
|
<commit_before><commit_msg>Add a class that implements the IPv6 routing header.<commit_after># Copyright (C) 2013, Neville-neil Consulting
# All Rights Reserved.
#
# Redistribution And Use In Source And Binary Forms, With Or Without
# Modification, Are Permitted Provided That The Following Conditions Are
# Met:
#
# Redistributions Of Source Code Must Retain The Above Copyright Notice,
# This List Of Conditions And The Following Disclaimer.
#
# Redistributions In Binary Form Must Reproduce The Above Copyright
# Notice, This List Of Conditions And The Following Disclaimer In The
# Documentation And/Or Other Materials Provided With The Distribution.
#
# Neither The Name Of Neville-neil Consulting Nor The Names Of Its
# Contributors May Be Used To Endorse Or Promote Products Derived From
# This Software Without Specific Prior Written Permission.
#
# This Software Is Provided By The Copyright Holders And Contributors
# "As Is" And Any Express Or Implied Warranties, Including, But Not
# Limited To, The Implied Warranties Of Merchantability And Fitness For
# A Particular Purpose Are Disclaimed. In No Event Shall The Copyright
# Owner Or Contributors Be Liable For Any Direct, Indirect, Incidental,
# Special, Exemplary, Or Consequential Damages (Including, But Not
# Limited To, Procurement Of Substitute Goods Or Services; Loss Of Use,
# Data, Or Profits; Or Business Interruption) However Caused And On Any
# Theory Of Liability, Whether In Contract, Strict Liability, Or Tort
# (Including Negligence Or Otherwise) Arising In Any Way Out Of The Use
# Of This Software, Even If Advised Of The Possibility Of Such Damage.
#
# File: $Id:$
#
# Author: Mike Karels
#
# Description: IPv6 routing extension header
import pcs
class rt_ext(pcs.Packet):
""" Routing extension header, type 0 """
_layout = pcs.Layout()
def __init__(self, bytes = None, count = 1, **kv):
next_header = pcs.Field("next_header", 8)
length = pcs.Field("length", 8, default = 2 * count)
type = pcs.Field("type", 8, default = 0)
segments_left = pcs.Field("segments_left", 8, default = count)
reserved = pcs.Field("reserved", 4 * 8, default = 0)
# XXX just define one address for convenience
addr1 = pcs.StringField("addr1", 16 * 8)
pcs.Packet.__init__(self,
[next_header, length, type, segments_left,
reserved, addr1], bytes, **kv)
self.description = "Type 0 Routing header"
|
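As a minimal usage sketch, assuming PCS's usual attribute-style field access, the header can be built and populated as below; the protocol number and address bytes are illustrative placeholders, not values from the commit:

rt = rt_ext(count=1)
rt.next_header = 59       # placeholder: IPv6 "no next header" protocol number
rt.addr1 = '\x00' * 16    # placeholder 128-bit address
print(rt.description)     # "Type 0 Routing header"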
|
ab36778ec3c8ed69ce798816161ee35a368e2dc2
|
tests/test_base.py
|
tests/test_base.py
|
# Copyright 2013 OpenStack Foundation
# Copyright (C) 2013 Yahoo! Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import testtools
from glanceclient.common import base
class TestBase(testtools.TestCase):
def test_resource_repr(self):
r = base.Resource(None, dict(foo="bar", baz="spam"))
self.assertEqual(repr(r), "<Resource baz=spam, foo=bar>")
def test_getid(self):
self.assertEqual(base.getid(4), 4)
class TmpObject(object):
id = 4
self.assertEqual(base.getid(TmpObject), 4)
def test_two_resources_with_same_id_are_equal(self):
# Two resources of the same type with the same id: equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 1, 'name': 'hello'})
self.assertEqual(r1, r2)
def test_two_resources_with_eq_info_are_equal(self):
# Two resources with no ID: equal if their info is equal
r1 = base.Resource(None, {'name': 'joe', 'age': 12})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertEqual(r1, r2)
def test_two_resources_with_diff_id_are_not_equal(self):
# Two resources with diff ID: not equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 2, 'name': 'hello'})
self.assertNotEqual(r1, r2)
def test_two_resources_with_not_eq_info_are_not_equal(self):
# Two resources with no ID: not equal if their info is not equal
r1 = base.Resource(None, {'name': 'bill', 'age': 21})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertNotEqual(r1, r2)
|
Improve unit tests for python-glanceclient.glanceclient.common.base
|
Improve unit tests for python-glanceclient.glanceclient.common.base
Add several tests for glanceclient.common.base module
Fixes: bug #1144158
Change-Id: Ifc288075c79849ee1384f09f513874ee08cd0248
|
Python
|
apache-2.0
|
ntt-sic/python-glanceclient,citrix-openstack-build/python-glanceclient,metacloud/python-glanceclient,alexpilotti/python-glanceclient,metacloud/python-glanceclient,klmitch/python-glanceclient,klmitch/python-glanceclient,citrix-openstack-build/python-glanceclient,varunarya10/python-glanceclient,JioCloud/python-glanceclient,alexpilotti/python-glanceclient,openstack/python-glanceclient,mmasaki/python-glanceclient,openstack/python-glanceclient,ntt-sic/python-glanceclient,mmasaki/python-glanceclient,varunarya10/python-glanceclient,JioCloud/python-glanceclient
|
Improve unit tests for python-glanceclient.glanceclient.common.base
Add several tests for glanceclient.common.base module
Fixes: bug #1144158
Change-Id: Ifc288075c79849ee1384f09f513874ee08cd0248
|
# Copyright 2013 OpenStack Foundation
# Copyright (C) 2013 Yahoo! Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import testtools
from glanceclient.common import base
class TestBase(testtools.TestCase):
def test_resource_repr(self):
r = base.Resource(None, dict(foo="bar", baz="spam"))
self.assertEqual(repr(r), "<Resource baz=spam, foo=bar>")
def test_getid(self):
self.assertEqual(base.getid(4), 4)
class TmpObject(object):
id = 4
self.assertEqual(base.getid(TmpObject), 4)
def test_two_resources_with_same_id_are_equal(self):
# Two resources of the same type with the same id: equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 1, 'name': 'hello'})
self.assertEqual(r1, r2)
def test_two_resources_with_eq_info_are_equal(self):
# Two resources with no ID: equal if their info is equal
r1 = base.Resource(None, {'name': 'joe', 'age': 12})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertEqual(r1, r2)
def test_two_resources_with_diff_id_are_not_equal(self):
# Two resources with diff ID: not equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 2, 'name': 'hello'})
self.assertNotEqual(r1, r2)
def test_two_resources_with_not_eq_info_are_not_equal(self):
# Two resources with no ID: not equal if their info is not equal
r1 = base.Resource(None, {'name': 'bill', 'age': 21})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertNotEqual(r1, r2)
|
<commit_before><commit_msg>Improve unit tests for python-glanceclient.glanceclient.common.base
Add several tests for glanceclient.common.base module
Fixes: bug #1144158
Change-Id: Ifc288075c79849ee1384f09f513874ee08cd0248<commit_after>
|
# Copyright 2013 OpenStack Foundation
# Copyright (C) 2013 Yahoo! Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import testtools
from glanceclient.common import base
class TestBase(testtools.TestCase):
def test_resource_repr(self):
r = base.Resource(None, dict(foo="bar", baz="spam"))
self.assertEqual(repr(r), "<Resource baz=spam, foo=bar>")
def test_getid(self):
self.assertEqual(base.getid(4), 4)
class TmpObject(object):
id = 4
self.assertEqual(base.getid(TmpObject), 4)
def test_two_resources_with_same_id_are_equal(self):
# Two resources of the same type with the same id: equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 1, 'name': 'hello'})
self.assertEqual(r1, r2)
def test_two_resources_with_eq_info_are_equal(self):
# Two resources with no ID: equal if their info is equal
r1 = base.Resource(None, {'name': 'joe', 'age': 12})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertEqual(r1, r2)
def test_two_resources_with_diff_id_are_not_equal(self):
# Two resources with diff ID: not equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 2, 'name': 'hello'})
self.assertNotEqual(r1, r2)
def test_two_resources_with_not_eq_info_are_not_equal(self):
# Two resources with no ID: not equal if their info is not equal
r1 = base.Resource(None, {'name': 'bill', 'age': 21})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertNotEqual(r1, r2)
|
Improve unit tests for python-glanceclient.glanceclient.common.base
Add several tests for glanceclient.common.base module
Fixes: bug #1144158
Change-Id: Ifc288075c79849ee1384f09f513874ee08cd0248# Copyright 2013 OpenStack Foundation
# Copyright (C) 2013 Yahoo! Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import testtools
from glanceclient.common import base
class TestBase(testtools.TestCase):
def test_resource_repr(self):
r = base.Resource(None, dict(foo="bar", baz="spam"))
self.assertEqual(repr(r), "<Resource baz=spam, foo=bar>")
def test_getid(self):
self.assertEqual(base.getid(4), 4)
class TmpObject(object):
id = 4
self.assertEqual(base.getid(TmpObject), 4)
def test_two_resources_with_same_id_are_equal(self):
# Two resources of the same type with the same id: equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 1, 'name': 'hello'})
self.assertEqual(r1, r2)
def test_two_resources_with_eq_info_are_equal(self):
# Two resources with no ID: equal if their info is equal
r1 = base.Resource(None, {'name': 'joe', 'age': 12})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertEqual(r1, r2)
def test_two_resources_with_diff_id_are_not_equal(self):
# Two resources with diff ID: not equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 2, 'name': 'hello'})
self.assertNotEqual(r1, r2)
def test_two_resources_with_not_eq_info_are_not_equal(self):
# Two resources with no ID: not equal if their info is not equal
r1 = base.Resource(None, {'name': 'bill', 'age': 21})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertNotEqual(r1, r2)
|
<commit_before><commit_msg>Improve unit tests for python-glanceclient.glanceclient.common.base
Add several tests for glanceclient.common.base module
Fixes: bug #1144158
Change-Id: Ifc288075c79849ee1384f09f513874ee08cd0248<commit_after># Copyright 2013 OpenStack Foundation
# Copyright (C) 2013 Yahoo! Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import testtools
from glanceclient.common import base
class TestBase(testtools.TestCase):
def test_resource_repr(self):
r = base.Resource(None, dict(foo="bar", baz="spam"))
self.assertEqual(repr(r), "<Resource baz=spam, foo=bar>")
def test_getid(self):
self.assertEqual(base.getid(4), 4)
class TmpObject(object):
id = 4
self.assertEqual(base.getid(TmpObject), 4)
def test_two_resources_with_same_id_are_equal(self):
# Two resources of the same type with the same id: equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 1, 'name': 'hello'})
self.assertEqual(r1, r2)
def test_two_resources_with_eq_info_are_equal(self):
# Two resources with no ID: equal if their info is equal
r1 = base.Resource(None, {'name': 'joe', 'age': 12})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertEqual(r1, r2)
def test_two_resources_with_diff_id_are_not_equal(self):
# Two resources with diff ID: not equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 2, 'name': 'hello'})
self.assertNotEqual(r1, r2)
def test_two_resources_with_not_eq_info_are_not_equal(self):
# Two resources with no ID: not equal if their info is not equal
r1 = base.Resource(None, {'name': 'bill', 'age': 21})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertNotEqual(r1, r2)
|
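The tests pin down the contract of `base.getid` without showing its body. A sketch of the behaviour they imply -- an assumption about the module, not code taken from it:

def getid(obj):
    # Return obj.id when the attribute exists; otherwise hand back obj itself.
    try:
        return obj.id
    except AttributeError:
        return obj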
|
086aad14864ddeae8cf050f5ed2b0f57773a4fad
|
tests/link_tests/substitution_tests.py
|
tests/link_tests/substitution_tests.py
|
from utils import LinkTest, main
import re
class SubstitutionTest(LinkTest):
def testSubstitutionFunction(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", lambda m: 'Hello world!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testSubstitutionString(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", 'Hello world!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testOrdering(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", 'Hello world!'),
(r"<.+>", 'Bye!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testCounter(self):
text = """
<question>
<question>
<question>
"""
count = 0
def sub(m):
nonlocal count
count += 1
return '<h1>Question {}</h1>'.format(count)
subs = [
(r"<question>", sub),
]
expect = """
<h1>Question 1</h1>
<h1>Question 2</h1>
<h1>Question 3</h1>
"""
self.assertSubstitution(text, expect, subs)
class ScrapeHeadersTest(LinkTest):
def testBasic(self):
text = """
+ Header 1
+ Not a header
+ Header 2
+ Header 3
"""
regex = r"\+ Header (\d+)"
translate = lambda m: 'Title ' + m.group(1)
expect = [
'Title 1',
'Title 2',
'Title 3',
]
self.assertHeaders(text, regex, translate, expect)
def testNoHeaders(self):
text = """
No headers
to be found!
"""
regex = r"\+ Header (\d+)"
translate = lambda m: 'Title ' + m.group(1)
expect = []
self.assertHeaders(text, regex, translate, expect)
def testCounter(self):
text = """
+ Header To be
+ Header Or not to be
+ Header That is the question
"""
regex = r"\+ Header ([\w ]+)"
count = 0
def translate(m):
nonlocal count
count += 1
return "Section {}: {}".format(count, m.group(1))
expect = [
'Section 1: To be',
'Section 2: Or not to be',
'Section 3: That is the question',
]
self.assertHeaders(text, regex, translate, expect)
if __name__ == '__main__':
main()
|
Add substitution and scrape_header tests
|
Add substitution and scrape_header tests
|
Python
|
mit
|
albert12132/templar,albert12132/templar
|
Add substitution and scrape_header tests
|
from utils import LinkTest, main
import re
class SubstitutionTest(LinkTest):
def testSubstitutionFunction(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", lambda m: 'Hello world!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testSubstitutionString(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", 'Hello world!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testOrdering(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", 'Hello world!'),
(r"<.+>", 'Bye!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testCounter(self):
text = """
<question>
<question>
<question>
"""
count = 0
def sub(m):
nonlocal count
count += 1
return '<h1>Question {}</h1>'.format(count)
subs = [
(r"<question>", sub),
]
expect = """
<h1>Question 1</h1>
<h1>Question 2</h1>
<h1>Question 3</h1>
"""
self.assertSubstitution(text, expect, subs)
class ScrapeHeadersTest(LinkTest):
def testBasic(self):
text = """
+ Header 1
+ Not a header
+ Header 2
+ Header 3
"""
regex = r"\+ Header (\d+)"
translate = lambda m: 'Title ' + m.group(1)
expect = [
'Title 1',
'Title 2',
'Title 3',
]
self.assertHeaders(text, regex, translate, expect)
def testNoHeaders(self):
text = """
No headers
to be found!
"""
regex = r"\+ Header (\d+)"
translate = lambda m: 'Title ' + m.group(1)
expect = []
self.assertHeaders(text, regex, translate, expect)
def testCounter(self):
text = """
+ Header To be
+ Header Or not to be
+ Header That is the question
"""
regex = r"\+ Header ([\w ]+)"
count = 0
def translate(m):
nonlocal count
count += 1
return "Section {}: {}".format(count, m.group(1))
expect = [
'Section 1: To be',
'Section 2: Or not to be',
'Section 3: That is the question',
]
self.assertHeaders(text, regex, translate, expect)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add substitution and scrape_header tests<commit_after>
|
from utils import LinkTest, main
import re
class SubstitutionTest(LinkTest):
def testSubstitutionFunction(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", lambda m: 'Hello world!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testSubstitutionString(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", 'Hello world!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testOrdering(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", 'Hello world!'),
(r"<.+>", 'Bye!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testCounter(self):
text = """
<question>
<question>
<question>
"""
count = 0
def sub(m):
nonlocal count
count += 1
return '<h1>Question {}</h1>'.format(count)
subs = [
(r"<question>", sub),
]
expect = """
<h1>Question 1</h1>
<h1>Question 2</h1>
<h1>Question 3</h1>
"""
self.assertSubstitution(text, expect, subs)
class ScrapeHeadersTest(LinkTest):
def testBasic(self):
text = """
+ Header 1
+ Not a header
+ Header 2
+ Header 3
"""
regex = r"\+ Header (\d+)"
translate = lambda m: 'Title ' + m.group(1)
expect = [
'Title 1',
'Title 2',
'Title 3',
]
self.assertHeaders(text, regex, translate, expect)
def testNoHeaders(self):
text = """
No headers
to be found!
"""
regex = r"\+ Header (\d+)"
translate = lambda m: 'Title ' + m.group(1)
expect = []
self.assertHeaders(text, regex, translate, expect)
def testCounter(self):
text = """
+ Header To be
+ Header Or not to be
+ Header That is the question
"""
regex = r"\+ Header ([\w ]+)"
count = 0
def translate(m):
nonlocal count
count += 1
return "Section {}: {}".format(count, m.group(1))
expect = [
'Section 1: To be',
'Section 2: Or not to be',
'Section 3: That is the question',
]
self.assertHeaders(text, regex, translate, expect)
if __name__ == '__main__':
main()
|
Add substitution and scrape_header testsfrom utils import LinkTest, main
import re
class SubstitutionTest(LinkTest):
def testSubstitutionFunction(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", lambda m: 'Hello world!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testSubstitutionString(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", 'Hello world!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testOrdering(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", 'Hello world!'),
(r"<.+>", 'Bye!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testCounter(self):
text = """
<question>
<question>
<question>
"""
count = 0
def sub(m):
nonlocal count
count += 1
return '<h1>Question {}</h1>'.format(count)
subs = [
(r"<question>", sub),
]
expect = """
<h1>Question 1</h1>
<h1>Question 2</h1>
<h1>Question 3</h1>
"""
self.assertSubstitution(text, expect, subs)
class ScrapeHeadersTest(LinkTest):
def testBasic(self):
text = """
+ Header 1
+ Not a header
+ Header 2
+ Header 3
"""
regex = r"\+ Header (\d+)"
translate = lambda m: 'Title ' + m.group(1)
expect = [
'Title 1',
'Title 2',
'Title 3',
]
self.assertHeaders(text, regex, translate, expect)
def testNoHeaders(self):
text = """
No headers
to be found!
"""
regex = r"\+ Header (\d+)"
translate = lambda m: 'Title ' + m.group(1)
expect = []
self.assertHeaders(text, regex, translate, expect)
def testCounter(self):
text = """
+ Header To be
+ Header Or not to be
+ Header That is the question
"""
regex = r"\+ Header ([\w ]+)"
count = 0
def translate(m):
nonlocal count
count += 1
return "Section {}: {}".format(count, m.group(1))
expect = [
'Section 1: To be',
'Section 2: Or not to be',
'Section 3: That is the question',
]
self.assertHeaders(text, regex, translate, expect)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add substitution and scrape_header tests<commit_after>from utils import LinkTest, main
import re
class SubstitutionTest(LinkTest):
def testSubstitutionFunction(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", lambda m: 'Hello world!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testSubstitutionString(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", 'Hello world!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testOrdering(self):
text = """
<pattern>
"""
subs = [
(r"<pattern>", 'Hello world!'),
(r"<.+>", 'Bye!')
]
expect = """
Hello world!
"""
self.assertSubstitution(text, expect, subs)
def testCounter(self):
text = """
<question>
<question>
<question>
"""
count = 0
def sub(m):
nonlocal count
count += 1
return '<h1>Question {}</h1>'.format(count)
subs = [
(r"<question>", sub),
]
expect = """
<h1>Question 1</h1>
<h1>Question 2</h1>
<h1>Question 3</h1>
"""
self.assertSubstitution(text, expect, subs)
class ScrapeHeadersTest(LinkTest):
def testBasic(self):
text = """
+ Header 1
+ Not a header
+ Header 2
+ Header 3
"""
regex = r"\+ Header (\d+)"
translate = lambda m: 'Title ' + m.group(1)
expect = [
'Title 1',
'Title 2',
'Title 3',
]
self.assertHeaders(text, regex, translate, expect)
def testNoHeaders(self):
text = """
No headers
to be found!
"""
regex = r"\+ Header (\d+)"
translate = lambda m: 'Title ' + m.group(1)
expect = []
self.assertHeaders(text, regex, translate, expect)
def testCounter(self):
text = """
+ Header To be
+ Header Or not to be
+ Header That is the question
"""
regex = r"\+ Header ([\w ]+)"
count = 0
def translate(m):
nonlocal count
count += 1
return "Section {}: {}".format(count, m.group(1))
expect = [
'Section 1: To be',
'Section 2: Or not to be',
'Section 3: That is the question',
]
self.assertHeaders(text, regex, translate, expect)
if __name__ == '__main__':
main()
|
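The suite targets templar's substitution engine, whose implementation is not shown here. The ordering test implies the (pattern, repl) pairs are applied sequentially in list order, so the later `<.+>` rule never sees text already consumed by an earlier one. A plausible core under that assumption (not the project's actual code):

import re

def apply_substitutions(text, subs):
    # Apply each (pattern, repl) pair in order; repl may be a string
    # or a function taking a match object, exactly as re.sub allows.
    for pattern, repl in subs:
        text = re.sub(pattern, repl, text)
    return text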
|
b7af9190f9edf015508665c9b257432434311279
|
iri_to_uri.py
|
iri_to_uri.py
|
from urllib.parse import quote, urlsplit, urlunsplit
def transform(iri):  # converts the Cyrillic parts of an IRI into a valid URI
parts = urlsplit(iri)
uri = urlunsplit((parts.scheme, parts.netloc.encode('idna').decode(
'ascii'), quote(parts.path), quote(parts.query, '='), quote(parts.fragment),))
return uri
|
Revert "fix deep night bugs :)"
|
Revert "fix deep night bugs :)"
|
Python
|
mit
|
dimishpatriot/img_pars
|
Revert "fix deep night bugs :)"
|
from urllib.parse import quote, urlsplit, urlunsplit
def transform(iri):  # converts the Cyrillic parts of an IRI into a valid URI
parts = urlsplit(iri)
uri = urlunsplit((parts.scheme, parts.netloc.encode('idna').decode(
'ascii'), quote(parts.path), quote(parts.query, '='), quote(parts.fragment),))
return uri
|
<commit_before><commit_msg>Revert "fix deep night bugs :)"<commit_after>
|
from urllib.parse import quote, urlsplit, urlunsplit
def transform(iri):  # converts the Cyrillic parts of an IRI into a valid URI
parts = urlsplit(iri)
uri = urlunsplit((parts.scheme, parts.netloc.encode('idna').decode(
'ascii'), quote(parts.path), quote(parts.query, '='), quote(parts.fragment),))
return uri
|
Revert "fix deep night bugs :)"from urllib.parse import quote, urlsplit, urlunsplit
def transform(iri):  # converts the Cyrillic parts of an IRI into a valid URI
parts = urlsplit(iri)
uri = urlunsplit((parts.scheme, parts.netloc.encode('idna').decode(
'ascii'), quote(parts.path), quote(parts.query, '='), quote(parts.fragment),))
return uri
|
<commit_before><commit_msg>Revert "fix deep night bugs :)"<commit_after>from urllib.parse import quote, urlsplit, urlunsplit
def transform(iri):  # converts the Cyrillic parts of an IRI into a valid URI
parts = urlsplit(iri)
uri = urlunsplit((parts.scheme, parts.netloc.encode('idna').decode(
'ascii'), quote(parts.path), quote(parts.query, '='), quote(parts.fragment),))
return uri
|
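A worked example of the transformation: the netloc is IDNA-encoded while the path, query, and fragment are percent-encoded. The expected output below was derived by hand from the idna and percent-encoding rules and is illustrative, not taken from the repository:

print(transform('https://пример.испытание/путь'))
# -> https://xn--e1afmkfd.xn--80akhbyknj4f/%D0%BF%D1%83%D1%82%D1%8C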
|
a78c0cec23365ed6861b68f40fd7481787aa2d81
|
tests/organize/test_views.py
|
tests/organize/test_views.py
|
from django.urls import reverse
def test_form_thank_you(client):
# Access the thank you page
resp = client.get(reverse('organize:form_thank_you'))
assert resp.status_code == 200
def test_index(client):
# Access the organize homepage
resp = client.get(reverse('organize:index'))
assert resp.status_code == 200
def test_commitment(client):
# Access the commitment page
resp = client.get(reverse('organize:commitment'))
assert resp.status_code == 200
def test_prerequisites(client):
# Access prerequisites page
resp = client.get(reverse('organize:prerequisites'))
assert resp.status_code == 200
def test_suspend(client):
# Access suspend page
resp = client.get(reverse('organize:suspend'))
assert resp.status_code == 200
|
Add tests for organize views
|
Add tests for organize views
|
Python
|
bsd-3-clause
|
DjangoGirls/djangogirls,DjangoGirls/djangogirls,DjangoGirls/djangogirls
|
Add tests for organize views
|
from django.urls import reverse
def test_form_thank_you(client):
# Access the thank you page
resp = client.get(reverse('organize:form_thank_you'))
assert resp.status_code == 200
def test_index(client):
# Access the organize homepage
resp = client.get(reverse('organize:index'))
assert resp.status_code == 200
def test_commitment(client):
# Access the commitment page
resp = client.get(reverse('organize:commitment'))
assert resp.status_code == 200
def test_prerequisites(client):
# Access prerequisites page
resp = client.get(reverse('organize:prerequisites'))
assert resp.status_code == 200
def test_suspend(client):
# Access suspend page
resp = client.get(reverse('organize:suspend'))
assert resp.status_code == 200
|
<commit_before><commit_msg>Add tests for organize views<commit_after>
|
from django.urls import reverse
def test_form_thank_you(client):
# Access the thank you page
resp = client.get(reverse('organize:form_thank_you'))
assert resp.status_code == 200
def test_index(client):
# Access the organize homepage
resp = client.get(reverse('organize:index'))
assert resp.status_code == 200
def test_commitment(client):
# Access the commitment page
resp = client.get(reverse('organize:commitment'))
assert resp.status_code == 200
def test_prerequisites(client):
# Access prerequisites page
resp = client.get(reverse('organize:prerequisites'))
assert resp.status_code == 200
def test_suspend(client):
# Access suspend page
resp = client.get(reverse('organize:suspend'))
assert resp.status_code == 200
|
Add tests for organize viewsfrom django.urls import reverse
def test_form_thank_you(client):
# Access the thank you page
resp = client.get(reverse('organize:form_thank_you'))
assert resp.status_code == 200
def test_index(client):
# Access the organize homepage
resp = client.get(reverse('organize:index'))
assert resp.status_code == 200
def test_commitment(client):
# Access the commitment page
resp = client.get(reverse('organize:commitment'))
assert resp.status_code == 200
def test_prerequisites(client):
# Access prerequisites page
resp = client.get(reverse('organize:prerequisites'))
assert resp.status_code == 200
def test_suspend(client):
# Access suspend page
resp = client.get(reverse('organize:suspend'))
assert resp.status_code == 200
|
<commit_before><commit_msg>Add tests for organize views<commit_after>from django.urls import reverse
def test_form_thank_you(client):
# Access the thank you page
resp = client.get(reverse('organize:form_thank_you'))
assert resp.status_code == 200
def test_index(client):
# Access the organize homepage
resp = client.get(reverse('organize:index'))
assert resp.status_code == 200
def test_commitment(client):
# Access the commitment page
resp = client.get(reverse('organize:commitment'))
assert resp.status_code == 200
def test_prerequisites(client):
# Access prerequisites page
resp = client.get(reverse('organize:prerequisites'))
assert resp.status_code == 200
def test_suspend(client):
# Access suspend page
resp = client.get(reverse('organize:suspend'))
assert resp.status_code == 200
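These five near-identical view tests invite parametrization; a hedged sketch, assuming the same pytest-django client fixture used above:
import pytest
from django.urls import reverse

@pytest.mark.parametrize('name', [
    'form_thank_you', 'index', 'commitment', 'prerequisites', 'suspend'])
def test_organize_view_renders(client, name):
    # Each organize page should respond with HTTP 200
    assert client.get(reverse('organize:' + name)).status_code == 200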
|
|
4036fbd858217677a3f21cf95ae5ec611ca23e61
|
scipy/linalg/tests/test_build.py
|
scipy/linalg/tests/test_build.py
|
from subprocess import call, PIPE, Popen
import sys
import re
import numpy as np
from numpy.testing import TestCase, dec
from scipy.linalg import flapack
# XXX: this is copied from numpy trunk. Can be removed when we will depend on
# numpy 1.3
class FindDependenciesLdd:
def __init__(self):
self.cmd = ['ldd']
try:
st = call(self.cmd, stdout=PIPE, stderr=PIPE)
except OSError:
raise RuntimeError("command %s cannot be run" % self.cmd)
def get_dependencies(self, file):
p = Popen(self.cmd + [file], stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
if not (p.returncode == 0):
raise RuntimeError("Failed to check dependencies for %s" % libfile)
return stdout
def grep_dependencies(self, file, deps):
stdout = self.get_dependencies(file)
rdeps = dict([(dep, re.compile(dep)) for dep in deps])
founds = []
for l in stdout.splitlines():
for k, v in rdeps.items():
if v.search(l):
founds.append(k)
return founds
class TestF77Mismatch(TestCase):
@dec.skipif(not(sys.platform[:5] == 'linux'),
"Skipping fortran compiler mismatch on non Linux platform")
def test_lapack(self):
f = FindDependenciesLdd()
deps = f.grep_dependencies(flapack.__file__,
['libg2c', 'libgfortran'])
self.failIf(len(deps) > 1,
"""Both g77 and gfortran runtimes linked in lapack_lite ! This is likely to
cause random crashes and wrong results. See numpy INSTALL.txt for more
information.""")
|
Add fortran ABI mismatch test for scipy.linalg.
|
Add fortran ABI mismatch test for scipy.linalg.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@5023 d6536bca-fef9-0310-8506-e4c0a848fbcf
|
Python
|
bsd-3-clause
|
scipy/scipy-svn,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,scipy/scipy-svn,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,scipy/scipy-svn,scipy/scipy-svn,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt
|
Add fortran ABI mismatch test for scipy.linalg.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@5023 d6536bca-fef9-0310-8506-e4c0a848fbcf
|
from subprocess import call, PIPE, Popen
import sys
import re
import numpy as np
from numpy.testing import TestCase, dec
from scipy.linalg import flapack
# XXX: this is copied from numpy trunk. Can be removed when we will depend on
# numpy 1.3
class FindDependenciesLdd:
def __init__(self):
self.cmd = ['ldd']
try:
st = call(self.cmd, stdout=PIPE, stderr=PIPE)
except OSError:
raise RuntimeError("command %s cannot be run" % self.cmd)
def get_dependencies(self, file):
p = Popen(self.cmd + [file], stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
if not (p.returncode == 0):
raise RuntimeError("Failed to check dependencies for %s" % libfile)
return stdout
def grep_dependencies(self, file, deps):
stdout = self.get_dependencies(file)
rdeps = dict([(dep, re.compile(dep)) for dep in deps])
founds = []
for l in stdout.splitlines():
for k, v in rdeps.items():
if v.search(l):
founds.append(k)
return founds
class TestF77Mismatch(TestCase):
@dec.skipif(not(sys.platform[:5] == 'linux'),
"Skipping fortran compiler mismatch on non Linux platform")
def test_lapack(self):
f = FindDependenciesLdd()
deps = f.grep_dependencies(flapack.__file__,
['libg2c', 'libgfortran'])
self.failIf(len(deps) > 1,
"""Both g77 and gfortran runtimes linked in lapack_lite ! This is likely to
cause random crashes and wrong results. See numpy INSTALL.txt for more
information.""")
|
<commit_before><commit_msg>Add fortran ABI mismatch test for scipy.linalg.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@5023 d6536bca-fef9-0310-8506-e4c0a848fbcf<commit_after>
|
from subprocess import call, PIPE, Popen
import sys
import re
import numpy as np
from numpy.testing import TestCase, dec
from scipy.linalg import flapack
# XXX: this is copied from numpy trunk. Can be removed when we will depend on
# numpy 1.3
class FindDependenciesLdd:
def __init__(self):
self.cmd = ['ldd']
try:
st = call(self.cmd, stdout=PIPE, stderr=PIPE)
except OSError:
raise RuntimeError("command %s cannot be run" % self.cmd)
def get_dependencies(self, file):
p = Popen(self.cmd + [file], stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
if not (p.returncode == 0):
raise RuntimeError("Failed to check dependencies for %s" % libfile)
return stdout
def grep_dependencies(self, file, deps):
stdout = self.get_dependencies(file)
rdeps = dict([(dep, re.compile(dep)) for dep in deps])
founds = []
for l in stdout.splitlines():
for k, v in rdeps.items():
if v.search(l):
founds.append(k)
return founds
class TestF77Mismatch(TestCase):
@dec.skipif(not(sys.platform[:5] == 'linux'),
"Skipping fortran compiler mismatch on non Linux platform")
def test_lapack(self):
f = FindDependenciesLdd()
deps = f.grep_dependencies(flapack.__file__,
['libg2c', 'libgfortran'])
self.failIf(len(deps) > 1,
"""Both g77 and gfortran runtimes linked in lapack_lite ! This is likely to
cause random crashes and wrong results. See numpy INSTALL.txt for more
information.""")
|
Add fortran ABI mismatch test for scipy.linalg.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@5023 d6536bca-fef9-0310-8506-e4c0a848fbcffrom subprocess import call, PIPE, Popen
import sys
import re
import numpy as np
from numpy.testing import TestCase, dec
from scipy.linalg import flapack
# XXX: this is copied from numpy trunk. Can be removed when we will depend on
# numpy 1.3
class FindDependenciesLdd:
def __init__(self):
self.cmd = ['ldd']
try:
st = call(self.cmd, stdout=PIPE, stderr=PIPE)
except OSError:
raise RuntimeError("command %s cannot be run" % self.cmd)
def get_dependencies(self, file):
p = Popen(self.cmd + [file], stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
if not (p.returncode == 0):
raise RuntimeError("Failed to check dependencies for %s" % libfile)
return stdout
def grep_dependencies(self, file, deps):
stdout = self.get_dependencies(file)
rdeps = dict([(dep, re.compile(dep)) for dep in deps])
founds = []
for l in stdout.splitlines():
for k, v in rdeps.items():
if v.search(l):
founds.append(k)
return founds
class TestF77Mismatch(TestCase):
@dec.skipif(not(sys.platform[:5] == 'linux'),
"Skipping fortran compiler mismatch on non Linux platform")
def test_lapack(self):
f = FindDependenciesLdd()
deps = f.grep_dependencies(flapack.__file__,
['libg2c', 'libgfortran'])
self.failIf(len(deps) > 1,
"""Both g77 and gfortran runtimes linked in lapack_lite ! This is likely to
cause random crashes and wrong results. See numpy INSTALL.txt for more
information.""")
|
<commit_before><commit_msg>Add fortran ABI mismatch test for scipy.linalg.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@5023 d6536bca-fef9-0310-8506-e4c0a848fbcf<commit_after>from subprocess import call, PIPE, Popen
import sys
import re
import numpy as np
from numpy.testing import TestCase, dec
from scipy.linalg import flapack
# XXX: this is copied from numpy trunk. Can be removed when we will depend on
# numpy 1.3
class FindDependenciesLdd:
def __init__(self):
self.cmd = ['ldd']
try:
st = call(self.cmd, stdout=PIPE, stderr=PIPE)
except OSError:
raise RuntimeError("command %s cannot be run" % self.cmd)
def get_dependencies(self, file):
p = Popen(self.cmd + [file], stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
if not (p.returncode == 0):
raise RuntimeError("Failed to check dependencies for %s" % libfile)
return stdout
def grep_dependencies(self, file, deps):
stdout = self.get_dependencies(file)
rdeps = dict([(dep, re.compile(dep)) for dep in deps])
founds = []
for l in stdout.splitlines():
for k, v in rdeps.items():
if v.search(l):
founds.append(k)
return founds
class TestF77Mismatch(TestCase):
@dec.skipif(not(sys.platform[:5] == 'linux'),
"Skipping fortran compiler mismatch on non Linux platform")
def test_lapack(self):
f = FindDependenciesLdd()
deps = f.grep_dependencies(flapack.__file__,
['libg2c', 'libgfortran'])
self.failIf(len(deps) > 1,
"""Both g77 and gfortran runtimes linked in lapack_lite ! This is likely to
cause random crashes and wrong results. See numpy INSTALL.txt for more
information.""")
|
|
4899fdff735e0dd099764327eb6e1c88ca40fb58
|
Home/feedPigeons.py
|
Home/feedPigeons.py
|
def checkio(n):
return feed(n, 1, 0)
def feed(n, pigeon, last):
if n <= last:
return last
if last < n <= pigeon:
return n
if n > pigeon:
return feed(n - pigeon, 2 * pigeon - last + 1, pigeon)
if __name__ == '__main__':
assert checkio(0) == 0, 0
assert checkio(1) == 1, 1
assert checkio(2) == 1, 2
assert checkio(5) == 3, 5
assert checkio(10) == 6, 10
print('All OK')
|
Fix the "Feed Pigeon" problem
|
Fix the "Feed Pigeon" problem
|
Python
|
mit
|
edwardzhu/checkio-solution
|
Fix the "Feed Pigeon" problem
|
def checkio(n):
return feed(n, 1, 0)
def feed(n, pigeon, last):
if n <= last:
return last
if last < n <= pigeon:
return n
if n > pigeon:
return feed(n - pigeon, 2 * pigeon - last + 1, pigeon)
if __name__ == '__main__':
assert checkio(0) == 0, 0
assert checkio(1) == 1, 1
assert checkio(2) == 1, 2
assert checkio(5) == 3, 5
assert checkio(10) == 6, 10
print('All OK')
|
<commit_before><commit_msg>Fix the "Feed Pigeon" problem<commit_after>
|
def checkio(n):
return feed(n, 1, 0)
def feed(n, pigeon, last):
if n <= last:
return last
if last < n <= pigeon:
return n
if n > pigeon:
return feed(n - pigeon, 2 * pigeon - last + 1, pigeon)
if __name__ == '__main__':
assert checkio(0) == 0, 0
assert checkio(1) == 1, 1
assert checkio(2) == 1, 2
assert checkio(5) == 3, 5
assert checkio(10) == 6, 10
print('All OK')
|
Fix the "Feed Pigeon" problemdef checkio(n):
return feed(n, 1, 0)
def feed(n, pigeon, last):
if n <= last:
return last
if last < n <= pigeon:
return n
if n > pigeon:
return feed(n - pigeon, 2 * pigeon - last + 1, pigeon)
if __name__ == '__main__':
assert checkio(0) == 0, 0
assert checkio(1) == 1, 1
assert checkio(2) == 1, 2
assert checkio(5) == 3, 5
assert checkio(10) == 6, 10
print('All OK')
|
<commit_before><commit_msg>Fix the "Feed Pigeon" problem<commit_after>def checkio(n):
return feed(n, 1, 0)
def feed(n, pigeon, last):
if n <= last:
return last
if last < n <= pigeon:
return n
if n > pigeon:
return feed(n - pigeon, 2 * pigeon - last + 1, pigeon)
if __name__ == '__main__':
assert checkio(0) == 0, 0
assert checkio(1) == 1, 1
assert checkio(2) == 1, 2
assert checkio(5) == 3, 5
assert checkio(10) == 6, 10
print('All OK')
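The recursion can also be unrolled; an equivalent iterative sketch (an assumption for illustration, checked against the asserts above):
def checkio_iterative(n):
    pigeon, last = 1, 0
    while n > pigeon:
        n -= pigeon
        pigeon, last = 2 * pigeon - last + 1, pigeon
    return last if n <= last else n

assert checkio_iterative(5) == 3
assert checkio_iterative(10) == 6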
|
|
07da39fcc68027d4373a467cbb08c35a5d941545
|
spreadflow_core/test/matchers.py
|
spreadflow_core/test/matchers.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from testtools import matchers
class MatchesInvocation(matchers.MatchesListwise):
"""
Matches an invocation recorded by :class:`unittest.mock.Mock.call_args`
Args:
*args: Matchers matching the recorded parameter at the same position.
**kwds: Matchers matching the recorded keyword parameter with the same
key.
"""
def __init__(self, *args, **kwds):
super(MatchesInvocation, self).__init__([
matchers.MatchesListwise(args),
matchers.MatchesDict(kwds)
])
|
Add a custom testtools matcher for verifying invocations
|
Add a custom testtools matcher for verifying invocations
|
Python
|
mit
|
znerol/spreadflow-core,spreadflow/spreadflow-core
|
Add a custom testtools matcher for verifying invocations
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from testtools import matchers
class MatchesInvocation(matchers.MatchesListwise):
"""
Matches an invocation recorded by :class:`unittest.mock.Mock.call_args`
Args:
*args: Matchers matching the recorded parameter at the same position.
**kwds: Matchers matching the recorded keyword parameter with the same
key.
"""
def __init__(self, *args, **kwds):
super(MatchesInvocation, self).__init__([
matchers.MatchesListwise(args),
matchers.MatchesDict(kwds)
])
|
<commit_before><commit_msg>Add a custom testtools matcher for verifying invocations<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from testtools import matchers
class MatchesInvocation(matchers.MatchesListwise):
"""
Matches an invocation recorded by :class:`unittest.mock.Mock.call_args`
Args:
*args: Matchers matching the recorded parameter at the same position.
**kwds: Matchers matching the recorded keyword parameter with the same
key.
"""
def __init__(self, *args, **kwds):
super(MatchesInvocation, self).__init__([
matchers.MatchesListwise(args),
matchers.MatchesDict(kwds)
])
|
Add a custom testtools matcher for verifying invocationsfrom __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from testtools import matchers
class MatchesInvocation(matchers.MatchesListwise):
"""
Matches an invocation recorded by :class:`unittest.mock.Mock.call_args`
Args:
*args: Matchers matching the recorded parameter at the same position.
**kwds: Matchers matching the recorded keyword parameter with the same
key.
"""
def __init__(self, *args, **kwds):
super(MatchesInvocation, self).__init__([
matchers.MatchesListwise(args),
matchers.MatchesDict(kwds)
])
|
<commit_before><commit_msg>Add a custom testtools matcher for verifying invocations<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from testtools import matchers
class MatchesInvocation(matchers.MatchesListwise):
"""
Matches an invocation recorded by :class:`unittest.mock.Mock.call_args`
Args:
*args: Matchers matching the recorded parameter at the same position.
**kwds: Matchers matching the recorded keyword parameter with the same
key.
"""
def __init__(self, *args, **kwds):
super(MatchesInvocation, self).__init__([
matchers.MatchesListwise(args),
matchers.MatchesDict(kwds)
])
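A usage sketch for the matcher against a recorded mock call; Equals comes from testtools.matchers, and unittest.mock is the Python 3 stdlib home of the mock API assumed here:
from unittest import mock
from testtools.matchers import Equals

m = mock.Mock()
m('payload', retries=3)
matcher = MatchesInvocation(Equals('payload'), retries=Equals(3))
assert matcher.match(m.call_args) is None  # None means the invocation matched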
|
|
de10ebd4192576a7a9f9fc73871cc56479f4d686
|
dedupsqlfs/lib/cache/_base.py
|
dedupsqlfs/lib/cache/_base.py
|
# -*- coding: utf8 -*-
"""
@author Sergey Dryabzhinsky
"""
from time import time
class TimedCache(object):
"""
Cache storage with timers
"""
_enable_timers = True
def __init__(self):
        self._time_spent = {}
        self._op_count = {}
        self._last_time = None
def setEnableTimers(self, flag=True):
self._enable_timers = flag is True
return self
def getOperationsCount(self):
return self._op_count
def getAllOperationsCount(self):
s = 0
if not self._enable_timers:
return s
for op, c in self._op_count.items():
s += c
return s
def incOperationsCount(self, op):
if not self._enable_timers:
return self
if not (op in self._op_count):
self._op_count[ op ] = 0
self._op_count[ op ] += 1
return self
def getTimeSpent(self):
return self._time_spent
def getAllTimeSpent(self):
s = 0
if not self._enable_timers:
return s
for op, t in self._time_spent.items():
s += t
return s
def incOperationsTimeSpent(self, op, start_time):
if not self._enable_timers:
return self
if not (op in self._time_spent):
self._time_spent[ op ] = 0
self._time_spent[ op ] += time() - start_time
return self
def startTimer(self):
if not self._enable_timers:
return self
self._last_time = time()
return self
def stopTimer(self, op):
if not self._enable_timers:
return self
self.incOperationsCount(op)
self.incOperationsTimeSpent(op, self._last_time)
self._last_time = None
return self
|
Fix release - add missed file - forced
|
Fix release - add missed file - forced
|
Python
|
mit
|
sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs
|
Fix release - add missed file - forced
|
# -*- coding: utf8 -*-
"""
@author Sergey Dryabzhinsky
"""
from time import time
class TimedCache(object):
"""
Cache storage with timers
"""
_enable_timers = True
def __init__(self):
        self._time_spent = {}
        self._op_count = {}
        self._last_time = None
def setEnableTimers(self, flag=True):
self._enable_timers = flag is True
return self
def getOperationsCount(self):
return self._op_count
def getAllOperationsCount(self):
s = 0
if not self._enable_timers:
return s
for op, c in self._op_count.items():
s += c
return s
def incOperationsCount(self, op):
if not self._enable_timers:
return self
if not (op in self._op_count):
self._op_count[ op ] = 0
self._op_count[ op ] += 1
return self
def getTimeSpent(self):
return self._time_spent
def getAllTimeSpent(self):
s = 0
if not self._enable_timers:
return s
for op, t in self._time_spent.items():
s += t
return s
def incOperationsTimeSpent(self, op, start_time):
if not self._enable_timers:
return self
if not (op in self._time_spent):
self._time_spent[ op ] = 0
self._time_spent[ op ] += time() - start_time
return self
def startTimer(self):
if not self._enable_timers:
return self
self._last_time = time()
return self
def stopTimer(self, op):
if not self._enable_timers:
return self
self.incOperationsCount(op)
self.incOperationsTimeSpent(op, self._last_time)
self._last_time = None
return self
|
<commit_before><commit_msg>Fix release - add missed file - forced<commit_after>
|
# -*- coding: utf8 -*-
"""
@author Sergey Dryabzhinsky
"""
from time import time
class TimedCache(object):
"""
Cache storage with timers
"""
_enable_timers = True
def __init__(self):
        self._time_spent = {}
        self._op_count = {}
        self._last_time = None
def setEnableTimers(self, flag=True):
self._enable_timers = flag is True
return self
def getOperationsCount(self):
return self._op_count
def getAllOperationsCount(self):
s = 0
if not self._enable_timers:
return s
for op, c in self._op_count.items():
s += c
return s
def incOperationsCount(self, op):
if not self._enable_timers:
return self
if not (op in self._op_count):
self._op_count[ op ] = 0
self._op_count[ op ] += 1
return self
def getTimeSpent(self):
return self._time_spent
def getAllTimeSpent(self):
s = 0
if not self._enable_timers:
return s
for op, t in self._time_spent.items():
s += t
return s
def incOperationsTimeSpent(self, op, start_time):
if not self._enable_timers:
return self
if not (op in self._time_spent):
self._time_spent[ op ] = 0
self._time_spent[ op ] += time() - start_time
return self
def startTimer(self):
if not self._enable_timers:
return self
self._last_time = time()
return self
def stopTimer(self, op):
if not self._enable_timers:
return self
self.incOperationsCount(op)
self.incOperationsTimeSpent(op, self._last_time)
self._last_time = None
return self
|
Fix release - add missed file - forced# -*- coding: utf8 -*-
"""
@author Sergey Dryabzhinsky
"""
from time import time
class TimedCache(object):
"""
Cache storage with timers
"""
_enable_timers = True
def __init__(self):
        self._time_spent = {}
        self._op_count = {}
        self._last_time = None
def setEnableTimers(self, flag=True):
self._enable_timers = flag is True
return self
def getOperationsCount(self):
return self._op_count
def getAllOperationsCount(self):
s = 0
if not self._enable_timers:
return s
for op, c in self._op_count.items():
s += c
return s
def incOperationsCount(self, op):
if not self._enable_timers:
return self
if not (op in self._op_count):
self._op_count[ op ] = 0
self._op_count[ op ] += 1
return self
def getTimeSpent(self):
return self._time_spent
def getAllTimeSpent(self):
s = 0
if not self._enable_timers:
return s
for op, t in self._time_spent.items():
s += t
return s
def incOperationsTimeSpent(self, op, start_time):
if not self._enable_timers:
return self
if not (op in self._time_spent):
self._time_spent[ op ] = 0
self._time_spent[ op ] += time() - start_time
return self
def startTimer(self):
if not self._enable_timers:
return self
self._last_time = time()
return self
def stopTimer(self, op):
if not self._enable_timers:
return self
self.incOperationsCount(op)
self.incOperationsTimeSpent(op, self._last_time)
self._last_time = None
return self
|
<commit_before><commit_msg>Fix release - add missed file - forced<commit_after># -*- coding: utf8 -*-
"""
@author Sergey Dryabzhinsky
"""
from time import time
class TimedCache(object):
"""
Cache storage with timers
"""
_enable_timers = True
def __init__(self):
        self._time_spent = {}
        self._op_count = {}
        self._last_time = None
def setEnableTimers(self, flag=True):
self._enable_timers = flag is True
return self
def getOperationsCount(self):
return self._op_count
def getAllOperationsCount(self):
s = 0
if not self._enable_timers:
return s
for op, c in self._op_count.items():
s += c
return s
def incOperationsCount(self, op):
if not self._enable_timers:
return self
if not (op in self._op_count):
self._op_count[ op ] = 0
self._op_count[ op ] += 1
return self
def getTimeSpent(self):
return self._time_spent
def getAllTimeSpent(self):
s = 0
if not self._enable_timers:
return s
for op, t in self._time_spent.items():
s += t
return s
def incOperationsTimeSpent(self, op, start_time):
if not self._enable_timers:
return self
if not (op in self._time_spent):
self._time_spent[ op ] = 0
self._time_spent[ op ] += time() - start_time
return self
def startTimer(self):
if not self._enable_timers:
return self
self._last_time = time()
return self
def stopTimer(self, op):
if not self._enable_timers:
return self
self.incOperationsCount(op)
self.incOperationsTimeSpent(op, self._last_time)
self._last_time = None
return self
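A minimal usage sketch of the timer bookkeeping (subclasses are assumed to supply the actual cache storage):
cache = TimedCache()
cache.startTimer()
sum(range(100000))  # stand-in for a timed cache operation
cache.stopTimer('get')
assert cache.getOperationsCount() == {'get': 1}
assert cache.getAllTimeSpent() >= 0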
|
|
fe65f99972a940b13652e98778fdaec8f98142bc
|
limit_tabs.py
|
limit_tabs.py
|
"""Same idea as ZenTabs, but simplified logic since we know tabs are already
ordered by MRU.
Unlike ZenTabs, this also makes it much easier to customize tabs that you
don't want to be closeable.
It also tweaks the formula: tabs that are not closable are *not* included
in the limit.
"""
from .lib import settled_event
LIMIT = 12
# Not using on_load because that can get called while a view is still
# transient -- we need to run when it becomes persistent.
@settled_event.add_listener
def on_settled_async(settled_view):
window = settled_view.window()
group = window.active_group()
views = window.views_in_group(group)
if len(views) < LIMIT:
return
if same_view_count(window, group, views):
return
active_view_id = window.active_view().id()
num_closeable = 0
for view in views:
if is_closable(view):
num_closeable += 1
# In practice we should never end up with the active view, since
# it should always be the first tab.
if num_closeable > LIMIT and view.id() != active_view_id:
view.close()
def same_view_count(window, group, views):
last_counts = window.settings().get("limit_tabs__last_counts", {})
# Sublime requires keys to be strings
if last_counts.get(str(group)) == len(views):
        return True
last_counts[str(group)] = len(views)
# settings().get() returns a copy, so we have to update with set()
window.settings().set("limit_tabs__last_counts", last_counts)
def is_closable(view):
return not (
view.is_dirty()
# Scratch buffers never get set as dirty and don't prompt to save
# when you close them. I'm not sure how they get created other than
# via the API.
or view.is_scratch()
or view.is_loading()
or (
view.settings().get("syntax")
== "Packages/Default/Find Results.hidden-tmLanguage"
)
)
|
Add LimitTabs plugin to replace ZenTabs
|
Add LimitTabs plugin to replace ZenTabs
|
Python
|
mit
|
russelldavis/sublimerc
|
Add LimitTabs plugin to replace ZenTabs
|
"""Same idea as ZenTabs, but simplified logic since we know tabs are already
ordered by MRU.
Unlike ZenTabs, this also makes it much easier to customize tabs that you
don't want to be closeable.
It also tweaks the formula: tabs that are not closable are *not* included
in the limit.
"""
from .lib import settled_event
LIMIT = 12
# Not using on_load because that can get called while a view is still
# transient -- we need to run when it becomes persistent.
@settled_event.add_listener
def on_settled_async(settled_view):
window = settled_view.window()
group = window.active_group()
views = window.views_in_group(group)
if len(views) < LIMIT:
return
if same_view_count(window, group, views):
return
active_view_id = window.active_view().id()
num_closeable = 0
for view in views:
if is_closable(view):
num_closeable += 1
# In practice we should never end up with the active view, since
# it should always be the first tab.
if num_closeable > LIMIT and view.id() != active_view_id:
view.close()
def same_view_count(window, group, views):
last_counts = window.settings().get("limit_tabs__last_counts", {})
# Sublime requires keys to be strings
if last_counts.get(str(group)) == len(views):
        return True
last_counts[str(group)] = len(views)
# settings().get() returns a copy, so we have to update with set()
window.settings().set("limit_tabs__last_counts", last_counts)
def is_closable(view):
return not (
view.is_dirty()
# Scratch buffers never get set as dirty and don't prompt to save
# when you close them. I'm not sure how they get created other than
# via the API.
or view.is_scratch()
or view.is_loading()
or (
view.settings().get("syntax")
== "Packages/Default/Find Results.hidden-tmLanguage"
)
)
|
<commit_before><commit_msg>Add LimitTabs plugin to replace ZenTabs<commit_after>
|
"""Same idea as ZenTabs, but simplified logic since we know tabs are already
ordered by MRU.
Unlike ZenTabs, this also makes it much easier to customize tabs that you
don't want to be closeable.
It also tweaks the formula: tabs that are not closable are *not* included
in the limit.
"""
from .lib import settled_event
LIMIT = 12
# Not using on_load because that can get called while a view is still
# transient -- we need to run when it becomes persistent.
@settled_event.add_listener
def on_settled_async(settled_view):
window = settled_view.window()
group = window.active_group()
views = window.views_in_group(group)
if len(views) < LIMIT:
return
if same_view_count(window, group, views):
return
active_view_id = window.active_view().id()
num_closeable = 0
for view in views:
if is_closable(view):
num_closeable += 1
# In practice we should never end up with the active view, since
# it should always be the first tab.
if num_closeable > LIMIT and view.id() != active_view_id:
view.close()
def same_view_count(window, group, views):
last_counts = window.settings().get("limit_tabs__last_counts", {})
# Sublime requires keys to be strings
if last_counts.get(str(group)) == len(views):
        return True
last_counts[str(group)] = len(views)
# settings().get() returns a copy, so we have to update with set()
window.settings().set("limit_tabs__last_counts", last_counts)
def is_closable(view):
return not (
view.is_dirty()
# Scratch buffers never get set as dirty and don't prompt to save
# when you close them. I'm not sure how they get created other than
# via the API.
or view.is_scratch()
or view.is_loading()
or (
view.settings().get("syntax")
== "Packages/Default/Find Results.hidden-tmLanguage"
)
)
|
Add LimitTabs plugin to replace ZenTabs"""Same idea as ZenTabs, but simplified logic since we know tabs are already
ordered by MRU.
Unlike ZenTabs, this also makes it much easier to customize tabs that you
don't want to be closeable.
It also tweaks the formula: tabs that are not closable are *not* included
in the limit.
"""
from .lib import settled_event
LIMIT = 12
# Not using on_load because that can get called while a view is still
# transient -- we need to run when it becomes persistent.
@settled_event.add_listener
def on_settled_async(settled_view):
window = settled_view.window()
group = window.active_group()
views = window.views_in_group(group)
if len(views) < LIMIT:
return
if same_view_count(window, group, views):
return
active_view_id = window.active_view().id()
num_closeable = 0
for view in views:
if is_closable(view):
num_closeable += 1
# In practice we should never end up with the active view, since
# it should always be the first tab.
if num_closeable > LIMIT and view.id() != active_view_id:
view.close()
def same_view_count(window, group, views):
last_counts = window.settings().get("limit_tabs__last_counts", {})
# Sublime requires keys to be strings
if last_counts.get(str(group)) == len(views):
        return True
last_counts[str(group)] = len(views)
# settings().get() returns a copy, so we have to update with set()
window.settings().set("limit_tabs__last_counts", last_counts)
def is_closable(view):
return not (
view.is_dirty()
# Scratch buffers never get set as dirty and don't prompt to save
# when you close them. I'm not sure how they get created other than
# via the API.
or view.is_scratch()
or view.is_loading()
or (
view.settings().get("syntax")
== "Packages/Default/Find Results.hidden-tmLanguage"
)
)
|
<commit_before><commit_msg>Add LimitTabs plugin to replace ZenTabs<commit_after>"""Same idea as ZenTabs, but simplified logic since we know tabs are already
ordered by MRU.
Unlike ZenTabs, this also makes it much easier to customize tabs that you
don't want to be closeable.
It also tweaks the formula: tabs that are not closable are *not* included
in the limit.
"""
from .lib import settled_event
LIMIT = 12
# Not using on_load because that can get called while a view is still
# transient -- we need to run when it becomes persistent.
@settled_event.add_listener
def on_settled_async(settled_view):
window = settled_view.window()
group = window.active_group()
views = window.views_in_group(group)
if len(views) < LIMIT:
return
if same_view_count(window, group, views):
return
active_view_id = window.active_view().id()
num_closeable = 0
for view in views:
if is_closable(view):
num_closeable += 1
# In practice we should never end up with the active view, since
# it should always be the first tab.
if num_closeable > LIMIT and view.id() != active_view_id:
view.close()
def same_view_count(window, group, views):
last_counts = window.settings().get("limit_tabs__last_counts", {})
# Sublime requires keys to be strings
if last_counts.get(str(group)) == len(views):
        return True
last_counts[str(group)] = len(views)
# settings().get() returns a copy, so we have to update with set()
window.settings().set("limit_tabs__last_counts", last_counts)
def is_closable(view):
return not (
view.is_dirty()
# Scratch buffers never get set as dirty and don't prompt to save
# when you close them. I'm not sure how they get created other than
# via the API.
or view.is_scratch()
or view.is_loading()
or (
view.settings().get("syntax")
== "Packages/Default/Find Results.hidden-tmLanguage"
)
)
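The closing policy can be stated without the Sublime API; a framework-free sketch, assuming views arrive MRU-ordered as (view_id, closable) pairs:
def ids_to_close(views, limit, active_id):
    closable_seen, doomed = 0, []
    for view_id, closable in views:
        if closable:
            closable_seen += 1
            # Keep the first `limit` closable tabs; never close the active one
            if closable_seen > limit and view_id != active_id:
                doomed.append(view_id)
    return doomed

assert ids_to_close([(1, True), (2, True), (3, True)], 2, active_id=1) == [3]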
|
|
1fcf95eb58a186f8fbb2d901a0242b3b39d960d2
|
examples/tv_to_rdf.py
|
examples/tv_to_rdf.py
|
#!/usr/bin/env python
"""
Converts a tag/value file to RDF format.
Usage: tv_to_rdf <tagvaluefile> <rdffile>
"""
import sys
import codecs
from spdx.parsers.tagvalue import Parser
from spdx.parsers.loggers import StandardLogger
from spdx.parsers.tagvaluebuilders import Builder
from spdx.writers.rdf import write_document, InvalidDocumentError
def convert(infile_name, outfile_name):
tagvalueparser = Parser(Builder(), StandardLogger())
tagvalueparser.build()
with open(infile_name) as infile:
data = infile.read()
document, error = tagvalueparser.parse(data)
if not error:
# print map(lambda c: c.name, document.creation_info.creators)
print 'Parsing Successful'
with open(outfile_name, mode='w') as out:
            write_document(document, out, validate=True)
else:
print 'Errors encountered while parsing tag value file.'
messages = []
document.validate(messages)
print '\n'.join(messages)
if __name__ == '__main__':
infile_name = sys.argv[1]
outfile_name = sys.argv[2]
convert(infile_name, outfile_name)
|
Add example file to convert tv to rdf
|
Add example file to convert tv to rdf
Signed-off-by: Tushar Mittal <fbbe7fbe5386ca0b80ae985499e622beebee2b12@gmail.com>
|
Python
|
apache-2.0
|
spdx/tools-python
|
Add example file to convert tv to rdf
Signed-off-by: Tushar Mittal <fbbe7fbe5386ca0b80ae985499e622beebee2b12@gmail.com>
|
#!/usr/bin/env python
"""
Converts a tag/value file to RDF format.
Usage: tv_to_rdf <tagvaluefile> <rdffile>
"""
import sys
import codecs
from spdx.parsers.tagvalue import Parser
from spdx.parsers.loggers import StandardLogger
from spdx.parsers.tagvaluebuilders import Builder
from spdx.writers.rdf import write_document, InvalidDocumentError
def convert(infile_name, outfile_name):
tagvalueparser = Parser(Builder(), StandardLogger())
tagvalueparser.build()
with open(infile_name) as infile:
data = infile.read()
document, error = tagvalueparser.parse(data)
if not error:
# print map(lambda c: c.name, document.creation_info.creators)
print 'Parsing Successful'
with open(outfile_name, mode='w') as out:
            write_document(document, out, validate=True)
else:
print 'Errors encountered while parsing tag value file.'
messages = []
document.validate(messages)
print '\n'.join(messages)
if __name__ == '__main__':
infile_name = sys.argv[1]
outfile_name = sys.argv[2]
convert(infile_name, outfile_name)
|
<commit_before><commit_msg>Add example file to convert tv to rdf
Signed-off-by: Tushar Mittal <fbbe7fbe5386ca0b80ae985499e622beebee2b12@gmail.com><commit_after>
|
#!/usr/bin/env python
"""
Converts a tag/value file to RDF format.
Usage: tv_to_rdf <tagvaluefile> <rdffile>
"""
import sys
import codecs
from spdx.parsers.tagvalue import Parser
from spdx.parsers.loggers import StandardLogger
from spdx.parsers.tagvaluebuilders import Builder
from spdx.writers.rdf import write_document, InvalidDocumentError
def convert(infile_name, outfile_name):
tagvalueparser = Parser(Builder(), StandardLogger())
tagvalueparser.build()
with open(infile_name) as infile:
data = infile.read()
document, error = tagvalueparser.parse(data)
if not error:
# print map(lambda c: c.name, document.creation_info.creators)
print 'Parsing Successful'
with open(outfile_name, mode='w') as out:
            write_document(document, out, validate=True)
else:
print 'Errors encountered while parsing tag value file.'
messages = []
document.validate(messages)
print '\n'.join(messages)
if __name__ == '__main__':
infile_name = sys.argv[1]
outfile_name = sys.argv[2]
convert(infile_name, outfile_name)
|
Add example file to convert tv to rdf
Signed-off-by: Tushar Mittal <fbbe7fbe5386ca0b80ae985499e622beebee2b12@gmail.com>#!/usr/bin/env python
"""
Converts a tag/value file to RDF format.
Usage: tv_to_rdf <tagvaluefile> <rdffile>
"""
import sys
import codecs
from spdx.parsers.tagvalue import Parser
from spdx.parsers.loggers import StandardLogger
from spdx.parsers.tagvaluebuilders import Builder
from spdx.writers.rdf import write_document, InvalidDocumentError
def convert(infile_name, outfile_name):
tagvalueparser = Parser(Builder(), StandardLogger())
tagvalueparser.build()
with open(infile_name) as infile:
data = infile.read()
document, error = tagvalueparser.parse(data)
if not error:
# print map(lambda c: c.name, document.creation_info.creators)
print 'Parsing Successful'
with open(outfile_name, mode='w') as out:
            write_document(document, out, validate=True)
else:
print 'Errors encountered while parsing tag value file.'
messages = []
document.validate(messages)
print '\n'.join(messages)
if __name__ == '__main__':
infile_name = sys.argv[1]
outfile_name = sys.argv[2]
convert(infile_name, outfile_name)
|
<commit_before><commit_msg>Add example file to convert tv to rdf
Signed-off-by: Tushar Mittal <fbbe7fbe5386ca0b80ae985499e622beebee2b12@gmail.com><commit_after>#!/usr/bin/env python
"""
Converts a tag/value file to RDF format.
Usage: tv_to_rdf <tagvaluefile> <rdffile>
"""
import sys
import codecs
from spdx.parsers.tagvalue import Parser
from spdx.parsers.loggers import StandardLogger
from spdx.parsers.tagvaluebuilders import Builder
from spdx.writers.rdf import write_document, InvalidDocumentError
def convert(infile_name, outfile_name):
tagvalueparser = Parser(Builder(), StandardLogger())
tagvalueparser.build()
with open(infile_name) as infile:
data = infile.read()
document, error = tagvalueparser.parse(data)
if not error:
# print map(lambda c: c.name, document.creation_info.creators)
print 'Parsing Successful'
with open(outfile_name, mode='w') as out:
            write_document(document, out, validate=True)
else:
print 'Errors encountered while parsing tag value file.'
messages = []
document.validate(messages)
print '\n'.join(messages)
if __name__ == '__main__':
infile_name = sys.argv[1]
outfile_name = sys.argv[2]
convert(infile_name, outfile_name)
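The record's code uses Python 2 print statements; a hedged Python 3 rendering of the same convert() flow, reusing the imports above:
def convert_py3(infile_name, outfile_name):
    parser = Parser(Builder(), StandardLogger())
    parser.build()
    with open(infile_name) as infile:
        document, error = parser.parse(infile.read())
    if not error:
        print('Parsing Successful')
        with open(outfile_name, mode='w') as out:
            write_document(document, out, validate=True)
    else:
        print('Errors encountered while parsing tag value file.')
        messages = []
        document.validate(messages)
        print('\n'.join(messages))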
|
|
6e878dae6669b7344723e5c49e0dec736b86fc19
|
raco/relation_key.py
|
raco/relation_key.py
|
"""Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
    def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.user == other.user and self.program == other.program \
and self.relation == other.relation
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
        args = {'relation': toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
|
Add a class to represent myria relation keys
|
Add a class to represent myria relation keys
|
Python
|
bsd-3-clause
|
uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco
|
Add a class to represent myria relation keys
|
"""Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
    def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.user == other.user and self.program == other.program \
and self.relation == other.relation
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
        args = {'relation': toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
|
<commit_before><commit_msg>Add a class to represent myria relation keys<commit_after>
|
"""Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
    def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.user == other.user and self.program == other.program \
and self.relation == other.relation
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
        args = {'relation': toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
|
Add a class to represent myria relation keys"""Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
    def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.user == other.user and self.program == other.program \
and self.relation == other.relation
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
        args = {'relation': toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
|
<commit_before><commit_msg>Add a class to represent myria relation keys<commit_after>"""Representation of a Myria relation key.
Myria relations are identified by a tuple of user, program, relation_name."""
class RelationKey(object):
    def __init__(self, user='public', program='adhoc', relation=None):
assert relation
self.user = user
self.program = program
self.relation = relation
def __repr__(self):
return 'RelationKey(%s,%s,%s)' % (self.user, self.program,
self.relation)
def __str__(self):
return '%s:%s:%s' % (self.user, self.program, self.relation)
def __eq__(self, other):
return self.user == other.user and self.program == other.program \
and self.relation == other.relation
@classmethod
def from_string(cls, s):
"""Create a RelationKey from a colon-delimited string."""
toks = s.split(':')
assert len(toks) <= 3
        args = {'relation': toks[-1]}
try:
args['program'] = toks[-2]
args['user'] = toks[-3]
except IndexError:
pass
return cls(**args)
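A short usage sketch of from_string, relying on the defaults above:
key = RelationKey.from_string('public:adhoc:employees')
assert (key.user, key.program, key.relation) == ('public', 'adhoc', 'employees')
# Omitted components fall back to the defaults
assert str(RelationKey.from_string('employees')) == 'public:adhoc:employees'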
|
|
432a798f340dd9e0ecb47bf5661a606d2a078547
|
test_echo.py
|
test_echo.py
|
from echo_client import client
def test_1():
assert client('This is a unicode test') == 'This is a unicode test'
def test_2():
assert client(u'This is an é unicode test') == u'This is an é unicode test'
|
Add tests for echo server; unicode input not passing tests yet
|
Add tests for echo server; unicode input not passing tests yet
|
Python
|
mit
|
jwarren116/network-tools,jwarren116/network-tools
|
Add tests for echo server; unicode input not passing tests yet
|
from echo_client import client
def test_1():
assert client('This is a unicode test') == 'This is a unicode test'
def test_2():
assert client(u'This is an é unicode test') == u'This is an é unicode test'
|
<commit_before><commit_msg>Add tests for echo server; unicode input not passing tests yet<commit_after>
|
from echo_client import client
def test_1():
assert client('This is a unicode test') == 'This is a unicode test'
def test_2():
assert client(u'This is an é unicode test') == u'This is an é unicode test'
|
Add tests for echo server; unicode input not passing tests yetfrom echo_client import client
def test_1():
assert client('This is a unicode test') == 'This is a unicode test'
def test_2():
assert client(u'This is an é unicode test') == u'This is an é unicode test'
|
<commit_before><commit_msg>Add tests for echo server; unicode input not passing tests yet<commit_after>from echo_client import client
def test_1():
assert client('This is a unicode test') == 'This is a unicode test'
def test_2():
assert client(u'This is an é unicode test') == u'This is an é unicode test'
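The failing unicode test usually points at a missing encode/decode step; a hedged sketch of a UTF-8-safe client (the real echo_client is not shown here, and host/port are placeholder assumptions):
import socket

def client(message, host='127.0.0.1', port=50000):
    payload = message.encode('utf-8')
    with socket.create_connection((host, port)) as sock:
        sock.sendall(payload)
        sock.shutdown(socket.SHUT_WR)  # signal end of message
        received = b''.join(iter(lambda: sock.recv(4096), b''))
    return received.decode('utf-8')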
|
|
da194d8c0b832a372d697ff4d3c87339334037d5
|
os_disk_config/tests/test_impl_blivet.py
|
os_disk_config/tests/test_impl_blivet.py
|
# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import blivet
import mock
from oslotest import base
from os_disk_config import impl_blivet
class TestStandardPartition(base.BaseTestCase):
def setUp(self):
super(TestStandardPartition, self).setUp()
self.blivet_instance = mock.Mock(spec=blivet.Blivet)
self.patcher = mock.patch('blivet.Blivet')
self.addCleanup(self.patcher.stop)
self.mock_blivet = self.patcher.start()
self.mock_blivet.return_value = self.blivet_instance
def test_constructor(self):
dc = impl_blivet.BlivetDiskConfig()
self.blivet_instance.reset.assert_called_once_with()
|
Add initial blivet unit test
|
Add initial blivet unit test
Just a super simple sanity check of the class constructor. More
tests to follow.
|
Python
|
apache-2.0
|
rdo-management/os-disk-config,agroup/os-disk-config
|
Add initial blivet unit test
Just a super simple sanity check of the class constructor. More
tests to follow.
|
# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import blivet
import mock
from oslotest import base
from os_disk_config import impl_blivet
class TestStandardPartition(base.BaseTestCase):
def setUp(self):
super(TestStandardPartition, self).setUp()
self.blivet_instance = mock.Mock(spec=blivet.Blivet)
self.patcher = mock.patch('blivet.Blivet')
self.addCleanup(self.patcher.stop)
self.mock_blivet = self.patcher.start()
self.mock_blivet.return_value = self.blivet_instance
def test_constructor(self):
dc = impl_blivet.BlivetDiskConfig()
self.blivet_instance.reset.assert_called_once_with()
|
<commit_before><commit_msg>Add initial blivet unit test
Just a super simple sanity check of the class constructor. More
tests to follow.<commit_after>
|
# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import blivet
import mock
from oslotest import base
from os_disk_config import impl_blivet
class TestStandardPartition(base.BaseTestCase):
def setUp(self):
super(TestStandardPartition, self).setUp()
self.blivet_instance = mock.Mock(spec=blivet.Blivet)
self.patcher = mock.patch('blivet.Blivet')
self.addCleanup(self.patcher.stop)
self.mock_blivet = self.patcher.start()
self.mock_blivet.return_value = self.blivet_instance
def test_constructor(self):
dc = impl_blivet.BlivetDiskConfig()
self.blivet_instance.reset.assert_called_once_with()
|
Add initial blivet unit test
Just a super simple sanity check of the class constructor. More
tests to follow.# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import blivet
import mock
from oslotest import base
from os_disk_config import impl_blivet
class TestStandardPartition(base.BaseTestCase):
def setUp(self):
super(TestStandardPartition, self).setUp()
self.blivet_instance = mock.Mock(spec=blivet.Blivet)
self.patcher = mock.patch('blivet.Blivet')
self.addCleanup(self.patcher.stop)
self.mock_blivet = self.patcher.start()
self.mock_blivet.return_value = self.blivet_instance
def test_constructor(self):
dc = impl_blivet.BlivetDiskConfig()
self.blivet_instance.reset.assert_called_once_with()
|
<commit_before><commit_msg>Add initial blivet unit test
Just a super simple sanity check of the class constructor. More
tests to follow.<commit_after># Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import blivet
import mock
from oslotest import base
from os_disk_config import impl_blivet
class TestStandardPartition(base.BaseTestCase):
def setUp(self):
super(TestStandardPartition, self).setUp()
self.blivet_instance = mock.Mock(spec=blivet.Blivet)
self.patcher = mock.patch('blivet.Blivet')
self.addCleanup(self.patcher.stop)
self.mock_blivet = self.patcher.start()
self.mock_blivet.return_value = self.blivet_instance
def test_constructor(self):
dc = impl_blivet.BlivetDiskConfig()
self.blivet_instance.reset.assert_called_once_with()
|
|
1a29e5ab7e58b8eef69358d2fdfb0e9e26367fe2
|
app/sense.py
|
app/sense.py
|
#!/usr/bin/env python3
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
DEVICE = "PiSense"
class Handler:
def __init__(self, display, logger, sensor):
self.display = display
self.logger = logger
self.sensor = sensor
self.logger.log(DEVICE, "running", 1)
def read(self):
values = {}
for reading in self.sensor.get_data():
values[reading[1]] = reading[2]
self.logger.log(DEVICE, reading[1], reading[2], reading[0])
        self.display.show_properties(values, self.sensor.get_properties())
return True
def quit(self):
self.logger.log(DEVICE, "running", 0)
return False
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
# setup display
display = Display("PiSense")
# setup key handlers
handler = Handler(display, logger, sensor)
dispatcher.add("q", handler, "quit")
# start processing key presses
while True:
if dispatcher.can_process_key():
if not dispatcher.process_key():
break
else:
handler.read()
|
Create app to record Pi Sense data
|
Create app to record Pi Sense data
|
Python
|
mit
|
thelonious/g2x,gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x
|
Create app to record Pi Sense data
|
#!/usr/bin/env python3
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
DEVICE = "PiSense"
class Handler:
def __init__(self, display, logger, sensor):
self.display = display
self.logger = logger
self.sensor = sensor
self.logger.log(DEVICE, "running", 1)
def read(self):
values = {}
for reading in self.sensor.get_data():
values[reading[1]] = reading[2]
self.logger.log(DEVICE, reading[1], reading[2], reading[0])
        self.display.show_properties(values, self.sensor.get_properties())
return True
def quit(self):
self.logger.log(DEVICE, "running", 0)
return False
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
# setup display
display = Display("PiSense")
# setup key handlers
handler = Handler(display, logger, sensor)
dispatcher.add("q", handler, "quit")
# start processing key presses
while True:
if dispatcher.can_process_key():
if not dispatcher.process_key():
break
else:
handler.read()
|
<commit_before><commit_msg>Create app to record Pi Sense data<commit_after>
|
#!/usr/bin/env python3
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
DEVICE = "PiSense"
class Handler:
def __init__(self, display, logger, sensor):
self.display = display
self.logger = logger
self.sensor = sensor
self.logger.log(DEVICE, "running", 1)
def read(self):
values = {}
for reading in self.sensor.get_data():
values[reading[1]] = reading[2]
self.logger.log(DEVICE, reading[1], reading[2], reading[0])
        self.display.show_properties(values, self.sensor.get_properties())
return True
def quit(self):
self.logger.log(DEVICE, "running", 0)
return False
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
# setup display
display = Display("PiSense")
# setup key handlers
handler = Handler(display, logger, sensor)
dispatcher.add("q", handler, "quit")
# start processing key presses
while True:
if dispatcher.can_process_key():
if not dispatcher.process_key():
break
else:
handler.read()
|
Create app to record Pi Sense data#!/usr/bin/env python3
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
DEVICE = "PiSense"
class Handler:
def __init__(self, display, logger, sensor):
self.display = display
self.logger = logger
self.sensor = sensor
self.logger.log(DEVICE, "running", 1)
def read(self):
values = {}
for reading in self.sensor.get_data():
values[reading[1]] = reading[2]
self.logger.log(DEVICE, reading[1], reading[2], reading[0])
        self.display.show_properties(values, self.sensor.get_properties())
return True
def quit(self):
self.logger.log(DEVICE, "running", 0)
return False
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
# setup display
display = Display("PiSense")
# setup key handlers
handler = Handler(display, logger, sensor)
dispatcher.add("q", handler, "quit")
# start processing key presses
while True:
if dispatcher.can_process_key():
if not dispatcher.process_key():
break
else:
handler.read()
|
<commit_before><commit_msg>Create app to record Pi Sense data<commit_after>#!/usr/bin/env python3
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
DEVICE = "PiSense"
class Handler:
def __init__(self, display, logger, sensor):
self.display = display
self.logger = logger
self.sensor = sensor
self.logger.log(DEVICE, "running", 1)
def read(self):
values = {}
for reading in self.sensor.get_data():
values[reading[1]] = reading[2]
self.logger.log(DEVICE, reading[1], reading[2], reading[0])
        self.display.show_properties(values, self.sensor.get_properties())
return True
def quit(self):
self.logger.log(DEVICE, "running", 0)
return False
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
# setup display
display = Display("PiSense")
# setup key handlers
handler = Handler(display, logger, sensor)
dispatcher.add("q", handler, "quit")
# start processing key presses
while True:
if dispatcher.can_process_key():
if not dispatcher.process_key():
break
else:
handler.read()
|
|
e86ee27ec1142dcd44990c97f3531dd7940313b7
|
lamp_state_machine.py
|
lamp_state_machine.py
|
import unittest
class LampStateMachine(unittest.TestCase):
OFF = 0
LOW = 33
MEDIUM = 66
HIGH = 100
NO_TOUCH_DETECTED = 0
TOUCH_DETECTED = 1
__lamp_state_machine = \
{
OFF: [OFF, LOW],
LOW: [LOW, MEDIUM],
MEDIUM: [MEDIUM, HIGH],
HIGH: [HIGH, OFF]
}
def get_next_state(self, current_state, input_value):
next_states = self.__lamp_state_machine[current_state]
return next_states[input_value]
def test_givenACurrentStateOfOffAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeOff(self):
expected = self.OFF
actual = self.get_next_state(self.OFF, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfOffAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeLow(self):
expected = self.LOW
actual = self.get_next_state(self.OFF, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfLowAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeLow(self):
expected = self.LOW
actual = self.get_next_state(self.LOW, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfLowAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeMedium(self):
expected = self.MEDIUM
actual = self.get_next_state(self.LOW, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfMediumAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeMedium(self):
expected = self.MEDIUM
actual = self.get_next_state(self.MEDIUM, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfMediumAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeHigh(self):
expected = self.HIGH
actual = self.get_next_state(self.MEDIUM, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfHighAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeHigh(self):
expected = self.HIGH
actual = self.get_next_state(self.HIGH, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfHighAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeOff(self):
expected = self.OFF
actual = self.get_next_state(self.HIGH, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
if __name__ == '__main__':
unittest.main()
|
Add a new class to handle the valid states for the touch lamp
|
Add a new class to handle the valid states for the touch lamp
|
Python
|
bsd-2-clause
|
thelukemccarthy/Wakeup-Lamp,thelukemccarthy/Wakeup-Lamp,thelukemccarthy/Wakeup-Lamp,thelukemccarthy/Wakeup-Lamp
|
Add a new class to handle the valid states for the touch lamp
|
import unittest
class LampStateMachine(unittest.TestCase):
OFF = 0
LOW = 33
MEDIUM = 66
HIGH = 100
NO_TOUCH_DETECTED = 0
TOUCH_DETECTED = 1
__lamp_state_machine = \
{
OFF: [OFF, LOW],
LOW: [LOW, MEDIUM],
MEDIUM: [MEDIUM, HIGH],
HIGH: [HIGH, OFF]
}
def get_next_state(self, current_state, input_value):
next_states = self.__lamp_state_machine[current_state]
return next_states[input_value]
def test_givenACurrentStateOfOffAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeOff(self):
expected = self.OFF
actual = self.get_next_state(self.OFF, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfOffAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeLow(self):
expected = self.LOW
actual = self.get_next_state(self.OFF, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfLowAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeLow(self):
expected = self.LOW
actual = self.get_next_state(self.LOW, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfLowAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeMedium(self):
expected = self.MEDIUM
actual = self.get_next_state(self.LOW, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfMediumAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeMedium(self):
expected = self.MEDIUM
actual = self.get_next_state(self.MEDIUM, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfMediumAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeHigh(self):
expected = self.HIGH
actual = self.get_next_state(self.MEDIUM, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfHighAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeHigh(self):
expected = self.HIGH
actual = self.get_next_state(self.HIGH, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfHighAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeOff(self):
expected = self.OFF
actual = self.get_next_state(self.HIGH, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a new class to handle the valid states for the touch lamp<commit_after>
|
import unittest
class LampStateMachine(unittest.TestCase):
OFF = 0
LOW = 33
MEDIUM = 66
HIGH = 100
NO_TOUCH_DETECTED = 0
TOUCH_DETECTED = 1
__lamp_state_machine = \
{
OFF: [OFF, LOW],
LOW: [LOW, MEDIUM],
MEDIUM: [MEDIUM, HIGH],
HIGH: [HIGH, OFF]
}
def get_next_state(self, current_state, input_value):
next_states = self.__lamp_state_machine[current_state]
return next_states[input_value]
def test_givenACurrentStateOfOffAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeOff(self):
expected = self.OFF
actual = self.get_next_state(self.OFF, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfOffAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeLow(self):
expected = self.LOW
actual = self.get_next_state(self.OFF, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfLowAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeLow(self):
expected = self.LOW
actual = self.get_next_state(self.LOW, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfLowAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeMedium(self):
expected = self.MEDIUM
actual = self.get_next_state(self.LOW, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfMediumAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeMedium(self):
expected = self.MEDIUM
actual = self.get_next_state(self.MEDIUM, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfMediumAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeHigh(self):
expected = self.HIGH
actual = self.get_next_state(self.MEDIUM, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfHighAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeHigh(self):
expected = self.HIGH
actual = self.get_next_state(self.HIGH, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfHighAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeOff(self):
expected = self.OFF
actual = self.get_next_state(self.HIGH, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
if __name__ == '__main__':
unittest.main()
|
Add a new class to handle the valid states for the touch lampimport unittest
class LampStateMachine(unittest.TestCase):
OFF = 0
LOW = 33
MEDIUM = 66
HIGH = 100
NO_TOUCH_DETECTED = 0
TOUCH_DETECTED = 1
__lamp_state_machine = \
{
OFF: [OFF, LOW],
LOW: [LOW, MEDIUM],
MEDIUM: [MEDIUM, HIGH],
HIGH: [HIGH, OFF]
}
def get_next_state(self, current_state, input_value):
next_states = self.__lamp_state_machine[current_state]
return next_states[input_value]
def test_givenACurrentStateOfOffAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeOff(self):
expected = self.OFF
actual = self.get_next_state(self.OFF, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfOffAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeLow(self):
expected = self.LOW
actual = self.get_next_state(self.OFF, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfLowAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeLow(self):
expected = self.LOW
actual = self.get_next_state(self.LOW, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfLowAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeMedium(self):
expected = self.MEDIUM
actual = self.get_next_state(self.LOW, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfMediumAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeMedium(self):
expected = self.MEDIUM
actual = self.get_next_state(self.MEDIUM, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfMediumAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeHigh(self):
expected = self.HIGH
actual = self.get_next_state(self.MEDIUM, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfHighAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeHigh(self):
expected = self.HIGH
actual = self.get_next_state(self.HIGH, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfHighAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeOff(self):
expected = self.OFF
actual = self.get_next_state(self.HIGH, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a new class to handle the valid states for the touch lamp<commit_after>import unittest
class LampStateMachine(unittest.TestCase):
OFF = 0
LOW = 33
MEDIUM = 66
HIGH = 100
NO_TOUCH_DETECTED = 0
TOUCH_DETECTED = 1
__lamp_state_machine = \
{
OFF: [OFF, LOW],
LOW: [LOW, MEDIUM],
MEDIUM: [MEDIUM, HIGH],
HIGH: [HIGH, OFF]
}
def get_next_state(self, current_state, input_value):
next_states = self.__lamp_state_machine[current_state]
return next_states[input_value]
def test_givenACurrentStateOfOffAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeOff(self):
expected = self.OFF
actual = self.get_next_state(self.OFF, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfOffAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeLow(self):
expected = self.LOW
actual = self.get_next_state(self.OFF, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfLowAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeLow(self):
expected = self.LOW
actual = self.get_next_state(self.LOW, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfLowAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeMedium(self):
expected = self.MEDIUM
actual = self.get_next_state(self.LOW, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfMediumAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeMedium(self):
expected = self.MEDIUM
actual = self.get_next_state(self.MEDIUM, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfMediumAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeHigh(self):
expected = self.HIGH
actual = self.get_next_state(self.MEDIUM, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfHighAndNoTouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeHigh(self):
expected = self.HIGH
actual = self.get_next_state(self.HIGH, self.NO_TOUCH_DETECTED)
self.assertEqual(expected, actual)
def test_givenACurrentStateOfHighAndATouchDetected_whenGetNextStateIsCalled_thenTheNextLampStateShouldBeOff(self):
expected = self.OFF
actual = self.get_next_state(self.HIGH, self.TOUCH_DETECTED)
self.assertEqual(expected, actual)
if __name__ == '__main__':
unittest.main()
|
|
ecfaa761d65009e7a1e59e795f332783b79f2492
|
python/balcaza/activity/rstats/format.py
|
python/balcaza/activity/rstats/format.py
|
from balcaza.t2types import RExpression, TextFile
def RExpressionToString(rserve):
return rserve.code(
'''sink(text)
print(rexpr)
sink()
''',
inputs = dict(
rexpr = RExpression
),
defaultInput = 'rexpr',
outputs = dict(
text = TextFile
),
defaultOutput = 'text'
)
|
Add R print to string library
|
Add R print to string library
|
Python
|
lgpl-2.1
|
jongiddy/balcazapy,jongiddy/balcazapy,jongiddy/balcazapy
|
Add R print to string library
|
from balcaza.t2types import RExpression, TextFile
def RExpressionToString(rserve):
return rserve.code(
'''sink(text)
print(rexpr)
sink()
''',
inputs = dict(
rexpr = RExpression
),
defaultInput = 'rexpr',
outputs = dict(
text = TextFile
),
defaultOutput = 'text'
)
|
<commit_before><commit_msg>Add R print to string library<commit_after>
|
from balcaza.t2types import RExpression, TextFile
def RExpressionToString(rserve):
return rserve.code(
'''sink(text)
print(rexpr)
sink()
''',
inputs = dict(
rexpr = RExpression
),
defaultInput = 'rexpr',
outputs = dict(
text = TextFile
),
defaultOutput = 'text'
)
|
Add R print to string libraryfrom balcaza.t2types import RExpression, TextFile
def RExpressionToString(rserve):
return rserve.code(
'''sink(text)
print(rexpr)
sink()
''',
inputs = dict(
rexpr = RExpression
),
defaultInput = 'rexpr',
outputs = dict(
text = TextFile
),
defaultOutput = 'text'
)
|
<commit_before><commit_msg>Add R print to string library<commit_after>from balcaza.t2types import RExpression, TextFile
def RExpressionToString(rserve):
return rserve.code(
'''sink(text)
print(rexpr)
sink()
''',
inputs = dict(
rexpr = RExpression
),
defaultInput = 'rexpr',
outputs = dict(
text = TextFile
),
defaultOutput = 'text'
)
|
|
6b04c7a36f9a430cb2a101b267f64946b3db06b1
|
fmn/rules/fedbadges.py
|
fmn/rules/fedbadges.py
|
def fedbadges_badge_award(config, message):
""" Fedbadges: A new badge has been awarded to someone
TODO description for the web interface goes here
"""
return message['topic'].endswith('fedbadges.badge.award')
def fedbadges_person_rank_advance(config, message):
""" Fedbadges: The rank of someone changed in the leaderboard of badges
TODO description for the web interface goes here
"""
return message['topic'].endswith('fedbadges.person.rank.advance')
|
Add filters for the badges messages
|
Add filters for the badges messages
|
Python
|
lgpl-2.1
|
jeremycline/fmn,jeremycline/fmn,jeremycline/fmn
|
Add filters for the badges messages
|
def fedbadges_badge_award(config, message):
""" Fedbadges: A new badge has been awarded to someone
TODO description for the web interface goes here
"""
return message['topic'].endswith('fedbadges.badge.award')
def fedbadges_person_rank_advance(config, message):
""" Fedbadges: The rank of someone changed in the leaderboard of badges
TODO description for the web interface goes here
"""
return message['topic'].endswith('fedbadges.person.rank.advance')
|
<commit_before><commit_msg>Add filters for the badges messages<commit_after>
|
def fedbadges_badge_award(config, message):
""" Fedbadges: A new badge has been awarded to someone
TODO description for the web interface goes here
"""
return message['topic'].endswith('fedbadges.badge.award')
def fedbadges_person_rank_advance(config, message):
""" Fedbadges: The rank of someone changed in the leaderboard of badges
TODO description for the web interface goes here
"""
return message['topic'].endswith('fedbadges.person.rank.advance')
|
Add filters for the badges messagesdef fedbadges_badge_award(config, message):
""" Fedbadges: A new badge has been awarded to someone
TODO description for the web interface goes here
"""
return message['topic'].endswith('fedbadges.badge.award')
def fedbadges_person_rank_advance(config, message):
""" Fedbadges: The rank of someone changed in the leaderboard of badges
TODO description for the web interface goes here
"""
return message['topic'].endswith('fedbadges.person.rank.advance')
|
<commit_before><commit_msg>Add filters for the badges messages<commit_after>def fedbadges_badge_award(config, message):
""" Fedbadges: A new badge has been awarded to someone
TODO description for the web interface goes here
"""
return message['topic'].endswith('fedbadges.badge.award')
def fedbadges_person_rank_advance(config, message):
""" Fedbadges: The rank of someone changed in the leaderboard of badges
TODO description for the web interface goes here
"""
return message['topic'].endswith('fedbadges.person.rank.advance')
|
|
40d5f91fc2577da74fcaec5efd4684927d1561bb
|
src/ggrc/migrations/versions/20160422143804_5599d1769f25_rename_status_field_values.py
|
src/ggrc/migrations/versions/20160422143804_5599d1769f25_rename_status_field_values.py
|
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: urban@reciprocitylabs.com
# Maintained By: urban@reciprocitylabs.com
"""
Rename status field values
Create Date: 2016-04-22 14:38:04.330718
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = "5599d1769f25"
down_revision = "33459bd8b70d"
TRANSLATION_TABLE = {
"Open": "Not Started",
"Finished": "Ready for Review",
"Final": "Completed"
}
TABLES = ["assessments", "requests"]
def upgrade():
for table in TABLES:
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Open","In Progress","Finished", "Verified", "Final", "Not Started",
"Ready for Review", "Completed") NOT NULL;""".format(
table=table))
for old_value, new_value in TRANSLATION_TABLE.items():
op.execute("""
UPDATE {table} SET status="{new_value}" WHERE status="{old_value}";""".format(
table=table,
new_value=new_value,
old_value=old_value
))
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Not Started", "In Progress", "Ready for Review", "Verified",
"Completed") NOT NULL;""".format(
table=table))
def downgrade():
for table in TABLES:
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Not Started", "In Progress", "Ready for Review", "Verified",
"Completed", "Open", "Finished", "Final") NOT NULL;""".format(
table=table))
for old_value, new_value in TRANSLATION_TABLE.items():
op.execute("""
UPDATE {table} SET status="{new_value}" WHERE status="{old_value}";""".format(
table=table,
new_value=old_value,
old_value=new_value
))
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Open","In Progress","Finished", "Verified", "Final") NOT NULL;""".format(
table=table))
|
Add migration to rename status enum values
|
Add migration to rename status enum values
Change status values for requests and assessments
|
Python
|
apache-2.0
|
edofic/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,VinnieJohns/ggrc-core,prasannav7/ggrc-core
|
Add migration to rename status enum values
Change status values for requests and assessments
|
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: urban@reciprocitylabs.com
# Maintained By: urban@reciprocitylabs.com
"""
Rename status field values
Create Date: 2016-04-22 14:38:04.330718
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = "5599d1769f25"
down_revision = "33459bd8b70d"
TRANSLATION_TABLE = {
"Open": "Not Started",
"Finished": "Ready for Review",
"Final": "Completed"
}
TABLES = ["assessments", "requests"]
def upgrade():
for table in TABLES:
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Open","In Progress","Finished", "Verified", "Final", "Not Started",
"Ready for Review", "Completed") NOT NULL;""".format(
table=table))
for old_value, new_value in TRANSLATION_TABLE.items():
op.execute("""
UPDATE {table} SET status="{new_value}" WHERE status="{old_value}";""".format(
table=table,
new_value=new_value,
old_value=old_value
))
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Not Started", "In Progress", "Ready for Review", "Verified",
"Completed") NOT NULL;""".format(
table=table))
def downgrade():
for table in TABLES:
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Not Started", "In Progress", "Ready for Review", "Verified",
"Completed", "Open", "Finished", "Final") NOT NULL;""".format(
table=table))
for old_value, new_value in TRANSLATION_TABLE.items():
op.execute("""
UPDATE {table} SET status="{new_value}" WHERE status="{old_value}";""".format(
table=table,
new_value=old_value,
old_value=new_value
))
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Open","In Progress","Finished", "Verified", "Final") NOT NULL;""".format(
table=table))
|
<commit_before><commit_msg>Add migration to rename status enum values
Change status values for requests and assessments<commit_after>
|
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: urban@reciprocitylabs.com
# Maintained By: urban@reciprocitylabs.com
"""
Rename status field values
Create Date: 2016-04-22 14:38:04.330718
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = "5599d1769f25"
down_revision = "33459bd8b70d"
TRANSLATION_TABLE = {
"Open": "Not Started",
"Finished": "Ready for Review",
"Final": "Completed"
}
TABLES = ["assessments", "requests"]
def upgrade():
for table in TABLES:
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Open","In Progress","Finished", "Verified", "Final", "Not Started",
"Ready for Review", "Completed") NOT NULL;""".format(
table=table))
for old_value, new_value in TRANSLATION_TABLE.items():
op.execute("""
UPDATE {table} SET status="{new_value}" WHERE status="{old_value}";""".format(
table=table,
new_value=new_value,
old_value=old_value
))
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Not Started", "In Progress", "Ready for Review", "Verified",
"Completed") NOT NULL;""".format(
table=table))
def downgrade():
for table in TABLES:
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Not Started", "In Progress", "Ready for Review", "Verified",
"Completed", "Open", "Finished", "Final") NOT NULL;""".format(
table=table))
for old_value, new_value in TRANSLATION_TABLE.items():
op.execute("""
UPDATE {table} SET status="{new_value}" WHERE status="{old_value}";""".format(
table=table,
new_value=old_value,
old_value=new_value
))
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Open","In Progress","Finished", "Verified", "Final") NOT NULL;""".format(
table=table))
|
Add migration to rename status enum values
Change status values for requests and assessments# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: urban@reciprocitylabs.com
# Maintained By: urban@reciprocitylabs.com
"""
Rename status field values
Create Date: 2016-04-22 14:38:04.330718
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = "5599d1769f25"
down_revision = "33459bd8b70d"
TRANSLATION_TABLE = {
"Open": "Not Started",
"Finished": "Ready for Review",
"Final": "Completed"
}
TABLES = ["assessments", "requests"]
def upgrade():
for table in TABLES:
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Open","In Progress","Finished", "Verified", "Final", "Not Started",
"Ready for Review", "Completed") NOT NULL;""".format(
table=table))
for old_value, new_value in TRANSLATION_TABLE.items():
op.execute("""
UPDATE {table} SET status="{new_value}" WHERE status="{old_value}";""".format(
table=table,
new_value=new_value,
old_value=old_value
))
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Not Started", "In Progress", "Ready for Review", "Verified",
"Completed") NOT NULL;""".format(
table=table))
def downgrade():
for table in TABLES:
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Not Started", "In Progress", "Ready for Review", "Verified",
"Completed", "Open", "Finished", "Final") NOT NULL;""".format(
table=table))
for old_value, new_value in TRANSLATION_TABLE.items():
op.execute("""
UPDATE {table} SET status="{new_value}" WHERE status="{old_value}";""".format(
table=table,
new_value=old_value,
old_value=new_value
))
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Open","In Progress","Finished", "Verified", "Final") NOT NULL;""".format(
table=table))
|
<commit_before><commit_msg>Add migration to rename status enum values
Change status values for requests and assessments<commit_after># Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: urban@reciprocitylabs.com
# Maintained By: urban@reciprocitylabs.com
"""
Rename status field values
Create Date: 2016-04-22 14:38:04.330718
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
# revision identifiers, used by Alembic.
revision = "5599d1769f25"
down_revision = "33459bd8b70d"
TRANSLATION_TABLE = {
"Open": "Not Started",
"Finished": "Ready for Review",
"Final": "Completed"
}
TABLES = ["assessments", "requests"]
def upgrade():
for table in TABLES:
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Open","In Progress","Finished", "Verified", "Final", "Not Started",
"Ready for Review", "Completed") NOT NULL;""".format(
table=table))
for old_value, new_value in TRANSLATION_TABLE.items():
op.execute("""
UPDATE {table} SET status="{new_value}" WHERE status="{old_value}";""".format(
table=table,
new_value=new_value,
old_value=old_value
))
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Not Started", "In Progress", "Ready for Review", "Verified",
"Completed") NOT NULL;""".format(
table=table))
def downgrade():
for table in TABLES:
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Not Started", "In Progress", "Ready for Review", "Verified",
"Completed", "Open", "Finished", "Final") NOT NULL;""".format(
table=table))
for old_value, new_value in TRANSLATION_TABLE.items():
op.execute("""
UPDATE {table} SET status="{new_value}" WHERE status="{old_value}";""".format(
table=table,
new_value=old_value,
old_value=new_value
))
op.execute("""
ALTER TABLE {table} CHANGE status status
ENUM("Open","In Progress","Finished", "Verified", "Final") NOT NULL;""".format(
table=table))
|
|
36a7061a61b5216499d3a284d6d41f1373b7e85e
|
tests/test_cli.py
|
tests/test_cli.py
|
import os
import subprocess
PROJECT_DIR = os.getcwd()
TEST_PROJECT_DIR = os.path.join(PROJECT_DIR, 'test_project')
def test_configuration_argument_in_cli():
"""Verify that's configuration option has been added to managements commands"""
os.chdir(TEST_PROJECT_DIR)
p = subprocess.Popen(['python', 'manage.py', 'test',
'--help'], stdout=subprocess.PIPE)
assert '--configuration' in p.communicate()[0].decode('UTF-8')
p = subprocess.Popen(['python', 'manage.py', 'runserver',
'--help'], stdout=subprocess.PIPE)
assert '--configuration' in p.communicate()[0].decode('UTF-8')
os.chdir(PROJECT_DIR)
|
Add a test for configuration argument
|
Add a test for configuration argument
This test does not use mock, and only searches for the `configuration` option in
the help messages.
|
Python
|
bsd-3-clause
|
blindroot/django-configurations,cato-/django-configurations,NextHub/django-configurations,jazzband/django-configurations,nangia/django-configurations,pombredanne/django-configurations,jezdez/django-configurations,seenureddy/django-configurations,jazzband/django-configurations,incuna/django-configurations,gatherhealth/django-configurations
|
Add a test for configuration argument
This test does not use mock, and only searches for the `configuration` option in
the help messages.
|
import os
import subprocess
PROJECT_DIR = os.getcwd()
TEST_PROJECT_DIR = os.path.join(PROJECT_DIR, 'test_project')
def test_configuration_argument_in_cli():
"""Verify that's configuration option has been added to managements commands"""
os.chdir(TEST_PROJECT_DIR)
p = subprocess.Popen(['python', 'manage.py', 'test',
'--help'], stdout=subprocess.PIPE)
assert '--configuration' in p.communicate()[0].decode('UTF-8')
p = subprocess.Popen(['python', 'manage.py', 'runserver',
'--help'], stdout=subprocess.PIPE)
assert '--configuration' in p.communicate()[0].decode('UTF-8')
os.chdir(PROJECT_DIR)
|
<commit_before><commit_msg>Add a test for configuration argument
This test does not use mock, and only searches for the `configuration` option in
the help messages.<commit_after>
|
import os
import subprocess
PROJECT_DIR = os.getcwd()
TEST_PROJECT_DIR = os.path.join(PROJECT_DIR, 'test_project')
def test_configuration_argument_in_cli():
"""Verify that's configuration option has been added to managements commands"""
os.chdir(TEST_PROJECT_DIR)
p = subprocess.Popen(['python', 'manage.py', 'test',
'--help'], stdout=subprocess.PIPE)
assert '--configuration' in p.communicate()[0].decode('UTF-8')
p = subprocess.Popen(['python', 'manage.py', 'runserver',
'--help'], stdout=subprocess.PIPE)
assert '--configuration' in p.communicate()[0].decode('UTF-8')
os.chdir(PROJECT_DIR)
|
Add a test for configuration argument
This test does not use mock, and only searches for the `configuration` option in
the help messages.import os
import subprocess
PROJECT_DIR = os.getcwd()
TEST_PROJECT_DIR = os.path.join(PROJECT_DIR, 'test_project')
def test_configuration_argument_in_cli():
"""Verify that's configuration option has been added to managements commands"""
os.chdir(TEST_PROJECT_DIR)
p = subprocess.Popen(['python', 'manage.py', 'test',
'--help'], stdout=subprocess.PIPE)
assert '--configuration' in p.communicate()[0].decode('UTF-8')
p = subprocess.Popen(['python', 'manage.py', 'runserver',
'--help'], stdout=subprocess.PIPE)
assert '--configuration' in p.communicate()[0].decode('UTF-8')
os.chdir(PROJECT_DIR)
|
<commit_before><commit_msg>Add a test for configuration argument
This test does not use mock, and only searches for the `configuration` option in
the help messages.<commit_after>import os
import subprocess
PROJECT_DIR = os.getcwd()
TEST_PROJECT_DIR = os.path.join(PROJECT_DIR, 'test_project')
def test_configuration_argument_in_cli():
"""Verify that's configuration option has been added to managements commands"""
os.chdir(TEST_PROJECT_DIR)
p = subprocess.Popen(['python', 'manage.py', 'test',
'--help'], stdout=subprocess.PIPE)
assert '--configuration' in p.communicate()[0].decode('UTF-8')
p = subprocess.Popen(['python', 'manage.py', 'runserver',
'--help'], stdout=subprocess.PIPE)
assert '--configuration' in p.communicate()[0].decode('UTF-8')
os.chdir(PROJECT_DIR)
|
|
09f5fb12074e419c82c76d856c208116e0f43a70
|
hkm/migrations/0035_auto_20181212_1435.py
|
hkm/migrations/0035_auto_20181212_1435.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-12 12:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hkm', '0034_postal_code_to_charfield'),
]
operations = [
migrations.AlterField(
model_name='printproduct',
name='name',
field=models.CharField(choices=[(b'api-poster-gloss-30x40', 'api-poster-gloss-30x40'), (b'api-poster-gloss-40x30', 'api-poster-gloss-40x30'), (b'api-poster-30x40', 'api-poster-30x40'), (b'api-poster-40x30', 'api-poster-40x30'), (b'api-poster-50x70', 'api-poster-50x70'), (b'api-poster-70x50', 'api-poster-70x50'), (b'api-poster-gloss-A4-horizontal', 'api-poster-gloss-A4-horizontal'), (b'api-poster-gloss-A4', 'api-poster-gloss-A4')], max_length=255, verbose_name='Name'),
),
migrations.AlterField(
model_name='productorder',
name='postal_fees',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=10, verbose_name='Shipping fees'),
),
migrations.AlterField(
model_name='userprofile',
name='printer_presets',
field=models.TextField(default=b'{"api-poster-gloss-A4": 0, "api-poster-30x40": 0, "api-poster-gloss-30x40": 0, "api-poster-gloss-40x30": 0, "api-poster-40x30": 0, "api-poster-70x50": 0, "api-poster-gloss-A4-horizontal": 0, "api-poster-50x70": 0}', verbose_name='Tulostimen presetit'),
),
]
|
Add uncommitted migration from past changes
|
Add uncommitted migration from past changes
Refs -
|
Python
|
mit
|
andersinno/kuvaselaamo,andersinno/kuvaselaamo,andersinno/kuvaselaamo
|
Add uncommitted migration from past changes
Refs -
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-12 12:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hkm', '0034_postal_code_to_charfield'),
]
operations = [
migrations.AlterField(
model_name='printproduct',
name='name',
field=models.CharField(choices=[(b'api-poster-gloss-30x40', 'api-poster-gloss-30x40'), (b'api-poster-gloss-40x30', 'api-poster-gloss-40x30'), (b'api-poster-30x40', 'api-poster-30x40'), (b'api-poster-40x30', 'api-poster-40x30'), (b'api-poster-50x70', 'api-poster-50x70'), (b'api-poster-70x50', 'api-poster-70x50'), (b'api-poster-gloss-A4-horizontal', 'api-poster-gloss-A4-horizontal'), (b'api-poster-gloss-A4', 'api-poster-gloss-A4')], max_length=255, verbose_name='Name'),
),
migrations.AlterField(
model_name='productorder',
name='postal_fees',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=10, verbose_name='Shipping fees'),
),
migrations.AlterField(
model_name='userprofile',
name='printer_presets',
field=models.TextField(default=b'{"api-poster-gloss-A4": 0, "api-poster-30x40": 0, "api-poster-gloss-30x40": 0, "api-poster-gloss-40x30": 0, "api-poster-40x30": 0, "api-poster-70x50": 0, "api-poster-gloss-A4-horizontal": 0, "api-poster-50x70": 0}', verbose_name='Tulostimen presetit'),
),
]
|
<commit_before><commit_msg>Add uncommitted migration from past changes
Refs -<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-12 12:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hkm', '0034_postal_code_to_charfield'),
]
operations = [
migrations.AlterField(
model_name='printproduct',
name='name',
field=models.CharField(choices=[(b'api-poster-gloss-30x40', 'api-poster-gloss-30x40'), (b'api-poster-gloss-40x30', 'api-poster-gloss-40x30'), (b'api-poster-30x40', 'api-poster-30x40'), (b'api-poster-40x30', 'api-poster-40x30'), (b'api-poster-50x70', 'api-poster-50x70'), (b'api-poster-70x50', 'api-poster-70x50'), (b'api-poster-gloss-A4-horizontal', 'api-poster-gloss-A4-horizontal'), (b'api-poster-gloss-A4', 'api-poster-gloss-A4')], max_length=255, verbose_name='Name'),
),
migrations.AlterField(
model_name='productorder',
name='postal_fees',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=10, verbose_name='Shipping fees'),
),
migrations.AlterField(
model_name='userprofile',
name='printer_presets',
field=models.TextField(default=b'{"api-poster-gloss-A4": 0, "api-poster-30x40": 0, "api-poster-gloss-30x40": 0, "api-poster-gloss-40x30": 0, "api-poster-40x30": 0, "api-poster-70x50": 0, "api-poster-gloss-A4-horizontal": 0, "api-poster-50x70": 0}', verbose_name='Tulostimen presetit'),
),
]
|
Add uncommitted migration from past changes
Refs -# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-12 12:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hkm', '0034_postal_code_to_charfield'),
]
operations = [
migrations.AlterField(
model_name='printproduct',
name='name',
field=models.CharField(choices=[(b'api-poster-gloss-30x40', 'api-poster-gloss-30x40'), (b'api-poster-gloss-40x30', 'api-poster-gloss-40x30'), (b'api-poster-30x40', 'api-poster-30x40'), (b'api-poster-40x30', 'api-poster-40x30'), (b'api-poster-50x70', 'api-poster-50x70'), (b'api-poster-70x50', 'api-poster-70x50'), (b'api-poster-gloss-A4-horizontal', 'api-poster-gloss-A4-horizontal'), (b'api-poster-gloss-A4', 'api-poster-gloss-A4')], max_length=255, verbose_name='Name'),
),
migrations.AlterField(
model_name='productorder',
name='postal_fees',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=10, verbose_name='Shipping fees'),
),
migrations.AlterField(
model_name='userprofile',
name='printer_presets',
field=models.TextField(default=b'{"api-poster-gloss-A4": 0, "api-poster-30x40": 0, "api-poster-gloss-30x40": 0, "api-poster-gloss-40x30": 0, "api-poster-40x30": 0, "api-poster-70x50": 0, "api-poster-gloss-A4-horizontal": 0, "api-poster-50x70": 0}', verbose_name='Tulostimen presetit'),
),
]
|
<commit_before><commit_msg>Add uncommitted migration from past changes
Refs -<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-12 12:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hkm', '0034_postal_code_to_charfield'),
]
operations = [
migrations.AlterField(
model_name='printproduct',
name='name',
field=models.CharField(choices=[(b'api-poster-gloss-30x40', 'api-poster-gloss-30x40'), (b'api-poster-gloss-40x30', 'api-poster-gloss-40x30'), (b'api-poster-30x40', 'api-poster-30x40'), (b'api-poster-40x30', 'api-poster-40x30'), (b'api-poster-50x70', 'api-poster-50x70'), (b'api-poster-70x50', 'api-poster-70x50'), (b'api-poster-gloss-A4-horizontal', 'api-poster-gloss-A4-horizontal'), (b'api-poster-gloss-A4', 'api-poster-gloss-A4')], max_length=255, verbose_name='Name'),
),
migrations.AlterField(
model_name='productorder',
name='postal_fees',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=10, verbose_name='Shipping fees'),
),
migrations.AlterField(
model_name='userprofile',
name='printer_presets',
field=models.TextField(default=b'{"api-poster-gloss-A4": 0, "api-poster-30x40": 0, "api-poster-gloss-30x40": 0, "api-poster-gloss-40x30": 0, "api-poster-40x30": 0, "api-poster-70x50": 0, "api-poster-gloss-A4-horizontal": 0, "api-poster-50x70": 0}', verbose_name='Tulostimen presetit'),
),
]
|
|
8bf9bb5929bd73699b7daacedbed6b2e54fa36e7
|
avena/tests/test-filter.py
|
avena/tests/test-filter.py
|
#!/usr/bin/env python
from numpy import all, allclose, array, float32
from .. import filter
x = array([
[0, 1, 0],
[1, 1, 1],
[0, 1, 0],
], dtype=float32)
def test_low_pass_filter():
y = filter._low_pass_filter(x.shape, 1)
assert all(x == y)
z = filter._high_pass_filter(x.shape, 1)
assert all(z == 1.0 - y)
def test_lowpass():
y = filter._lowpass(3, x)
assert allclose(x, y)
if __name__ == '__main__':
pass
|
Add unit tests for the filter module.
|
Add unit tests for the filter module.
|
Python
|
isc
|
eliteraspberries/avena
|
Add unit tests for the filter module.
|
#!/usr/bin/env python
from numpy import all, allclose, array, float32
from .. import filter
x = array([
[0, 1, 0],
[1, 1, 1],
[0, 1, 0],
], dtype=float32)
def test_low_pass_filter():
y = filter._low_pass_filter(x.shape, 1)
assert all(x == y)
z = filter._high_pass_filter(x.shape, 1)
assert all(z == 1.0 - y)
def test_lowpass():
y = filter._lowpass(3, x)
assert allclose(x, y)
if __name__ == '__main__':
pass
|
<commit_before><commit_msg>Add unit tests for the filter module.<commit_after>
|
#!/usr/bin/env python
from numpy import all, allclose, array, float32
from .. import filter
x = array([
[0, 1, 0],
[1, 1, 1],
[0, 1, 0],
], dtype=float32)
def test_low_pass_filter():
y = filter._low_pass_filter(x.shape, 1)
assert all(x == y)
z = filter._high_pass_filter(x.shape, 1)
assert all(z == 1.0 - y)
def test_lowpass():
y = filter._lowpass(3, x)
assert allclose(x, y)
if __name__ == '__main__':
pass
|
Add unit tests for the filter module.#!/usr/bin/env python
from numpy import all, allclose, array, float32
from .. import filter
x = array([
[0, 1, 0],
[1, 1, 1],
[0, 1, 0],
], dtype=float32)
def test_low_pass_filter():
y = filter._low_pass_filter(x.shape, 1)
assert all(x == y)
z = filter._high_pass_filter(x.shape, 1)
assert all(z == 1.0 - y)
def test_lowpass():
y = filter._lowpass(3, x)
assert allclose(x, y)
if __name__ == '__main__':
pass
|
<commit_before><commit_msg>Add unit tests for the filter module.<commit_after>#!/usr/bin/env python
from numpy import all, allclose, array, float32
from .. import filter
x = array([
[0, 1, 0],
[1, 1, 1],
[0, 1, 0],
], dtype=float32)
def test_low_pass_filter():
y = filter._low_pass_filter(x.shape, 1)
assert all(x == y)
z = filter._high_pass_filter(x.shape, 1)
assert all(z == 1.0 - y)
def test_lowpass():
y = filter._lowpass(3, x)
assert allclose(x, y)
if __name__ == '__main__':
pass
|
|
fbc71aa9efc1aba4f8bd9294af24d17574b614da
|
problem_39.py
|
problem_39.py
|
from time import time
from math import pow, sqrt
LIMIT = 1000
PERIMETERS = [[0 for i in range(j)] for j in range(LIMIT/2)]
def most_common(lst):
return max(set(lst), key=lst.count)
def main():
for a in range(1, LIMIT/2):
for b in range(a+1, LIMIT/2):
c = sqrt(pow(a, 2) + pow(b, 2)) # Pythagorean theorem
perimeter = a + b + c
if c.is_integer() and perimeter < LIMIT:
PERIMETERS[b][a] = perimeter
print 'Maximum perimeter options:', most_common([item for sublist in PERIMETERS for item in sublist if item])
if __name__ == '__main__':
t = time()
main()
print 'Time:', time() - t
|
Add problem 39, right triangle perimeter options
|
Add problem 39, right triangle perimeter options
|
Python
|
mit
|
dimkarakostas/project-euler
|
Add problem 39, right triangle perimeter options
|
from time import time
from math import pow, sqrt
LIMIT = 1000
PERIMETERS = [[0 for i in range(j)] for j in range(LIMIT/2)]
def most_common(lst):
return max(set(lst), key=lst.count)
def main():
for a in range(1, LIMIT/2):
for b in range(a+1, LIMIT/2):
c = sqrt(pow(a, 2) + pow(b, 2)) # Pythagorean theorem
perimeter = a + b + c
if c.is_integer() and perimeter < LIMIT:
PERIMETERS[b][a] = perimeter
print 'Maximum perimeter options:', most_common([item for sublist in PERIMETERS for item in sublist if item])
if __name__ == '__main__':
t = time()
main()
print 'Time:', time() - t
|
<commit_before><commit_msg>Add problem 39, right triangle perimeter options<commit_after>
|
from time import time
from math import pow, sqrt
LIMIT = 1000
PERIMETERS = [[0 for i in range(j)] for j in range(LIMIT/2)]
def most_common(lst):
return max(set(lst), key=lst.count)
def main():
for a in range(1, LIMIT/2):
for b in range(a+1, LIMIT/2):
c = sqrt(pow(a, 2) + pow(b, 2)) # Pythagorean theorem
perimeter = a + b + c
if c.is_integer() and perimeter < LIMIT:
PERIMETERS[b][a] = perimeter
print 'Maximum perimeter options:', most_common([item for sublist in PERIMETERS for item in sublist if item])
if __name__ == '__main__':
t = time()
main()
print 'Time:', time() - t
|
Add problem 39, right triangle perimeter optionsfrom time import time
from math import pow, sqrt
LIMIT = 1000
PERIMETERS = [[0 for i in range(j)] for j in range(LIMIT/2)]
def most_common(lst):
return max(set(lst), key=lst.count)
def main():
for a in range(1, LIMIT/2):
for b in range(a+1, LIMIT/2):
c = sqrt(pow(a, 2) + pow(b, 2)) # Pythagorean theorem
perimeter = a + b + c
if c.is_integer() and perimeter < LIMIT:
PERIMETERS[b][a] = perimeter
print 'Maximum perimeter options:', most_common([item for sublist in PERIMETERS for item in sublist if item])
if __name__ == '__main__':
t = time()
main()
print 'Time:', time() - t
|
<commit_before><commit_msg>Add problem 39, right triangle perimeter options<commit_after>from time import time
from math import pow, sqrt
LIMIT = 1000
PERIMETERS = [[0 for i in range(j)] for j in range(LIMIT/2)]
def most_common(lst):
return max(set(lst), key=lst.count)
def main():
for a in range(1, LIMIT/2):
for b in range(a+1, LIMIT/2):
c = sqrt(pow(a, 2) + pow(b, 2)) # Pythagorean theorem
perimeter = a + b + c
if c.is_integer() and perimeter < LIMIT:
PERIMETERS[b][a] = perimeter
print 'Maximum perimeter options:', most_common([item for sublist in PERIMETERS for item in sublist if item])
if __name__ == '__main__':
t = time()
main()
print 'Time:', time() - t
|
|
0219b1ad7e093ffc2e7c0f455e59be0294fc6175
|
tests/test_rapids.py
|
tests/test_rapids.py
|
import unittest
import cudf
from cuml.cluster import DBSCAN
from common import gpu_test
class TestRapids(unittest.TestCase):
def test_dbscan(self):
# Create and populate a GPU DataFrame
gdf_float = cudf.DataFrame()
gdf_float['0'] = [1.0, 2.0, 5.0]
gdf_float['1'] = [4.0, 2.0, 1.0]
gdf_float['2'] = [4.0, 2.0, 1.0]
# Setup and fit clusters
dbscan_float = DBSCAN(eps=1.0, min_samples=1)
dbscan_float.fit(gdf_float)
self.assertEqual(3, dbscan_float.labels_.size)
|
Add tests for cuml & cudf to prevent regression.
|
Add tests for cuml & cudf to prevent regression.
http://b/144522678
|
Python
|
apache-2.0
|
Kaggle/docker-python,Kaggle/docker-python
|
Add tests for cuml & cudf to prevent regression.
http://b/144522678
|
import unittest
import cudf
from cuml.cluster import DBSCAN
from common import gpu_test
class TestRapids(unittest.TestCase):
def test_dbscan(self):
# Create and populate a GPU DataFrame
gdf_float = cudf.DataFrame()
gdf_float['0'] = [1.0, 2.0, 5.0]
gdf_float['1'] = [4.0, 2.0, 1.0]
gdf_float['2'] = [4.0, 2.0, 1.0]
# Setup and fit clusters
dbscan_float = DBSCAN(eps=1.0, min_samples=1)
dbscan_float.fit(gdf_float)
self.assertEqual(3, dbscan_float.labels_.size)
|
<commit_before><commit_msg>Add tests for cuml & cudf to prevent regression.
http://b/144522678<commit_after>
|
import unittest
import cudf
from cuml.cluster import DBSCAN
from common import gpu_test
class TestRapids(unittest.TestCase):
def test_dbscan(self):
# Create and populate a GPU DataFrame
gdf_float = cudf.DataFrame()
gdf_float['0'] = [1.0, 2.0, 5.0]
gdf_float['1'] = [4.0, 2.0, 1.0]
gdf_float['2'] = [4.0, 2.0, 1.0]
# Setup and fit clusters
dbscan_float = DBSCAN(eps=1.0, min_samples=1)
dbscan_float.fit(gdf_float)
self.assertEqual(3, dbscan_float.labels_.size)
|
Add tests for cuml & cudf to prevent regression.
http://b/144522678import unittest
import cudf
from cuml.cluster import DBSCAN
from common import gpu_test
class TestRapids(unittest.TestCase):
def test_dbscan(self):
# Create and populate a GPU DataFrame
gdf_float = cudf.DataFrame()
gdf_float['0'] = [1.0, 2.0, 5.0]
gdf_float['1'] = [4.0, 2.0, 1.0]
gdf_float['2'] = [4.0, 2.0, 1.0]
# Setup and fit clusters
dbscan_float = DBSCAN(eps=1.0, min_samples=1)
dbscan_float.fit(gdf_float)
self.assertEqual(3, dbscan_float.labels_.size)
|
<commit_before><commit_msg>Add tests for cuml & cudf to prevent regression.
http://b/144522678<commit_after>import unittest
import cudf
from cuml.cluster import DBSCAN
from common import gpu_test
class TestRapids(unittest.TestCase):
def test_dbscan(self):
# Create and populate a GPU DataFrame
gdf_float = cudf.DataFrame()
gdf_float['0'] = [1.0, 2.0, 5.0]
gdf_float['1'] = [4.0, 2.0, 1.0]
gdf_float['2'] = [4.0, 2.0, 1.0]
# Setup and fit clusters
dbscan_float = DBSCAN(eps=1.0, min_samples=1)
dbscan_float.fit(gdf_float)
self.assertEqual(3, dbscan_float.labels_.size)
|
|
66f05b105e9330dc213bc76c25f8c3b569dad3be
|
problem_2/solution.py
|
problem_2/solution.py
|
f1, f2, s, n = 0, 1, 0, 4000000
while f2 < n:
f2, f1 = f1, f1 + f2
if f2 % 2 == 0:
s += f2
print s
|
Add Python implementation for problem 2
|
Add Python implementation for problem 2
|
Python
|
mit
|
mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler
|
Add Python implementation for problem 2
|
f1, f2, s, n = 0, 1, 0, 4000000
while f2 < n:
f2, f1 = f1, f1 + f2
if f2 % 2 == 0:
s += f2
print s
|
<commit_before><commit_msg>Add Python implementation for problem 2<commit_after>
|
f1, f2, s, n = 0, 1, 0, 4000000
while f2 < n:
f2, f1 = f1, f1 + f2
if f2 % 2 == 0:
s += f2
print s
|
Add Python implementation for problem 2f1, f2, s, n = 0, 1, 0, 4000000
while f2 < n:
f2, f1 = f1, f1 + f2
if f2 % 2 == 0:
s += f2
print s
|
<commit_before><commit_msg>Add Python implementation for problem 2<commit_after>f1, f2, s, n = 0, 1, 0, 4000000
while f2 < n:
f2, f1 = f1, f1 + f2
if f2 % 2 == 0:
s += f2
print s
|
|
f5544c1902d38b674d319f19b3d2c3f02ca54342
|
hanzidefs.py
|
hanzidefs.py
|
import collections
import sys
from CEDICT_Parser import parser, pinyin
usage = "Usage: python hanzidefs.py cedict_ts.u8 inputtext.txt > outputdefs.tsv"
def get_all_hanzi(txt):
# count all the characters in CJK range
counts = collections.Counter([c for c in txt if '\u4e00' <= c <= '\u9fff'])
sys.stderr.write("%d uniq chars found\n" % len(counts))
# return characters from most to least common
return [hz for hz, _ in counts.most_common()]
def def2field(definition):
"""Given a parsed definition object {hanzi: _, pinyin: _, def: _}, generate
the formatted text field to show up in the output."""
return """{pronounce}: {meaning}""".format(
pronounce=pinyin.convert(definition['pinyin']),
meaning=definition['def'])
if __name__ == "__main__":
if len(sys.argv) < 3:
sys.stderr.write(usage)
sys.exit(1)
sys.stderr.write("Parsing CEDICT...\n")
sys.stderr.flush()
cedict = parser.read_file(sys.argv[1])
sys.stderr.write("Reading source text...\n")
sys.stderr.flush()
with open(sys.argv[2], 'r') as f:
hanzi = get_all_hanzi(f.read())
sys.stderr.write("Generating defs...\n")
sys.stderr.flush()
for ch in hanzi:
defs = cedict[ch]
def1 = def2 = def3 = ""
if not defs:
def1 = "Unknown"
else:
def1 = def2field(defs[0])
if len(defs) >= 2:
def2 = def2field(defs[1])
if len(defs) >= 3:
def3 = def2field(defs[2])
print("""%s\t%s\t%s\t%s""" % (ch, def1, def2, def3))
|
Write script to print defs
|
Write script to print defs
|
Python
|
agpl-3.0
|
erjiang/hanzidefs
|
Write script to print defs
|
import collections
import sys
from CEDICT_Parser import parser, pinyin
usage = "Usage: python hanzidefs.py cedict_ts.u8 inputtext.txt > outputdefs.tsv"
def get_all_hanzi(txt):
# count all the characters in CJK range
counts = collections.Counter([c for c in txt if '\u4e00' <= c <= '\u9fff'])
sys.stderr.write("%d uniq chars found\n" % len(counts))
# return characters from most to least common
return [hz for hz, _ in counts.most_common()]
def def2field(definition):
"""Given a parsed definition object {hanzi: _, pinyin: _, def: _}, generate
the formatted text field to show up in the output."""
return """{pronounce}: {meaning}""".format(
pronounce=pinyin.convert(definition['pinyin']),
meaning=definition['def'])
if __name__ == "__main__":
if len(sys.argv) < 3:
sys.stderr.write(usage)
sys.exit(1)
sys.stderr.write("Parsing CEDICT...\n")
sys.stderr.flush()
cedict = parser.read_file(sys.argv[1])
sys.stderr.write("Reading source text...\n")
sys.stderr.flush()
with open(sys.argv[2], 'r') as f:
hanzi = get_all_hanzi(f.read())
sys.stderr.write("Generating defs...\n")
sys.stderr.flush()
for ch in hanzi:
defs = cedict[ch]
def1 = def2 = def3 = ""
if not defs:
def1 = "Unknown"
else:
def1 = def2field(defs[0])
if len(defs) >= 2:
def2 = def2field(defs[1])
if len(defs) >= 3:
def3 = def2field(defs[2])
print("""%s\t%s\t%s\t%s""" % (ch, def1, def2, def3))
|
<commit_before><commit_msg>Write script to print defs<commit_after>
|
import collections
import sys
from CEDICT_Parser import parser, pinyin
usage = "Usage: python hanzidefs.py cedict_ts.u8 inputtext.txt > outputdefs.tsv"
def get_all_hanzi(txt):
# count all the characters in CJK range
counts = collections.Counter([c for c in txt if '\u4e00' <= c <= '\u9fff'])
sys.stderr.write("%d uniq chars found\n" % len(counts))
# return characters from most to least common
return [hz for hz, _ in counts.most_common()]
def def2field(definition):
"""Given a parsed definition object {hanzi: _, pinyin: _, def: _}, generate
the formatted text field to show up in the output."""
return """{pronounce}: {meaning}""".format(
pronounce=pinyin.convert(definition['pinyin']),
meaning=definition['def'])
if __name__ == "__main__":
if len(sys.argv) < 3:
sys.stderr.write(usage)
sys.exit(1)
sys.stderr.write("Parsing CEDICT...\n")
sys.stderr.flush()
cedict = parser.read_file(sys.argv[1])
sys.stderr.write("Reading source text...\n")
sys.stderr.flush()
with open(sys.argv[2], 'r') as f:
hanzi = get_all_hanzi(f.read())
sys.stderr.write("Generating defs...\n")
sys.stderr.flush()
for ch in hanzi:
defs = cedict[ch]
def1 = def2 = def3 = ""
if not defs:
def1 = "Unknown"
else:
def1 = def2field(defs[0])
if len(defs) >= 2:
def2 = def2field(defs[1])
if len(defs) >= 3:
def3 = def2field(defs[2])
print("""%s\t%s\t%s\t%s""" % (ch, def1, def2, def3))
|
Write script to print defsimport collections
import sys
from CEDICT_Parser import parser, pinyin
usage = "Usage: python hanzidefs.py cedict_ts.u8 inputtext.txt > outputdefs.tsv"
def get_all_hanzi(txt):
# count all the characters in CJK range
counts = collections.Counter([c for c in txt if '\u4e00' <= c <= '\u9fff'])
sys.stderr.write("%d uniq chars found\n" % len(counts))
# return characters from most to least common
return [hz for hz, _ in counts.most_common()]
def def2field(definition):
"""Given a parsed definition object {hanzi: _, pinyin: _, def: _}, generate
the formatted text field to show up in the output."""
return """{pronounce}: {meaning}""".format(
pronounce=pinyin.convert(definition['pinyin']),
meaning=definition['def'])
if __name__ == "__main__":
if len(sys.argv) < 3:
sys.stderr.write(usage)
sys.exit(1)
sys.stderr.write("Parsing CEDICT...\n")
sys.stderr.flush()
cedict = parser.read_file(sys.argv[1])
sys.stderr.write("Reading source text...\n")
sys.stderr.flush()
with open(sys.argv[2], 'r') as f:
hanzi = get_all_hanzi(f.read())
sys.stderr.write("Generating defs...\n")
sys.stderr.flush()
for ch in hanzi:
defs = cedict[ch]
def1 = def2 = def3 = ""
if not defs:
def1 = "Unknown"
else:
def1 = def2field(defs[0])
if len(defs) >= 2:
def2 = def2field(defs[1])
if len(defs) >= 3:
def3 = def2field(defs[2])
print("""%s\t%s\t%s\t%s""" % (ch, def1, def2, def3))
|
<commit_before><commit_msg>Write script to print defs<commit_after>import collections
import sys
from CEDICT_Parser import parser, pinyin
usage = "Usage: python hanzidefs.py cedict_ts.u8 inputtext.txt > outputdefs.tsv"
def get_all_hanzi(txt):
# count all the characters in CJK range
counts = collections.Counter([c for c in txt if '\u4e00' <= c <= '\u9fff'])
sys.stderr.write("%d uniq chars found\n" % len(counts))
# return characters from most to least common
return [hz for hz, _ in counts.most_common()]
def def2field(definition):
"""Given a parsed definition object {hanzi: _, pinyin: _, def: _}, generate
the formatted text field to show up in the output."""
return """{pronounce}: {meaning}""".format(
pronounce=pinyin.convert(definition['pinyin']),
meaning=definition['def'])
if __name__ == "__main__":
if len(sys.argv) < 3:
sys.stderr.write(usage)
sys.exit(1)
sys.stderr.write("Parsing CEDICT...\n")
sys.stderr.flush()
cedict = parser.read_file(sys.argv[1])
sys.stderr.write("Reading source text...\n")
sys.stderr.flush()
with open(sys.argv[2], 'r') as f:
hanzi = get_all_hanzi(f.read())
sys.stderr.write("Generating defs...\n")
sys.stderr.flush()
for ch in hanzi:
defs = cedict[ch]
def1 = def2 = def3 = ""
if not defs:
def1 = "Unknown"
else:
def1 = def2field(defs[0])
if len(defs) >= 2:
def2 = def2field(defs[1])
if len(defs) >= 3:
def3 = def2field(defs[2])
print("""%s\t%s\t%s\t%s""" % (ch, def1, def2, def3))
|
|
eec83626f31015bf4729de77dd637d08e7cc34ff
|
comics/comics/lunchdn.py
|
comics/comics/lunchdn.py
|
# encoding: utf-8
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Lunch (dn.no)"
language = "no"
url = "https://www.dn.no/topic/Lunch/"
active = True
rights = "Børge Lund"
class Crawler(CrawlerBase):
history_capable_days = 21 # 3 weeks
schedule = "Fr"
time_zone = "Europe/Oslo"
def crawl(self, pub_date):
page = self.parse_page(ComicData.url)
page_url = page.root.xpath(
'//time[@datetime="%s"]/../a/@href' % pub_date.strftime("%Y-%m-%d")
)
if not page_url:
return
release_page = self.parse_page(page_url[0])
image = release_page.root.xpath('//meta[@itemprop="image"]')
if not image:
return
url = image[0].get("content")
return CrawlerImage(url)
|
Add crawler for "Lunch" from dn.no
|
Add crawler for "Lunch" from dn.no
|
Python
|
agpl-3.0
|
jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,datagutten/comics,jodal/comics,jodal/comics,datagutten/comics
|
Add crawler for "Lunch" from dn.no
|
# encoding: utf-8
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Lunch (dn.no)"
language = "no"
url = "https://www.dn.no/topic/Lunch/"
active = True
rights = "Børge Lund"
class Crawler(CrawlerBase):
history_capable_days = 21 # 3 weeks
schedule = "Fr"
time_zone = "Europe/Oslo"
def crawl(self, pub_date):
page = self.parse_page(ComicData.url)
page_url = page.root.xpath(
'//time[@datetime="%s"]/../a/@href' % pub_date.strftime("%Y-%m-%d")
)
if not page_url:
return
release_page = self.parse_page(page_url[0])
image = release_page.root.xpath('//meta[@itemprop="image"]')
if not image:
return
url = image[0].get("content")
return CrawlerImage(url)
|
<commit_before><commit_msg>Add crawler for "Lunch" from dn.no<commit_after>
|
# encoding: utf-8
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Lunch (dn.no)"
language = "no"
url = "https://www.dn.no/topic/Lunch/"
active = True
rights = "Børge Lund"
class Crawler(CrawlerBase):
history_capable_days = 21 # 3 weeks
schedule = "Fr"
time_zone = "Europe/Oslo"
def crawl(self, pub_date):
page = self.parse_page(ComicData.url)
page_url = page.root.xpath(
'//time[@datetime="%s"]/../a/@href' % pub_date.strftime("%Y-%m-%d")
)
if not page_url:
return
release_page = self.parse_page(page_url[0])
image = release_page.root.xpath('//meta[@itemprop="image"]')
if not image:
return
url = image[0].get("content")
return CrawlerImage(url)
|
Add crawler for "Lunch" from dn.no# encoding: utf-8
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Lunch (dn.no)"
language = "no"
url = "https://www.dn.no/topic/Lunch/"
active = True
rights = "Børge Lund"
class Crawler(CrawlerBase):
history_capable_days = 21 # 3 weeks
schedule = "Fr"
time_zone = "Europe/Oslo"
def crawl(self, pub_date):
page = self.parse_page(ComicData.url)
page_url = page.root.xpath(
'//time[@datetime="%s"]/../a/@href' % pub_date.strftime("%Y-%m-%d")
)
if not page_url:
return
release_page = self.parse_page(page_url[0])
image = release_page.root.xpath('//meta[@itemprop="image"]')
if not image:
return
url = image[0].get("content")
return CrawlerImage(url)
|
<commit_before><commit_msg>Add crawler for "Lunch" from dn.no<commit_after># encoding: utf-8
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Lunch (dn.no)"
language = "no"
url = "https://www.dn.no/topic/Lunch/"
active = True
rights = "Børge Lund"
class Crawler(CrawlerBase):
history_capable_days = 21 # 3 weeks
schedule = "Fr"
time_zone = "Europe/Oslo"
def crawl(self, pub_date):
page = self.parse_page(ComicData.url)
page_url = page.root.xpath(
'//time[@datetime="%s"]/../a/@href' % pub_date.strftime("%Y-%m-%d")
)
if not page_url:
return
release_page = self.parse_page(page_url[0])
image = release_page.root.xpath('//meta[@itemprop="image"]')
if not image:
return
url = image[0].get("content")
return CrawlerImage(url)
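# Hedged usage sketch (assumes Crawler can be instantiated without arguments;
# in the comics project the aggregator normally wires this up):
#
#   import datetime
#   image = Crawler().crawl(datetime.date(2021, 1, 8))  # a Friday, matching schedule "Fr"
#   print(image)  # a CrawlerImage, or None if no release was found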
|
|
675698600b600a27d02b29b91a0fc4a30e03de1a
|
python/sieve_atkin.py
|
python/sieve_atkin.py
|
import sys
import math
args = sys.argv
N = len(args) > 1 and int(sys.argv[1]) or 1000
nsqrt = int((math.pow(N, 0.5)))
is_prime = [False] * (N + 1)
for x in range(1, nsqrt + 1):
    for y in range(1, nsqrt + 1):
        n = 4 * (x * x) + (y * y)
        if n <= N and n % 12 in [1, 5]:
            is_prime[n] = not is_prime[n]
        n = 3 * (x * x) + (y * y)
        if n <= N and n % 12 == 7:
            is_prime[n] = not is_prime[n]
        n = 3 * (x * x) - (y * y)
        if n <= N and x > y and n % 12 == 11:
            is_prime[n] = not is_prime[n]
# eliminate squares of primes and their multiples, as the sieve of Atkin requires
for n in range(5, nsqrt + 1):
    if is_prime[n]:
        for k in range(n * n, N + 1, n * n):
            is_prime[k] = False
is_prime[2] = True
is_prime[3] = True
primes = []
for k, v in enumerate(is_prime):
v and primes.append(k)
print(len(primes))
|
Add sieve of atkin in python
|
Add sieve of atkin in python
|
Python
|
mit
|
Zorbash/linguaphone,Zorbash/linguaphone,Zorbash/linguaphone
|
Add sieve of atkin in python
|
import sys
import math
args = sys.argv
N = len(args) > 1 and int(sys.argv[1]) or 1000
nsqrt = int((math.pow(N, 0.5)))
is_prime = [False] * (N + 1)
for x in range(1, nsqrt + 1):
    for y in range(1, nsqrt + 1):
        n = 4 * (x * x) + (y * y)
        if n <= N and n % 12 in [1, 5]:
            is_prime[n] = not is_prime[n]
        n = 3 * (x * x) + (y * y)
        if n <= N and n % 12 == 7:
            is_prime[n] = not is_prime[n]
        n = 3 * (x * x) - (y * y)
        if n <= N and x > y and n % 12 == 11:
            is_prime[n] = not is_prime[n]
# eliminate squares of primes and their multiples, as the sieve of Atkin requires
for n in range(5, nsqrt + 1):
    if is_prime[n]:
        for k in range(n * n, N + 1, n * n):
            is_prime[k] = False
is_prime[2] = True
is_prime[3] = True
primes = []
for k, v in enumerate(is_prime):
v and primes.append(k)
print(len(primes))
|
<commit_before><commit_msg>Add sieve of atkin in python<commit_after>
|
import sys
import math
args = sys.argv
N = len(args) > 1 and int(sys.argv[1]) or 1000
nsqrt = int((math.pow(N, 0.5)))
is_prime = [False] * (N + 1)
for x in range(1, nsqrt + 1):
    for y in range(1, nsqrt + 1):
        n = 4 * (x * x) + (y * y)
        if n <= N and n % 12 in [1, 5]:
            is_prime[n] = not is_prime[n]
        n = 3 * (x * x) + (y * y)
        if n <= N and n % 12 == 7:
            is_prime[n] = not is_prime[n]
        n = 3 * (x * x) - (y * y)
        if n <= N and x > y and n % 12 == 11:
            is_prime[n] = not is_prime[n]
# eliminate squares of primes and their multiples, as the sieve of Atkin requires
for n in range(5, nsqrt + 1):
    if is_prime[n]:
        for k in range(n * n, N + 1, n * n):
            is_prime[k] = False
is_prime[2] = True
is_prime[3] = True
primes = []
for k, v in enumerate(is_prime):
v and primes.append(k)
print(len(primes))
|
Add sieve of atkin in pythonimport sys
import math
args = sys.argv
N = len(args) > 1 and int(sys.argv[1]) or 1000
nsqrt = int((math.pow(N, 0.5)))
is_prime = [False] * (N + 1)
for x in range(1, nsqrt + 1):
    for y in range(1, nsqrt + 1):
        n = 4 * (x * x) + (y * y)
        if n <= N and n % 12 in [1, 5]:
            is_prime[n] = not is_prime[n]
        n = 3 * (x * x) + (y * y)
        if n <= N and n % 12 == 7:
            is_prime[n] = not is_prime[n]
        n = 3 * (x * x) - (y * y)
        if n <= N and x > y and n % 12 == 11:
            is_prime[n] = not is_prime[n]
# eliminate squares of primes and their multiples, as the sieve of Atkin requires
for n in range(5, nsqrt + 1):
    if is_prime[n]:
        for k in range(n * n, N + 1, n * n):
            is_prime[k] = False
is_prime[2] = True
is_prime[3] = True
primes = []
for k, v in enumerate(is_prime):
v and primes.append(k)
print(len(primes))
|
<commit_before><commit_msg>Add sieve of atkin in python<commit_after>import sys
import math
args = sys.argv
N = len(args) > 1 and int(sys.argv[1]) or 1000
nsqrt = int((math.pow(N, 0.5)))
is_prime = [False] * (N + 1)
for x in range(1, nsqrt + 1):
    for y in range(1, nsqrt + 1):
        n = 4 * (x * x) + (y * y)
        if n <= N and n % 12 in [1, 5]:
            is_prime[n] = not is_prime[n]
        n = 3 * (x * x) + (y * y)
        if n <= N and n % 12 == 7:
            is_prime[n] = not is_prime[n]
        n = 3 * (x * x) - (y * y)
        if n <= N and x > y and n % 12 == 11:
            is_prime[n] = not is_prime[n]
# eliminate squares of primes and their multiples, as the sieve of Atkin requires
for n in range(5, nsqrt + 1):
    if is_prime[n]:
        for k in range(n * n, N + 1, n * n):
            is_prime[k] = False
is_prime[2] = True
is_prime[3] = True
primes = []
for k, v in enumerate(is_prime):
v and primes.append(k)
print(len(primes))
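# Sanity check for the corrected sieve: `python sieve_atkin.py 100` should
# print 25 and `python sieve_atkin.py 1000` should print 168, the prime
# counts below those bounds.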
|
|
14aa8b554165efff430d4b1d5c04aa9ec14edb6c
|
tests/api/views/about_test.py
|
tests/api/views/about_test.py
|
def test_imprint(app, client):
app.config['SKYLINES_IMPRINT'] = u'foobar'
res = client.get('/imprint')
assert res.status_code == 200
assert res.json == {
u'content': u'foobar',
}
def test_team(client):
res = client.get('/team')
assert res.status_code == 200
content = res.json['content']
assert '## Developers' in content
assert '* Tobias Bieniek (<tobias.bieniek@gmx.de> // maintainer)\n' in content
assert '## Developers' in content
def test_license(client):
res = client.get('/license')
assert res.status_code == 200
content = res.json['content']
assert 'GNU AFFERO GENERAL PUBLIC LICENSE' in content
|
Add tests for "about" views
|
tests/api: Add tests for "about" views
|
Python
|
agpl-3.0
|
skylines-project/skylines,Harry-R/skylines,Turbo87/skylines,Harry-R/skylines,skylines-project/skylines,Turbo87/skylines,shadowoneau/skylines,shadowoneau/skylines,Harry-R/skylines,shadowoneau/skylines,skylines-project/skylines,RBE-Avionik/skylines,Turbo87/skylines,RBE-Avionik/skylines,shadowoneau/skylines,RBE-Avionik/skylines,Turbo87/skylines,skylines-project/skylines,Harry-R/skylines,RBE-Avionik/skylines
|
tests/api: Add tests for "about" views
|
def test_imprint(app, client):
app.config['SKYLINES_IMPRINT'] = u'foobar'
res = client.get('/imprint')
assert res.status_code == 200
assert res.json == {
u'content': u'foobar',
}
def test_team(client):
res = client.get('/team')
assert res.status_code == 200
content = res.json['content']
assert '## Developers' in content
assert '* Tobias Bieniek (<tobias.bieniek@gmx.de> // maintainer)\n' in content
assert '## Developers' in content
def test_license(client):
res = client.get('/license')
assert res.status_code == 200
content = res.json['content']
assert 'GNU AFFERO GENERAL PUBLIC LICENSE' in content
|
<commit_before><commit_msg>tests/api: Add tests for "about" views<commit_after>
|
def test_imprint(app, client):
app.config['SKYLINES_IMPRINT'] = u'foobar'
res = client.get('/imprint')
assert res.status_code == 200
assert res.json == {
u'content': u'foobar',
}
def test_team(client):
res = client.get('/team')
assert res.status_code == 200
content = res.json['content']
assert '## Developers' in content
assert '* Tobias Bieniek (<tobias.bieniek@gmx.de> // maintainer)\n' in content
assert '## Developers' in content
def test_license(client):
res = client.get('/license')
assert res.status_code == 200
content = res.json['content']
assert 'GNU AFFERO GENERAL PUBLIC LICENSE' in content
|
tests/api: Add tests for "about" viewsdef test_imprint(app, client):
app.config['SKYLINES_IMPRINT'] = u'foobar'
res = client.get('/imprint')
assert res.status_code == 200
assert res.json == {
u'content': u'foobar',
}
def test_team(client):
res = client.get('/team')
assert res.status_code == 200
content = res.json['content']
assert '## Developers' in content
assert '* Tobias Bieniek (<tobias.bieniek@gmx.de> // maintainer)\n' in content
assert '## Developers' in content
def test_license(client):
res = client.get('/license')
assert res.status_code == 200
content = res.json['content']
assert 'GNU AFFERO GENERAL PUBLIC LICENSE' in content
|
<commit_before><commit_msg>tests/api: Add tests for "about" views<commit_after>def test_imprint(app, client):
app.config['SKYLINES_IMPRINT'] = u'foobar'
res = client.get('/imprint')
assert res.status_code == 200
assert res.json == {
u'content': u'foobar',
}
def test_team(client):
res = client.get('/team')
assert res.status_code == 200
content = res.json['content']
assert '## Developers' in content
assert '* Tobias Bieniek (<tobias.bieniek@gmx.de> // maintainer)\n' in content
assert '## Developers' in content
def test_license(client):
res = client.get('/license')
assert res.status_code == 200
content = res.json['content']
assert 'GNU AFFERO GENERAL PUBLIC LICENSE' in content
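# Hedged note: `app` and `client` are assumed to be pytest fixtures defined
# in the project's conftest.py, roughly equivalent to:
#
#   @pytest.fixture
#   def client(app):
#       return app.test_client()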
|
|
864cf50cc543f53b21fced877baa3bdf4f582997
|
lazyblacksmith/utils/time.py
|
lazyblacksmith/utils/time.py
|
# -*- encoding: utf-8 -*-
import pytz
from datetime import datetime
def utcnow():
utc_now = datetime.utcnow()
utc_now = utc_now.replace(tzinfo=pytz.utc)
return utc_now
|
Add util function for utcnow with pytz
|
Add util function for utcnow with pytz
|
Python
|
bsd-3-clause
|
Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith
|
Add util function for utcnow with pytz
|
# -*- encoding: utf-8 -*-
import pytz
from datetime import datetime
def utcnow():
utc_now = datetime.utcnow()
utc_now = utc_now.replace(tzinfo=pytz.utc)
return utc_now
|
<commit_before><commit_msg>Add util function for utcnow with pytz<commit_after>
|
# -*- encoding: utf-8 -*-
import pytz
from datetime import datetime
def utcnow():
utc_now = datetime.utcnow()
utc_now = utc_now.replace(tzinfo=pytz.utc)
return utc_now
|
Add util function for utcnow with pytz# -*- encoding: utf-8 -*-
import pytz
from datetime import datetime
def utcnow():
utc_now = datetime.utcnow()
utc_now = utc_now.replace(tzinfo=pytz.utc)
return utc_now
|
<commit_before><commit_msg>Add util function for utcnow with pytz<commit_after># -*- encoding: utf-8 -*-
import pytz
from datetime import datetime
def utcnow():
utc_now = datetime.utcnow()
utc_now = utc_now.replace(tzinfo=pytz.utc)
return utc_now
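# Hedged usage sketch:
#
#   >>> utcnow().tzinfo
#   <UTC>
#
# An equivalent one-liner, if preferred, is datetime.now(pytz.utc), which
# returns an aware datetime directly instead of replacing tzinfo afterwards.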
|
|
46b63025f982062af178dcbaa1f5a6843895371f
|
bandoleers/args.py
|
bandoleers/args.py
|
import argparse
import logging
import sys
import bandoleers
class ArgumentParser(argparse.ArgumentParser):
"""
Implements some common command-line behaviors.
This is a slightly extended version of the standard
:class:`argparse.ArgumentParser` class that does three
things for you:
* exits with a non-zero status when help is shown
* implements --quiet and --verbose mutually exclusive
options that call :meth:`logging.Logger.setLevel` on
the root logger
* adds a --version flag
"""
def __init__(self, *args, **kwargs):
super(ArgumentParser, self).__init__(*args, **kwargs)
output_control = self.add_mutually_exclusive_group()
output_control.add_argument('-q', '--quiet',
action='store_true', default=False,
help='disable non-failure output')
output_control.add_argument('-v', '--verbose',
action='store_true', default=False,
help='show diagnostic output')
self.add_argument('--version', action='version',
version='%(prog)s {}'.format(bandoleers.__version__))
def parse_args(self, *args, **kwargs):
result = super(ArgumentParser, self).parse_args(*args, **kwargs)
if result.verbose:
logging.getLogger().setLevel(logging.DEBUG)
if result.quiet:
logging.getLogger().setLevel(logging.ERROR)
return result
def print_usage(self, file=None):
stream = file or sys.stdout
stream.write(self.format_usage())
sys.exit(64)
def print_help(self, file=None):
stream = file or sys.stdout
stream.write(self.format_help())
sys.exit(64)
|
Add extended version of argparse.ArgumentParser.
|
Add extended version of argparse.ArgumentParser.
|
Python
|
bsd-3-clause
|
aweber/bandoleers
|
Add extended version of argparse.ArgumentParser.
|
import argparse
import logging
import sys
import bandoleers
class ArgumentParser(argparse.ArgumentParser):
"""
Implements some common command-line behaviors.
This is a slightly extended version of the standard
:class:`argparse.ArgumentParser` class that does three
things for you:
* exits with a non-zero status when help is shown
* implements --quiet and --verbose mutually exclusive
options that call :meth:`logging.Logger.setLevel` on
the root logger
* adds a --version flag
"""
def __init__(self, *args, **kwargs):
super(ArgumentParser, self).__init__(*args, **kwargs)
output_control = self.add_mutually_exclusive_group()
output_control.add_argument('-q', '--quiet',
action='store_true', default=False,
help='disable non-failure output')
output_control.add_argument('-v', '--verbose',
action='store_true', default=False,
help='show diagnostic output')
self.add_argument('--version', action='version',
version='%(prog)s {}'.format(bandoleers.__version__))
def parse_args(self, *args, **kwargs):
result = super(ArgumentParser, self).parse_args(*args, **kwargs)
if result.verbose:
logging.getLogger().setLevel(logging.DEBUG)
if result.quiet:
logging.getLogger().setLevel(logging.ERROR)
return result
def print_usage(self, file=None):
stream = file or sys.stdout
stream.write(self.format_usage())
sys.exit(64)
def print_help(self, file=None):
stream = file or sys.stdout
stream.write(self.format_help())
sys.exit(64)
|
<commit_before><commit_msg>Add extended version of argparse.ArgumentParser.<commit_after>
|
import argparse
import logging
import sys
import bandoleers
class ArgumentParser(argparse.ArgumentParser):
"""
Implements some common command-line behaviors.
This is a slightly extended version of the standard
:class:`argparse.ArgumentParser` class that does three
things for you:
* exits with a non-zero status when help is shown
* implements --quiet and --verbose mutually exclusive
options that call :meth:`logging.Logger.setLevel` on
the root logger
* adds a --version flag
"""
def __init__(self, *args, **kwargs):
super(ArgumentParser, self).__init__(*args, **kwargs)
output_control = self.add_mutually_exclusive_group()
output_control.add_argument('-q', '--quiet',
action='store_true', default=False,
help='disable non-failure output')
output_control.add_argument('-v', '--verbose',
action='store_true', default=False,
help='show diagnostic output')
self.add_argument('--version', action='version',
version='%(prog)s {}'.format(bandoleers.__version__))
def parse_args(self, *args, **kwargs):
result = super(ArgumentParser, self).parse_args(*args, **kwargs)
if result.verbose:
logging.getLogger().setLevel(logging.DEBUG)
if result.quiet:
logging.getLogger().setLevel(logging.ERROR)
return result
def print_usage(self, file=None):
stream = file or sys.stdout
stream.write(self.format_usage())
sys.exit(64)
def print_help(self, file=None):
stream = file or sys.stdout
stream.write(self.format_help())
sys.exit(64)
|
Add extended version of argparse.ArgumentParser.import argparse
import logging
import sys
import bandoleers
class ArgumentParser(argparse.ArgumentParser):
"""
Implements some common command-line behaviors.
This is a slightly extended version of the standard
:class:`argparse.ArgumentParser` class that does three
things for you:
* exits with a non-zero status when help is shown
* implements --quiet and --verbose mutually exclusive
options that call :meth:`logging.Logger.setLevel` on
the root logger
* adds a --version flag
"""
def __init__(self, *args, **kwargs):
super(ArgumentParser, self).__init__(*args, **kwargs)
output_control = self.add_mutually_exclusive_group()
output_control.add_argument('-q', '--quiet',
action='store_true', default=False,
help='disable non-failure output')
output_control.add_argument('-v', '--verbose',
action='store_true', default=False,
help='show diagnostic output')
self.add_argument('--version', action='version',
version='%(prog)s {}'.format(bandoleers.__version__))
def parse_args(self, *args, **kwargs):
result = super(ArgumentParser, self).parse_args(*args, **kwargs)
if result.verbose:
logging.getLogger().setLevel(logging.DEBUG)
if result.quiet:
logging.getLogger().setLevel(logging.ERROR)
return result
def print_usage(self, file=None):
stream = file or sys.stdout
stream.write(self.format_usage())
sys.exit(64)
def print_help(self, file=None):
stream = file or sys.stdout
stream.write(self.format_help())
sys.exit(64)
|
<commit_before><commit_msg>Add extended version of argparse.ArgumentParser.<commit_after>import argparse
import logging
import sys
import bandoleers
class ArgumentParser(argparse.ArgumentParser):
"""
Implements some common command-line behaviors.
This is a slightly extended version of the standard
:class:`argparse.ArgumentParser` class that does three
things for you:
* exits with a non-zero status when help is shown
* implements --quiet and --verbose mutually exclusive
options that call :meth:`logging.Logger.setLevel` on
the root logger
* adds a --version flag
"""
def __init__(self, *args, **kwargs):
super(ArgumentParser, self).__init__(*args, **kwargs)
output_control = self.add_mutually_exclusive_group()
output_control.add_argument('-q', '--quiet',
action='store_true', default=False,
help='disable non-failure output')
output_control.add_argument('-v', '--verbose',
action='store_true', default=False,
help='show diagnostic output')
self.add_argument('--version', action='version',
version='%(prog)s {}'.format(bandoleers.__version__))
def parse_args(self, *args, **kwargs):
result = super(ArgumentParser, self).parse_args(*args, **kwargs)
if result.verbose:
logging.getLogger().setLevel(logging.DEBUG)
if result.quiet:
logging.getLogger().setLevel(logging.ERROR)
return result
def print_usage(self, file=None):
stream = file or sys.stdout
stream.write(self.format_usage())
sys.exit(64)
def print_help(self, file=None):
stream = file or sys.stdout
stream.write(self.format_help())
sys.exit(64)
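# Hedged usage sketch (the program and arguments are hypothetical):
#
#   parser = ArgumentParser(description='example tool')
#   opts = parser.parse_args(['--verbose'])
#   # the root logger is now at DEBUG; '--quiet' would set ERROR instead,
#   # and showing help or usage exits with status 64 rather than falling through.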
|
|
0607730872442207574d166649b0dd6e0ce78509
|
analytics/nltk_collocations.py
|
analytics/nltk_collocations.py
|
#!/usr/bin/env python3
"""
This receives text from stdin and does a collocation analysis
"""
import sys
import nltk
from nltk.collocations import *
def read_lines_stdin():
lines = []
for line in sys.stdin:
lines.append(line)
return lines
if __name__ == '__main__':
tweets = read_lines_stdin()
tokens = nltk.wordpunct_tokenize(" ".join(tweets))
print("Trigrams - 3 words with more than 3 characters each occuring together min. 3x in full text:")
trigram_measures = nltk.collocations.TrigramAssocMeasures()
finder = TrigramCollocationFinder.from_words(tokens)
finder.apply_word_filter(lambda w: len(w) < 3)
finder.apply_freq_filter(3)
print(finder.nbest(trigram_measures.pmi, 20))
print()
print("Bigrams - 2 words with more than 3 characters each occuring together min. 3x in full text:")
bigram_measures = nltk.collocations.BigramAssocMeasures()
finder = BigramCollocationFinder.from_words(tokens)
finder.apply_word_filter(lambda w: len(w) < 3)
finder.apply_freq_filter(3)
print(finder.nbest(bigram_measures.pmi, 20))
print()
|
Add bigram and trigram analyzer
|
[F] Add bigram and trigram analyzer
|
Python
|
mit
|
suchkultur/trumpeltier
|
[F] Add bigram and trigram analyzer
|
#!/usr/bin/env python3
"""
This receives text from stdin and does a collocation analysis
"""
import sys
import nltk
from nltk.collocations import *
def read_lines_stdin():
lines = []
for line in sys.stdin:
lines.append(line)
return lines
if __name__ == '__main__':
tweets = read_lines_stdin()
tokens = nltk.wordpunct_tokenize(" ".join(tweets))
print("Trigrams - 3 words with more than 3 characters each occuring together min. 3x in full text:")
trigram_measures = nltk.collocations.TrigramAssocMeasures()
finder = TrigramCollocationFinder.from_words(tokens)
finder.apply_word_filter(lambda w: len(w) < 3)
finder.apply_freq_filter(3)
print(finder.nbest(trigram_measures.pmi, 20))
print()
print("Bigrams - 2 words with more than 3 characters each occuring together min. 3x in full text:")
bigram_measures = nltk.collocations.BigramAssocMeasures()
finder = BigramCollocationFinder.from_words(tokens)
finder.apply_word_filter(lambda w: len(w) < 3)
finder.apply_freq_filter(3)
print(finder.nbest(bigram_measures.pmi, 20))
print()
|
<commit_before><commit_msg>[F] Add bigram and trigram analyzer<commit_after>
|
#!/usr/bin/env python3
"""
This receives text from stdin and does a collocation analysis
"""
import sys
import nltk
from nltk.collocations import *
def read_lines_stdin():
lines = []
for line in sys.stdin:
lines.append(line)
return lines
if __name__ == '__main__':
tweets = read_lines_stdin()
tokens = nltk.wordpunct_tokenize(" ".join(tweets))
print("Trigrams - 3 words with more than 3 characters each occuring together min. 3x in full text:")
trigram_measures = nltk.collocations.TrigramAssocMeasures()
finder = TrigramCollocationFinder.from_words(tokens)
finder.apply_word_filter(lambda w: len(w) < 3)
finder.apply_freq_filter(3)
print(finder.nbest(trigram_measures.pmi, 20))
print()
print("Bigrams - 2 words with more than 3 characters each occuring together min. 3x in full text:")
bigram_measures = nltk.collocations.BigramAssocMeasures()
finder = BigramCollocationFinder.from_words(tokens)
finder.apply_word_filter(lambda w: len(w) < 3)
finder.apply_freq_filter(3)
print(finder.nbest(bigram_measures.pmi, 20))
print()
|
[F] Add bigram and trigram analyzer#!/usr/bin/env python3
"""
This receives text from stdin and does a collocation analysis
"""
import sys
import nltk
from nltk.collocations import *
def read_lines_stdin():
lines = []
for line in sys.stdin:
lines.append(line)
return lines
if __name__ == '__main__':
tweets = read_lines_stdin()
tokens = nltk.wordpunct_tokenize(" ".join(tweets))
print("Trigrams - 3 words with more than 3 characters each occuring together min. 3x in full text:")
trigram_measures = nltk.collocations.TrigramAssocMeasures()
finder = TrigramCollocationFinder.from_words(tokens)
finder.apply_word_filter(lambda w: len(w) < 3)
finder.apply_freq_filter(3)
print(finder.nbest(trigram_measures.pmi, 20))
print()
print("Bigrams - 2 words with more than 3 characters each occuring together min. 3x in full text:")
bigram_measures = nltk.collocations.BigramAssocMeasures()
finder = BigramCollocationFinder.from_words(tokens)
finder.apply_word_filter(lambda w: len(w) < 3)
finder.apply_freq_filter(3)
print(finder.nbest(bigram_measures.pmi, 20))
print()
|
<commit_before><commit_msg>[F] Add bigram and trigram analyzer<commit_after>#!/usr/bin/env python3
"""
This receives text from stdin and does a collocation analysis
"""
import sys
import nltk
from nltk.collocations import *
def read_lines_stdin():
lines = []
for line in sys.stdin:
lines.append(line)
return lines
if __name__ == '__main__':
tweets = read_lines_stdin()
tokens = nltk.wordpunct_tokenize(" ".join(tweets))
print("Trigrams - 3 words with more than 3 characters each occuring together min. 3x in full text:")
trigram_measures = nltk.collocations.TrigramAssocMeasures()
finder = TrigramCollocationFinder.from_words(tokens)
finder.apply_word_filter(lambda w: len(w) < 3)
finder.apply_freq_filter(3)
print(finder.nbest(trigram_measures.pmi, 20))
print()
print("Bigrams - 2 words with more than 3 characters each occuring together min. 3x in full text:")
bigram_measures = nltk.collocations.BigramAssocMeasures()
finder = BigramCollocationFinder.from_words(tokens)
finder.apply_word_filter(lambda w: len(w) < 3)
finder.apply_freq_filter(3)
print(finder.nbest(bigram_measures.pmi, 20))
print()
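# Hedged usage sketch (wordpunct_tokenize is regex based, so no extra nltk
# corpora downloads should be needed):
#
#   cat tweets.txt | python nltk_collocations.py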
|
|
330a938ea62e680b4aff2378e1e29b564f9049a1
|
python/vyos/authutils.py
|
python/vyos/authutils.py
|
# authutils -- miscellaneous functions for handling passwords and public keys
#
# Copyright (C) 2018 VyOS maintainers and contributors
#
# This library is free software; you can redistribute it and/or modify it under the terms of
# the GNU Lesser General Public License as published by the Free Software Foundation;
# either version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with this library;
# if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import re
from subprocess import Popen, PIPE, STDOUT
def make_password_hash(password):
""" Makes a password hash for /etc/shadow using mkpasswd """
mkpasswd = Popen(['mkpasswd', '--method=sha-512', '--stdin'], stdout=PIPE, stdin=PIPE, stderr=PIPE)
hash = mkpasswd.communicate(input=password.encode(), timeout=5)[0].decode().strip()
return hash
def split_ssh_public_key(key_string, defaultname=""):
""" Splits an SSH public key into its components """
key_string = key_string.strip()
parts = re.split(r'\s+', key_string)
if len(parts) == 3:
key_type, key_data, key_name = parts[0], parts[1], parts[2]
else:
key_type, key_data, key_name = parts[0], parts[1], defaultname
if key_type not in ['ssh-rsa', 'ssh-dss', 'ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-ed25519']:
raise ValueError("Bad key type \'{0}\', must be one of must be one of ssh-rsa, ssh-dss, ecdsa-sha2-nistp<256|384|521> or ssh-ed25519".format(key_type))
return({"type": key_type, "data": key_data, "name": key_name})
|
Add a library for misc functions for handling passwords, SSH keys etc.
|
Add a library for misc functions for handling passwords, SSH keys etc.
|
Python
|
lgpl-2.1
|
vyos/vyos-1x,vyos/vyos-1x,vyos/vyos-1x,vyos/vyos-1x
|
Add a library for misc functions for handling passwords, SSH keys etc.
|
# authutils -- miscellaneous functions for handling passwords and public keys
#
# Copyright (C) 2018 VyOS maintainers and contributors
#
# This library is free software; you can redistribute it and/or modify it under the terms of
# the GNU Lesser General Public License as published by the Free Software Foundation;
# either version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with this library;
# if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import re
from subprocess import Popen, PIPE, STDOUT
def make_password_hash(password):
""" Makes a password hash for /etc/shadow using mkpasswd """
mkpasswd = Popen(['mkpasswd', '--method=sha-512', '--stdin'], stdout=PIPE, stdin=PIPE, stderr=PIPE)
hash = mkpasswd.communicate(input=password.encode(), timeout=5)[0].decode().strip()
return hash
def split_ssh_public_key(key_string, defaultname=""):
""" Splits an SSH public key into its components """
key_string = key_string.strip()
parts = re.split(r'\s+', key_string)
if len(parts) == 3:
key_type, key_data, key_name = parts[0], parts[1], parts[2]
else:
key_type, key_data, key_name = parts[0], parts[1], defaultname
if key_type not in ['ssh-rsa', 'ssh-dss', 'ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-ed25519']:
raise ValueError("Bad key type \'{0}\', must be one of must be one of ssh-rsa, ssh-dss, ecdsa-sha2-nistp<256|384|521> or ssh-ed25519".format(key_type))
return({"type": key_type, "data": key_data, "name": key_name})
|
<commit_before><commit_msg>Add a library for misc functions for handling passwords, SSH keys etc.<commit_after>
|
# authutils -- miscellaneous functions for handling passwords and public keys
#
# Copyright (C) 2018 VyOS maintainers and contributors
#
# This library is free software; you can redistribute it and/or modify it under the terms of
# the GNU Lesser General Public License as published by the Free Software Foundation;
# either version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with this library;
# if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import re
from subprocess import Popen, PIPE, STDOUT
def make_password_hash(password):
""" Makes a password hash for /etc/shadow using mkpasswd """
mkpasswd = Popen(['mkpasswd', '--method=sha-512', '--stdin'], stdout=PIPE, stdin=PIPE, stderr=PIPE)
hash = mkpasswd.communicate(input=password.encode(), timeout=5)[0].decode().strip()
return hash
def split_ssh_public_key(key_string, defaultname=""):
""" Splits an SSH public key into its components """
key_string = key_string.strip()
parts = re.split(r'\s+', key_string)
if len(parts) == 3:
key_type, key_data, key_name = parts[0], parts[1], parts[2]
else:
key_type, key_data, key_name = parts[0], parts[1], defaultname
if key_type not in ['ssh-rsa', 'ssh-dss', 'ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-ed25519']:
raise ValueError("Bad key type \'{0}\', must be one of must be one of ssh-rsa, ssh-dss, ecdsa-sha2-nistp<256|384|521> or ssh-ed25519".format(key_type))
return({"type": key_type, "data": key_data, "name": key_name})
|
Add a library for misc functions for handling passwords, SSH keys etc.# authutils -- miscellaneous functions for handling passwords and public keys
#
# Copyright (C) 2018 VyOS maintainers and contributors
#
# This library is free software; you can redistribute it and/or modify it under the terms of
# the GNU Lesser General Public License as published by the Free Software Foundation;
# either version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with this library;
# if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import re
from subprocess import Popen, PIPE, STDOUT
def make_password_hash(password):
""" Makes a password hash for /etc/shadow using mkpasswd """
mkpasswd = Popen(['mkpasswd', '--method=sha-512', '--stdin'], stdout=PIPE, stdin=PIPE, stderr=PIPE)
hash = mkpasswd.communicate(input=password.encode(), timeout=5)[0].decode().strip()
return hash
def split_ssh_public_key(key_string, defaultname=""):
""" Splits an SSH public key into its components """
key_string = key_string.strip()
parts = re.split(r'\s+', key_string)
if len(parts) == 3:
key_type, key_data, key_name = parts[0], parts[1], parts[2]
else:
key_type, key_data, key_name = parts[0], parts[1], defaultname
if key_type not in ['ssh-rsa', 'ssh-dss', 'ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-ed25519']:
raise ValueError("Bad key type \'{0}\', must be one of must be one of ssh-rsa, ssh-dss, ecdsa-sha2-nistp<256|384|521> or ssh-ed25519".format(key_type))
return({"type": key_type, "data": key_data, "name": key_name})
|
<commit_before><commit_msg>Add a library for misc functions for handling passwords, SSH keys etc.<commit_after># authutils -- miscellaneous functions for handling passwords and public keys
#
# Copyright (C) 2018 VyOS maintainers and contributors
#
# This library is free software; you can redistribute it and/or modify it under the terms of
# the GNU Lesser General Public License as published by the Free Software Foundation;
# either version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with this library;
# if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import re
from subprocess import Popen, PIPE, STDOUT
def make_password_hash(password):
""" Makes a password hash for /etc/shadow using mkpasswd """
mkpasswd = Popen(['mkpasswd', '--method=sha-512', '--stdin'], stdout=PIPE, stdin=PIPE, stderr=PIPE)
hash = mkpasswd.communicate(input=password.encode(), timeout=5)[0].decode().strip()
return hash
def split_ssh_public_key(key_string, defaultname=""):
""" Splits an SSH public key into its components """
key_string = key_string.strip()
parts = re.split(r'\s+', key_string)
if len(parts) == 3:
key_type, key_data, key_name = parts[0], parts[1], parts[2]
else:
key_type, key_data, key_name = parts[0], parts[1], defaultname
if key_type not in ['ssh-rsa', 'ssh-dss', 'ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'ssh-ed25519']:
raise ValueError("Bad key type \'{0}\', must be one of must be one of ssh-rsa, ssh-dss, ecdsa-sha2-nistp<256|384|521> or ssh-ed25519".format(key_type))
return({"type": key_type, "data": key_data, "name": key_name})
|
|
7069a8826f06338e0d95981316972147314c6e81
|
dn1/vozilo_test.py
|
dn1/vozilo_test.py
|
__author__ = 'nino'
import unittest
from jadrolinija import Vozilo
class VoziloTest(unittest.TestCase):
def test_something(self):
v = Vozilo('NM DK-34J', 425)
self.assertEqual(v.tablica, 'NM DK-34J')
self.assertEqual(v.dolzina, 425)
if __name__ == '__main__':
unittest.main()
|
Add unittests for Task 4
|
Add unittests for Task 4
|
Python
|
mit
|
nbasic/racunalnistvo-1
|
Add unittests for Task 4
|
__author__ = 'nino'
import unittest
from jadrolinija import Vozilo
class VoziloTest(unittest.TestCase):
def test_something(self):
v = Vozilo('NM DK-34J', 425)
self.assertEqual(v.tablica, 'NM DK-34J')
self.assertEqual(v.dolzina, 425)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unittests for Task 4<commit_after>
|
__author__ = 'nino'
import unittest
from jadrolinija import Vozilo
class VoziloTest(unittest.TestCase):
def test_something(self):
v = Vozilo('NM DK-34J', 425)
self.assertEqual(v.tablica, 'NM DK-34J')
self.assertEqual(v.dolzina, 425)
if __name__ == '__main__':
unittest.main()
|
Add unittests for Task 4__author__ = 'nino'
import unittest
from jadrolinija import Vozilo
class VoziloTest(unittest.TestCase):
def test_something(self):
v = Vozilo('NM DK-34J', 425)
self.assertEqual(v.tablica, 'NM DK-34J')
self.assertEqual(v.dolzina, 425)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unittests for Task 4<commit_after>__author__ = 'nino'
import unittest
from jadrolinija import Vozilo
class VoziloTest(unittest.TestCase):
def test_something(self):
v = Vozilo('NM DK-34J', 425)
self.assertEqual(v.tablica, 'NM DK-34J')
self.assertEqual(v.dolzina, 425)
if __name__ == '__main__':
unittest.main()
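# Hedged note: Vozilo is assumed (from jadrolinija.py, which is not part of
# this file) to store its constructor arguments directly, roughly:
#
#   class Vozilo:
#       def __init__(self, tablica, dolzina):
#           self.tablica = tablica
#           self.dolzina = dolzina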
|
|
48e94dff15394fe04ed61102dd51a7543912706a
|
host-test/pipe-test.py
|
host-test/pipe-test.py
|
import json
import subprocess
import struct
import sys
import unittest
# The protocol datagram is described here:
# https://developer.chrome.com/extensions/nativeMessaging#native-messaging-host-protocol
def get_exe():
if sys.platform == 'darwin':
return "host-osx/build/Release/chrome-token-signing.app/Contents/MacOS/chrome-token-signing"
elif sys.platform == "linux2":
return "host-linux/out/chrome-token-signing"
else:
print("Unsupported platform: %s" % sys.platform)
sys.exit(1)
class TestHostPipe(unittest.TestCase):
def get_response(self):
response_length = struct.unpack("=I", self.p.stdout.read(4))[0]
response = str(self.p.stdout.read(response_length))
# make it into "oneline" json before printing
response_print = json.dumps(json.loads(response))
print ("RECV: %s" % response_print)
return json.loads(response)
def transceive(self, msg):
# send the message framed as described in the protocol link above
print ("SEND: %s" % msg)
self.p.stdin.write(struct.pack("=I", len(msg)))
self.p.stdin.write(msg)
# now read the input
return self.get_response()
def setUp(self):
should_close_fds = sys.platform != 'win32'
self.p = subprocess.Popen(get_exe(), stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=should_close_fds, stderr=None)
print ("Running native component on PID %d" % self.p.pid)
def tearDown(self):
self.p.terminate()
self.p.wait()
def test_random_string(self):
cmd = "BLAH"
resp = self.transceive(cmd)
self.assertEquals(resp["result"], "invalid_argument")
def test_plain_string(self):
self.p.stdin.write("Hello World!")
resp = self.get_response()
self.assertEquals(resp["result"], "invalid_argument")
def test_utopic_length(self):
# write a huge length prefix but only a little data
self.p.stdin.write(struct.pack("=I", 0xFFFFFFFF))
self.p.stdin.write("Hello World!")
resp = self.get_response()
self.assertEquals(resp["result"], "invalid_argument")
if __name__ == '__main__':
# run tests
unittest.main()
|
Add generic "pipe" test for native component
|
Add generic "pipe" test for native component
|
Python
|
lgpl-2.1
|
metsma/chrome-token-signing,cristiano-andrade/chrome-token-signing,fabiorusso/chrome-token-signing,metsma/chrome-token-signing,open-eid/chrome-token-signing,metsma/chrome-token-signing,cristiano-andrade/chrome-token-signing,cristiano-andrade/chrome-token-signing,open-eid/chrome-token-signing,fabiorusso/chrome-token-signing,open-eid/chrome-token-signing,metsma/chrome-token-signing,cristiano-andrade/chrome-token-signing,fabiorusso/chrome-token-signing,open-eid/chrome-token-signing,metsma/chrome-token-signing,open-eid/chrome-token-signing,fabiorusso/chrome-token-signing
|
Add generic "pipe" test for native component
|
import json
import subprocess
import struct
import sys
import unittest
# The protocol datagram is described here:
# https://developer.chrome.com/extensions/nativeMessaging#native-messaging-host-protocol
def get_exe():
if sys.platform == 'darwin':
return "host-osx/build/Release/chrome-token-signing.app/Contents/MacOS/chrome-token-signing"
elif sys.platform == "linux2":
return "host-linux/out/chrome-token-signing"
else:
print("Unsupported platform: %s" % sys.platform)
sys.exit(1)
class TestHostPipe(unittest.TestCase):
def get_response(self):
response_length = struct.unpack("=I", self.p.stdout.read(4))[0]
response = str(self.p.stdout.read(response_length))
# make it into "oneline" json before printing
response_print = json.dumps(json.loads(response))
print ("RECV: %s" % response_print)
return json.loads(response)
def transceive(self, msg):
# send the message framed as described in the protocol link above
print ("SEND: %s" % msg)
self.p.stdin.write(struct.pack("=I", len(msg)))
self.p.stdin.write(msg)
# now read the input
return self.get_response()
def setUp(self):
should_close_fds = sys.platform != 'win32'
self.p = subprocess.Popen(get_exe(), stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=should_close_fds, stderr=None)
print ("Running native component on PID %d" % self.p.pid)
def tearDown(self):
self.p.terminate()
self.p.wait()
def test_random_string(self):
cmd = "BLAH"
resp = self.transceive(cmd)
self.assertEquals(resp["result"], "invalid_argument")
def test_plain_string(self):
self.p.stdin.write("Hello World!")
resp = self.get_response()
self.assertEquals(resp["result"], "invalid_argument")
def test_utopic_length(self):
# write a huge length prefix but only a little data
self.p.stdin.write(struct.pack("=I", 0xFFFFFFFF))
self.p.stdin.write("Hello World!")
resp = self.get_response()
self.assertEquals(resp["result"], "invalid_argument")
if __name__ == '__main__':
# run tests
unittest.main()
|
<commit_before><commit_msg>Add generic "pipe" test for native component<commit_after>
|
import json
import subprocess
import struct
import sys
import unittest
# The protocol datagram is described here:
# https://developer.chrome.com/extensions/nativeMessaging#native-messaging-host-protocol
def get_exe():
if sys.platform == 'darwin':
return "host-osx/build/Release/chrome-token-signing.app/Contents/MacOS/chrome-token-signing"
elif sys.platform == "linux2":
return "host-linux/out/chrome-token-signing"
else:
print("Unsupported platform: %s" % sys.platform)
sys.exit(1)
class TestHostPipe(unittest.TestCase):
def get_response(self):
response_length = struct.unpack("=I", self.p.stdout.read(4))[0]
response = str(self.p.stdout.read(response_length))
# make it into "oneline" json before printing
response_print = json.dumps(json.loads(response))
print ("RECV: %s" % response_print)
return json.loads(response)
def transceive(self, msg):
# send the message framed as described in the protocol link above
print ("SEND: %s" % msg)
self.p.stdin.write(struct.pack("=I", len(msg)))
self.p.stdin.write(msg)
# now read the input
return self.get_response()
def setUp(self):
should_close_fds = sys.platform != 'win32'
self.p = subprocess.Popen(get_exe(), stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=should_close_fds, stderr=None)
print ("Running native component on PID %d" % self.p.pid)
def tearDown(self):
self.p.terminate()
self.p.wait()
def test_random_string(self):
cmd = "BLAH"
resp = self.transceive(cmd)
self.assertEquals(resp["result"], "invalid_argument")
def test_plain_string(self):
self.p.stdin.write("Hello World!")
resp = self.get_response()
self.assertEquals(resp["result"], "invalid_argument")
def test_utopic_length(self):
# write a huge length prefix but only a little data
self.p.stdin.write(struct.pack("=I", 0xFFFFFFFF))
self.p.stdin.write("Hello World!")
resp = self.get_response()
self.assertEquals(resp["result"], "invalid_argument")
if __name__ == '__main__':
# run tests
unittest.main()
|
Add generic "pipe" test for native componentimport json
import subprocess
import struct
import sys
import unittest
# The protocol datagram is described here:
# https://developer.chrome.com/extensions/nativeMessaging#native-messaging-host-protocol
def get_exe():
if sys.platform == 'darwin':
return "host-osx/build/Release/chrome-token-signing.app/Contents/MacOS/chrome-token-signing"
elif sys.platform == "linux2":
return "host-linux/out/chrome-token-signing"
else:
print("Unsupported platform: %s" % sys.platform)
sys.exit(1)
class TestHostPipe(unittest.TestCase):
def get_response(self):
response_length = struct.unpack("=I", self.p.stdout.read(4))[0]
response = str(self.p.stdout.read(response_length))
# make it into "oneline" json before printing
response_print = json.dumps(json.loads(response))
print ("RECV: %s" % response_print)
return json.loads(response)
def transceive(self, msg):
# send the message framed as described in the protocol link above
print ("SEND: %s" % msg)
self.p.stdin.write(struct.pack("=I", len(msg)))
self.p.stdin.write(msg)
# now read the input
return self.get_response()
def setUp(self):
should_close_fds = sys.platform != 'win32'
self.p = subprocess.Popen(get_exe(), stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=should_close_fds, stderr=None)
print ("Running native component on PID %d" % self.p.pid)
def tearDown(self):
self.p.terminate()
self.p.wait()
def test_random_string(self):
cmd = "BLAH"
resp = self.transceive(cmd)
self.assertEquals(resp["result"], "invalid_argument")
def test_plain_string(self):
self.p.stdin.write("Hello World!")
resp = self.get_response()
self.assertEquals(resp["result"], "invalid_argument")
def test_utopic_length(self):
# write a huge length prefix but only a little data
self.p.stdin.write(struct.pack("=I", 0xFFFFFFFF))
self.p.stdin.write("Hello World!")
resp = self.get_response()
self.assertEquals(resp["result"], "invalid_argument")
if __name__ == '__main__':
# run tests
unittest.main()
|
<commit_before><commit_msg>Add generic "pipe" test for native component<commit_after>import json
import subprocess
import struct
import sys
import unittest
# The protocol datagram is described here:
# https://developer.chrome.com/extensions/nativeMessaging#native-messaging-host-protocol
def get_exe():
if sys.platform == 'darwin':
return "host-osx/build/Release/chrome-token-signing.app/Contents/MacOS/chrome-token-signing"
elif sys.platform == "linux2":
return "host-linux/out/chrome-token-signing"
else:
print("Unsupported platform: %s" % sys.platform)
sys.exit(1)
class TestHostPipe(unittest.TestCase):
def get_response(self):
response_length = struct.unpack("=I", self.p.stdout.read(4))[0]
response = str(self.p.stdout.read(response_length))
# make it into "oneline" json before printing
response_print = json.dumps(json.loads(response))
print ("RECV: %s" % response_print)
return json.loads(response)
def transceive(self, msg):
# send the message framed as described in the protocol link above
print ("SEND: %s" % msg)
self.p.stdin.write(struct.pack("=I", len(msg)))
self.p.stdin.write(msg)
# now read the input
return self.get_response()
def setUp(self):
should_close_fds = sys.platform != 'win32'
self.p = subprocess.Popen(get_exe(), stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=should_close_fds, stderr=None)
print ("Running native component on PID %d" % self.p.pid)
def tearDown(self):
self.p.terminate()
self.p.wait()
def test_random_string(self):
cmd = "BLAH"
resp = self.transceive(cmd)
self.assertEquals(resp["result"], "invalid_argument")
def test_plain_string(self):
self.p.stdin.write("Hello World!")
resp = self.get_response()
self.assertEquals(resp["result"], "invalid_argument")
def test_utopic_length(self):
# write a huge length prefix but only a little data
self.p.stdin.write(struct.pack("=I", 0xFFFFFFFF))
self.p.stdin.write("Hello World!")
resp = self.get_response()
self.assertEquals(resp["result"], "invalid_argument")
if __name__ == '__main__':
# run tests
unittest.main()
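# Hedged illustration of the framing these tests exercise: a native-order
# uint32 length prefix followed by the JSON payload. On a little-endian
# host, for example:
#
#   >>> struct.pack("=I", len('{"result":"ok"}'))
#   '\x0f\x00\x00\x00'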
|
|
4fe4408736268e3a2c8437cca70625b4fc1a4a3c
|
i3pystatus/pianobar.py
|
i3pystatus/pianobar.py
|
from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
The fifo and event_command options must be set in the pianobar config file
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile,"w").write("p")
def on_rightclick(self):
open(self.ctlfile,"w").write("n")
def on_upscroll(self):
open(self.ctlfile,"w").write(")")
def on_downscroll(self):
open(self.ctlfile,"w").write("(")
|
from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
The fifo and event_command options must be set in the pianobar config file
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
|
Fix blank lines and whitespaces
|
Fix blank lines and whitespaces
|
Python
|
mit
|
m45t3r/i3pystatus,facetoe/i3pystatus,ismaelpuerto/i3pystatus,ismaelpuerto/i3pystatus,plumps/i3pystatus,schroeji/i3pystatus,teto/i3pystatus,plumps/i3pystatus,teto/i3pystatus,facetoe/i3pystatus,ncoop/i3pystatus,Elder-of-Ozone/i3pystatus,enkore/i3pystatus,juliushaertl/i3pystatus,enkore/i3pystatus,yang-ling/i3pystatus,MaicoTimmerman/i3pystatus,drwahl/i3pystatus,asmikhailov/i3pystatus,Arvedui/i3pystatus,asmikhailov/i3pystatus,drwahl/i3pystatus,MaicoTimmerman/i3pystatus,yang-ling/i3pystatus,richese/i3pystatus,Arvedui/i3pystatus,m45t3r/i3pystatus,ncoop/i3pystatus,paulollivier/i3pystatus,paulollivier/i3pystatus,juliushaertl/i3pystatus,fmarchenko/i3pystatus,schroeji/i3pystatus,claria/i3pystatus,opatut/i3pystatus,richese/i3pystatus,Elder-of-Ozone/i3pystatus,eBrnd/i3pystatus,onkelpit/i3pystatus,opatut/i3pystatus,fmarchenko/i3pystatus,onkelpit/i3pystatus,claria/i3pystatus,eBrnd/i3pystatus
|
from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
The fifo and event_command options must be set in the pianobar config file
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile,"w").write("p")
def on_rightclick(self):
open(self.ctlfile,"w").write("n")
def on_upscroll(self):
open(self.ctlfile,"w").write(")")
def on_downscroll(self):
open(self.ctlfile,"w").write("(")
Fix blank lines and whitespaces
|
from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
The fifo and event_command options must be set in the pianobar config file
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
|
<commit_before>from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
The fifo and event_command options must be set in the pianobar config file
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile,"w").write("p")
def on_rightclick(self):
open(self.ctlfile,"w").write("n")
def on_upscroll(self):
open(self.ctlfile,"w").write(")")
def on_downscroll(self):
open(self.ctlfile,"w").write("(")
<commit_msg>Fix blank lines and whitespaces<commit_after>
|
from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
The fifo and event_command options must be set in the pianobar config file
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
|
from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
The fifo and event_command options must be set in the pianobar config file
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile,"w").write("p")
def on_rightclick(self):
open(self.ctlfile,"w").write("n")
def on_upscroll(self):
open(self.ctlfile,"w").write(")")
def on_downscroll(self):
open(self.ctlfile,"w").write("(")
Fix blank lines and whitespacesfrom i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
The fifo and event_command options must be set in the pianobar config file
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
|
<commit_before>from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current music
The fifo and event_command options must be set in the pianobar config file
(see man pianobar for more information)
Mouse events:
- Left click play/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile,"w").write("p")
def on_rightclick(self):
open(self.ctlfile,"w").write("n")
def on_upscroll(self):
open(self.ctlfile,"w").write(")")
def on_downscroll(self):
open(self.ctlfile,"w").write("(")
<commit_msg>Fix blank lines and whitespaces<commit_after>from i3pystatus import IntervalModule
class Pianobar(IntervalModule):
"""
Shows the title and artist name of the current song
The fifo and event_command options must be set in the pianobar config file
(see man pianobar for more information)
Mouse events:
- Left click plays/pauses
- Right click plays next song
- Scroll up/down changes volume
"""
settings = (
("format"),
("songfile", "File generated by pianobar eventcmd"),
("ctlfile", "Pianobar fifo file"),
("color", "The color of the text"),
)
format = "{songtitle} -- {songartist}"
required = ("format", "songfile", "ctlfile")
color = "#FFFFFF"
def run(self):
with open(self.songfile, "r") as f:
contents = f.readlines()
sn = contents[0].strip()
sa = contents[1].strip()
self.output = {
"full_text": self.format.format(songtitle=sn, songartist=sa),
"color": self.color
}
def on_leftclick(self):
open(self.ctlfile, "w").write("p")
def on_rightclick(self):
open(self.ctlfile, "w").write("n")
def on_upscroll(self):
open(self.ctlfile, "w").write(")")
def on_downscroll(self):
open(self.ctlfile, "w").write("(")
|
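As a usage illustration for the module above, here is a minimal sketch of how it would typically be registered in an i3pystatus configuration; the file paths are invented for the example and must match the fifo and event_command settings in the user's pianobar config.

from i3pystatus import Status

status = Status()
status.register(
    "pianobar",
    format="{songtitle} -- {songartist}",
    songfile="/home/user/.config/pianobar/nowplaying",  # assumed path written by eventcmd
    ctlfile="/home/user/.config/pianobar/ctl",          # assumed fifo read by pianobar
)
status.run()

One design note: the bare open(self.ctlfile, "w").write(...) pattern relies on the file object being closed during garbage collection; wrapping each write in a with block would make the close explicit.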
271f5921b8a15abe3f29296ec0b474167cd415cf
|
concord_ml/data/computations.py
|
concord_ml/data/computations.py
|
import time
from concord.computation import Computation, Metadata
def current_time_millis():
return round(time.time() * 1000)
class Generator(Computation):
def __init__(self, iterable, name, ostreams, time_delta=500):
self.ostreams = ostreams
self.name = name
self.time_delta = time_delta
self.iterator = iter(iterable)
def init(self, context):
context.set_timer("{}_generator".format(self.name),
current_time_millis())
def process_timer(self, context, key, time):
try:
value = next(self.iterator)
except StopIteration:
return
for stream in self.ostreams:
context.produce_record(stream, "key", value)
context.set_timer("{}_generator".format(self.name),
current_time_millis() + self.time_delta)
def process_record(self, context, record):
raise NotImplementedError("process_record not implemented")
def metadata(self):
return Metadata(name=self.name, istreams=[], ostreams=self.ostreams)
|
Add basic framework for generators
|
Add basic framework for generators
|
Python
|
apache-2.0
|
concord/bfd,alanhdu/bfd,concord/ml,AndrewAday/bfd
|
Add basic framework for generators
|
import time
from concord.computation import Computation, Metadata
def current_time_millis():
return round(time.time() * 1000)
class Generator(Computation):
def __init__(self, iterable, name, ostreams, time_delta=500):
self.ostreams = ostreams
self.name = name
self.time_delta = time_delta
self.iterator = iter(iterable)
def init(self, context):
context.set_timer("{}_generator".format(self.name),
current_time_millis())
def process_timer(self, context, key, time):
try:
value = next(self.iterator)
except StopIteration:
return
for stream in self.ostreams:
context.produce_record(stream, "key", value)
context.set_timer("{}_generator".format(self.name),
current_time_millis() + self.time_delta)
def process_record(self, context, record):
raise NotImplementedError("process_record not implemented")
def metadata(self):
return Metadata(name=self.name, istreams=[], ostreams=self.ostreams)
|
<commit_before><commit_msg>Add basic framework for generators<commit_after>
|
import time
from concord.computation import Computation, Metadata
def current_time_millis():
return round(time.time() * 1000)
class Generator(Computation):
def __init__(self, iterable, name, ostreams, time_delta=500):
self.ostreams = ostreams
self.name = name
self.time_delta = time_delta
self.iterator = iter(iterable)
def init(self, context):
context.set_timer("{}_generator".format(self.name),
current_time_millis())
def process_timer(self, context, key, time):
try:
value = next(self.iterator)
except StopIteration:
return
for stream in self.ostreams:
context.produce_record(stream, "key", value)
context.set_timer("{}_generator".format(self.name),
current_time_millis() + self.time_delta)
def process_record(self, context, record):
raise NotImplementedError("process_record not implemented")
def metadata(self):
return Metadata(name=self.name, istreams=[], ostreams=self.ostreams)
|
Add basic framework for generators
import time
from concord.computation import Computation, Metadata
def current_time_millis():
return round(time.time() * 1000)
class Generator(Computation):
def __init__(self, iterable, name, ostreams, time_delta=500):
self.ostreams = ostreams
self.name = name
self.time_delta = time_delta
self.iterator = iter(iterable)
def init(self, context):
context.set_timer("{}_generator".format(self.name),
current_time_millis())
def process_timer(self, context, key, time):
try:
value = next(self.iterator)
except StopIteration:
return
for stream in self.ostreams:
context.produce_record(stream, "key", value)
context.set_timer("{}_generator".format(self.name),
current_time_millis() + self.time_delta)
def process_record(self, context, record):
raise NotImplementedError("process_record not implemented")
def metadata(self):
return Metadata(name=self.name, istreams=[], ostreams=self.ostreams)
|
<commit_before><commit_msg>Add basic framework for generators<commit_after>import time
from concord.computation import Computation, Metadata
def current_time_millis():
return round(time.time() * 1000)
class Generator(Computation):
def __init__(self, iterable, name, ostreams, time_delta=500):
self.ostreams = ostreams
self.name = name
self.time_delta = time_delta
self.iterator = iter(iterable)
def init(self, context):
context.set_timer("{}_generator".format(self.name),
current_time_millis())
def process_timer(self, context, key, time):
try:
value = next(self.iterator)
except StopIteration:
return
for stream in self.ostreams:
context.produce_record(stream, "key", value)
context.set_timer("{}_generator".format(self.name),
current_time_millis() + self.time_delta)
def process_record(self, context, record):
raise NotImplementedError("process_record not implemented")
def metadata(self):
return Metadata(name=self.name, istreams=[], ostreams=self.ostreams)
|
|
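As a quick illustration of the class above, a sketch that instantiates a Generator over a finite iterable and inspects its metadata; the stream names are invented, and deploying the computation on a concord cluster is out of scope here.

# Hypothetical usage; the stream names are assumptions, the module path
# follows the file added by this commit.
from concord_ml.data.computations import Generator

gen = Generator(range(10), name="numbers", ostreams=["evens", "odds"],
                time_delta=1000)  # emit roughly one value per second
meta = gen.metadata()
assert meta.istreams == []
assert meta.ostreams == ["evens", "odds"]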
be1f66af24e3a286cd82f092c3ee7e5b6ad9df69
|
tests/test_helpers.py
|
tests/test_helpers.py
|
import tempfile
import unittest
import ephem
from pygcvs.helpers import dict_to_body, read_gcvs
GCVS_CONTENTS = """
NNo GCVS 2000.0 Type Max Min I Min II Epoch Year Period M-m Spectrum References Other design
------------------ ---------------------------------------------------------------------------------------------------------------- -----------------------------------------
320031 |TX Del *|205012.7+033908 |CWB: | 8.84 | 9.54 | |V |42947.009 | | 6.165907 |33 |G0-G5 |08632 08953|
"""
class ReadGcvsTestCase(unittest.TestCase):
def test_open_file(self):
with tempfile.NamedTemporaryFile(mode='w') as f:
f.write(GCVS_CONTENTS)
f.seek(0)
stars = read_gcvs(f.name)
star = next(stars)
self.assertEqual(star['name'], 'TX DEL')
class DictToBodyTestCase(unittest.TestCase):
def setUp(self):
self.star = {
'name': 'TX DEL',
'ra': '20:50:12.7',
'dec': '+03:39:08',
}
def test_basics(self):
body = dict_to_body(self.star)
self.assertIsInstance(body, ephem.FixedBody)
self.assertEqual(body.name, 'TX DEL')
|
Add basic tests for helper functions.
|
Add basic tests for helper functions.
|
Python
|
mit
|
zsiciarz/pygcvs
|
Add basic tests for helper functions.
|
import tempfile
import unittest
import ephem
from pygcvs.helpers import dict_to_body, read_gcvs
GCVS_CONTENTS = """
NNo GCVS 2000.0 Type Max Min I Min II Epoch Year Period M-m Spectrum References Other design
------------------ ---------------------------------------------------------------------------------------------------------------- -----------------------------------------
320031 |TX Del *|205012.7+033908 |CWB: | 8.84 | 9.54 | |V |42947.009 | | 6.165907 |33 |G0-G5 |08632 08953|
"""
class ReadGcvsTestCase(unittest.TestCase):
def test_open_file(self):
with tempfile.NamedTemporaryFile(mode='w') as f:
f.write(GCVS_CONTENTS)
f.seek(0)
stars = read_gcvs(f.name)
star = next(stars)
self.assertEqual(star['name'], 'TX DEL')
class DictToBodyTestCase(unittest.TestCase):
def setUp(self):
self.star = {
'name': 'TX DEL',
'ra': '20:50:12.7',
'dec': '+03:39:08',
}
def test_basics(self):
body = dict_to_body(self.star)
self.assertIsInstance(body, ephem.FixedBody)
self.assertEqual(body.name, 'TX DEL')
|
<commit_before><commit_msg>Add basic tests for helper functions.<commit_after>
|
import tempfile
import unittest
import ephem
from pygcvs.helpers import dict_to_body, read_gcvs
GCVS_CONTENTS = """
NNo GCVS 2000.0 Type Max Min I Min II Epoch Year Period M-m Spectrum References Other design
------------------ ---------------------------------------------------------------------------------------------------------------- -----------------------------------------
320031 |TX Del *|205012.7+033908 |CWB: | 8.84 | 9.54 | |V |42947.009 | | 6.165907 |33 |G0-G5 |08632 08953|
"""
class ReadGcvsTestCase(unittest.TestCase):
def test_open_file(self):
with tempfile.NamedTemporaryFile(mode='w') as f:
f.write(GCVS_CONTENTS)
f.seek(0)
stars = read_gcvs(f.name)
star = next(stars)
self.assertEqual(star['name'], 'TX DEL')
class DictToBodyTestCase(unittest.TestCase):
def setUp(self):
self.star = {
'name': 'TX DEL',
'ra': '20:50:12.7',
'dec': '+03:39:08',
}
def test_basics(self):
body = dict_to_body(self.star)
self.assertIsInstance(body, ephem.FixedBody)
self.assertEqual(body.name, 'TX DEL')
|
Add basic tests for helper functions.
import tempfile
import unittest
import ephem
from pygcvs.helpers import dict_to_body, read_gcvs
GCVS_CONTENTS = """
NNo GCVS 2000.0 Type Max Min I Min II Epoch Year Period M-m Spectrum References Other design
------------------ ---------------------------------------------------------------------------------------------------------------- -----------------------------------------
320031 |TX Del *|205012.7+033908 |CWB: | 8.84 | 9.54 | |V |42947.009 | | 6.165907 |33 |G0-G5 |08632 08953|
"""
class ReadGcvsTestCase(unittest.TestCase):
def test_open_file(self):
with tempfile.NamedTemporaryFile(mode='w') as f:
f.write(GCVS_CONTENTS)
f.seek(0)
stars = read_gcvs(f.name)
star = next(stars)
self.assertEqual(star['name'], 'TX DEL')
class DictToBodyTestCase(unittest.TestCase):
def setUp(self):
self.star = {
'name': 'TX DEL',
'ra': '20:50:12.7',
'dec': '+03:39:08',
}
def test_basics(self):
body = dict_to_body(self.star)
self.assertIsInstance(body, ephem.FixedBody)
self.assertEqual(body.name, 'TX DEL')
|
<commit_before><commit_msg>Add basic tests for helper functions.<commit_after>import tempfile
import unittest
import ephem
from pygcvs.helpers import dict_to_body, read_gcvs
GCVS_CONTENTS = """
NNo GCVS 2000.0 Type Max Min I Min II Epoch Year Period M-m Spectrum References Other design
------------------ ---------------------------------------------------------------------------------------------------------------- -----------------------------------------
320031 |TX Del *|205012.7+033908 |CWB: | 8.84 | 9.54 | |V |42947.009 | | 6.165907 |33 |G0-G5 |08632 08953|
"""
class ReadGcvsTestCase(unittest.TestCase):
def test_open_file(self):
with tempfile.NamedTemporaryFile(mode='w') as f:
f.write(GCVS_CONTENTS)
f.seek(0)
stars = read_gcvs(f.name)
star = next(stars)
self.assertEqual(star['name'], 'TX DEL')
class DictToBodyTestCase(unittest.TestCase):
def setUp(self):
self.star = {
'name': 'TX DEL',
'ra': '20:50:12.7',
'dec': '+03:39:08',
}
def test_basics(self):
body = dict_to_body(self.star)
self.assertIsInstance(body, ephem.FixedBody)
self.assertEqual(body.name, 'TX DEL')
|
|
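For context on what dict_to_body enables downstream, a sketch in plain pyephem; only the FixedBody construction mirrors the tested helper, and the observer coordinates are made up.

import ephem

# Build the same body the test expects dict_to_body to produce.
body = ephem.FixedBody()
body.name = 'TX DEL'
body._ra = ephem.hours('20:50:12.7')
body._dec = ephem.degrees('+03:39:08')

observer = ephem.Observer()
observer.lat, observer.lon = '52.0', '21.0'  # assumed observer location
body.compute(observer)
print(body.alt, body.az)  # current altitude/azimuth of the star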
c60ec4b373a812e8207a2f1c22c11ff2a24ee268
|
tests/test_scanner.py
|
tests/test_scanner.py
|
import pytest
from katana.expr import Expr, Token
from katana.parser import Scanner
class TestScanner:
scanner = Scanner([
Expr('dollar', r'\$'),
Expr('number', r'[0-9]+'),
])
def test_scan_complete(self):
assert self.scanner.scan('$12') == [
Token('dollar', '$'),
Token('number', '12'),
]
def test_scan_incomplete(self):
with pytest.raises(ValueError):
self.scanner.scan('12#')
|
Add tests for Scanner class
|
Add tests for Scanner class
|
Python
|
mit
|
eugene-eeo/katana
|
Add tests for Scanner class
|
import pytest
from katana.expr import Expr, Token
from katana.parser import Scanner
class TestScanner:
scanner = Scanner([
Expr('dollar', r'\$'),
Expr('number', r'[0-9]+'),
])
def test_scan_complete(self):
assert self.scanner.scan('$12') == [
Token('dollar', '$'),
Token('number', '12'),
]
def test_scan_incomplete(self):
with pytest.raises(ValueError):
self.scanner.scan('12#')
|
<commit_before><commit_msg>Add tests for Scanner class<commit_after>
|
import pytest
from katana.expr import Expr, Token
from katana.parser import Scanner
class TestScanner:
scanner = Scanner([
Expr('dollar', r'\$'),
Expr('number', r'[0-9]+'),
])
def test_scan_complete(self):
assert self.scanner.scan('$12') == [
Token('dollar', '$'),
Token('number', '12'),
]
def test_scan_incomplete(self):
with pytest.raises(ValueError):
self.scanner.scan('12#')
|
Add tests for Scanner class
import pytest
from katana.expr import Expr, Token
from katana.parser import Scanner
class TestScanner:
scanner = Scanner([
Expr('dollar', r'\$'),
Expr('number', r'[0-9]+'),
])
def test_scan_complete(self):
assert self.scanner.scan('$12') == [
Token('dollar', '$'),
Token('number', '12'),
]
def test_scan_incomplete(self):
with pytest.raises(ValueError):
self.scanner.scan('12#')
|
<commit_before><commit_msg>Add tests for Scanner class<commit_after>import pytest
from katana.expr import Expr, Token
from katana.parser import Scanner
class TestScanner:
scanner = Scanner([
Expr('dollar', r'\$'),
Expr('number', r'[0-9]+'),
])
def test_scan_complete(self):
assert self.scanner.scan('$12') == [
Token('dollar', '$'),
Token('number', '12'),
]
def test_scan_incomplete(self):
with pytest.raises(ValueError):
self.scanner.scan('12#')
|
|
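The tests pin down the Scanner contract without showing its implementation; below is a rough standalone sketch of the scanning loop they imply (the real katana Scanner may differ in details).

import re

def scan(exprs, string):
    # exprs is a list of (name, pattern) pairs tried in order at each position.
    tokens, pos = [], 0
    while pos < len(string):
        for name, pattern in exprs:
            match = re.compile(pattern).match(string, pos)
            if match:
                tokens.append((name, match.group()))
                pos = match.end()
                break
        else:
            raise ValueError('no expression matches at position %d' % pos)
    return tokens

assert scan([('dollar', r'\$'), ('number', r'[0-9]+')], '$12') == \
    [('dollar', '$'), ('number', '12')]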
0ce008663983a312a6f5d7211aa3767fd6c5f747
|
migrations/versions/0072_add_dvla_orgs.py
|
migrations/versions/0072_add_dvla_orgs.py
|
"""empty message
Revision ID: 0072_add_dvla_orgs
Revises: 0071_add_job_error_state
Create Date: 2017-04-19 15:25:45.155886
"""
# revision identifiers, used by Alembic.
revision = '0072_add_dvla_orgs'
down_revision = '0071_add_job_error_state'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('dvla_organisation',
sa.Column('id', sa.String(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
# insert initial values - HMG and Land Reg
op.execute("""
INSERT INTO dvla_organisation VALUES
('001', 'HM Government'),
('500', 'Land Registry')
""")
op.add_column('services', sa.Column('dvla_organisation_id', sa.String(), nullable=True, server_default='001'))
op.add_column('services_history', sa.Column('dvla_organisation_id', sa.String(), nullable=True, server_default='001'))
# set everything to be HMG for now
op.execute("UPDATE services SET dvla_organisation_id = '001'")
op.execute("UPDATE services_history SET dvla_organisation_id = '001'")
op.alter_column('services', 'dvla_organisation_id', nullable=False)
op.alter_column('services_history', 'dvla_organisation_id', nullable=False)
op.create_index(
op.f('ix_services_dvla_organisation_id'),
'services',
['dvla_organisation_id'],
unique=False
)
op.create_index(
op.f('ix_services_history_dvla_organisation_id'),
'services_history',
['dvla_organisation_id'],
unique=False
)
op.create_foreign_key(None, 'services', 'dvla_organisation', ['dvla_organisation_id'], ['id'])
def downgrade():
op.drop_column('services_history', 'dvla_organisation_id')
op.drop_column('services', 'dvla_organisation_id')
op.drop_table('dvla_organisation')
|
Add DVLA org migration with default values
|
Add DVLA org migration with default values
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Add DVLA org migration with default values
|
"""empty message
Revision ID: 0072_add_dvla_orgs
Revises: 0071_add_job_error_state
Create Date: 2017-04-19 15:25:45.155886
"""
# revision identifiers, used by Alembic.
revision = '0072_add_dvla_orgs'
down_revision = '0071_add_job_error_state'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('dvla_organisation',
sa.Column('id', sa.String(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
# insert initial values - HMG and Land Reg
op.execute("""
INSERT INTO dvla_organisation VALUES
('001', 'HM Government'),
('500', 'Land Registry')
""")
op.add_column('services', sa.Column('dvla_organisation_id', sa.String(), nullable=True, server_default='001'))
op.add_column('services_history', sa.Column('dvla_organisation_id', sa.String(), nullable=True, server_default='001'))
# set everything to be HMG for now
op.execute("UPDATE services SET dvla_organisation_id = '001'")
op.execute("UPDATE services_history SET dvla_organisation_id = '001'")
op.alter_column('services', 'dvla_organisation_id', nullable=False)
op.alter_column('services_history', 'dvla_organisation_id', nullable=False)
op.create_index(
op.f('ix_services_dvla_organisation_id'),
'services',
['dvla_organisation_id'],
unique=False
)
op.create_index(
op.f('ix_services_history_dvla_organisation_id'),
'services_history',
['dvla_organisation_id'],
unique=False
)
op.create_foreign_key(None, 'services', 'dvla_organisation', ['dvla_organisation_id'], ['id'])
def downgrade():
op.drop_column('services_history', 'dvla_organisation_id')
op.drop_column('services', 'dvla_organisation_id')
op.drop_table('dvla_organisation')
|
<commit_before><commit_msg>Add DVLA org migration with default values<commit_after>
|
"""empty message
Revision ID: 0072_add_dvla_orgs
Revises: 0071_add_job_error_state
Create Date: 2017-04-19 15:25:45.155886
"""
# revision identifiers, used by Alembic.
revision = '0072_add_dvla_orgs'
down_revision = '0071_add_job_error_state'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('dvla_organisation',
sa.Column('id', sa.String(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
# insert initial values - HMG and Land Reg
op.execute("""
INSERT INTO dvla_organisation VALUES
('001', 'HM Government'),
('500', 'Land Registry')
""")
op.add_column('services', sa.Column('dvla_organisation_id', sa.String(), nullable=True, server_default='001'))
op.add_column('services_history', sa.Column('dvla_organisation_id', sa.String(), nullable=True, server_default='001'))
# set everything to be HMG for now
op.execute("UPDATE services SET dvla_organisation_id = '001'")
op.execute("UPDATE services_history SET dvla_organisation_id = '001'")
op.alter_column('services', 'dvla_organisation_id', nullable=False)
op.alter_column('services_history', 'dvla_organisation_id', nullable=False)
op.create_index(
op.f('ix_services_dvla_organisation_id'),
'services',
['dvla_organisation_id'],
unique=False
)
op.create_index(
op.f('ix_services_history_dvla_organisation_id'),
'services_history',
['dvla_organisation_id'],
unique=False
)
op.create_foreign_key(None, 'services', 'dvla_organisation', ['dvla_organisation_id'], ['id'])
def downgrade():
op.drop_column('services_history', 'dvla_organisation_id')
op.drop_column('services', 'dvla_organisation_id')
op.drop_table('dvla_organisation')
|
Add DVLA org migration with default values
"""empty message
Revision ID: 0072_add_dvla_orgs
Revises: 0071_add_job_error_state
Create Date: 2017-04-19 15:25:45.155886
"""
# revision identifiers, used by Alembic.
revision = '0072_add_dvla_orgs'
down_revision = '0071_add_job_error_state'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('dvla_organisation',
sa.Column('id', sa.String(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
# insert initial values - HMG and Land Reg
op.execute("""
INSERT INTO dvla_organisation VALUES
('001', 'HM Government'),
('500', 'Land Registry')
""")
op.add_column('services', sa.Column('dvla_organisation_id', sa.String(), nullable=True, server_default='001'))
op.add_column('services_history', sa.Column('dvla_organisation_id', sa.String(), nullable=True, server_default='001'))
# set everything to be HMG for now
op.execute("UPDATE services SET dvla_organisation_id = '001'")
op.execute("UPDATE services_history SET dvla_organisation_id = '001'")
op.alter_column('services', 'dvla_organisation_id', nullable=False)
op.alter_column('services_history', 'dvla_organisation_id', nullable=False)
op.create_index(
op.f('ix_services_dvla_organisation_id'),
'services',
['dvla_organisation_id'],
unique=False
)
op.create_index(
op.f('ix_services_history_dvla_organisation_id'),
'services_history',
['dvla_organisation_id'],
unique=False
)
op.create_foreign_key(None, 'services', 'dvla_organisation', ['dvla_organisation_id'], ['id'])
def downgrade():
op.drop_column('services_history', 'dvla_organisation_id')
op.drop_column('services', 'dvla_organisation_id')
op.drop_table('dvla_organisation')
|
<commit_before><commit_msg>Add DVLA org migration with default values<commit_after>"""empty message
Revision ID: 0072_add_dvla_orgs
Revises: 0071_add_job_error_state
Create Date: 2017-04-19 15:25:45.155886
"""
# revision identifiers, used by Alembic.
revision = '0072_add_dvla_orgs'
down_revision = '0071_add_job_error_state'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('dvla_organisation',
sa.Column('id', sa.String(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
# insert initial values - HMG and Land Reg
op.execute("""
INSERT INTO dvla_organisation VALUES
('001', 'HM Government'),
('500', 'Land Registry')
""")
op.add_column('services', sa.Column('dvla_organisation_id', sa.String(), nullable=True, server_default='001'))
op.add_column('services_history', sa.Column('dvla_organisation_id', sa.String(), nullable=True, server_default='001'))
# set everything to be HMG for now
op.execute("UPDATE services SET dvla_organisation_id = '001'")
op.execute("UPDATE services_history SET dvla_organisation_id = '001'")
op.alter_column('services', 'dvla_organisation_id', nullable=False)
op.alter_column('services_history', 'dvla_organisation_id', nullable=False)
op.create_index(
op.f('ix_services_dvla_organisation_id'),
'services',
['dvla_organisation_id'],
unique=False
)
op.create_index(
op.f('ix_services_history_dvla_organisation_id'),
'services_history',
['dvla_organisation_id'],
unique=False
)
op.create_foreign_key(None, 'services', 'dvla_organisation', ['dvla_organisation_id'], ['id'])
def downgrade():
op.drop_column('services_history', 'dvla_organisation_id')
op.drop_column('services', 'dvla_organisation_id')
op.drop_table('dvla_organisation')
|
|
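The migration follows the standard pattern for adding a non-nullable column to populated tables: add it nullable with a server default, backfill existing rows, then tighten to NOT NULL. Applying it programmatically would look roughly like the sketch below; the config path is an assumption, and running alembic upgrade from the command line is equivalent.

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed location of the Alembic config
command.upgrade(cfg, "0072_add_dvla_orgs")
# command.downgrade(cfg, "0071_add_job_error_state")  # would revert it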
e55dd3d22f8ef23087bd21504d402c7c6f7aa4ba
|
test/mask_test.py
|
test/mask_test.py
|
#################################### IMPORTS ###################################
if __name__ == '__main__':
import sys
import os
pkg_dir = os.path.split(os.path.abspath(__file__))[0]
parent_dir, pkg_name = os.path.split(pkg_dir)
is_pygame_pkg = (pkg_name == 'tests' and
os.path.split(parent_dir)[1] == 'pygame')
if not is_pygame_pkg:
sys.path.insert(0, parent_dir)
else:
is_pygame_pkg = __name__.startswith('pygame.tests.')
if is_pygame_pkg:
from pygame.tests.test_utils import test_not_implemented, unittest
else:
from test.test_utils import test_not_implemented, unittest
from pygame import mask
# Stuff needed for tests
from pygame import surface
from pygame.locals import SRCALPHA
################################################################################
class MaskTestModule(unittest.TestCase):
def test_from_surface(self):
surf = surface.Surface((300, 100), depth=32, flags=SRCALPHA)
surf.fill((0, 0, 0, 0xff))
for x in range(200):
surf.set_at((x, 20), (0, 0, 0, x))
M = mask.from_surface(surf)
self.assertEqual(M.get_at((0, 0)), 1)
self.assertEqual(M.get_at((20, 20)), 0)
self.assertEqual(M.get_at((21, 20)), 0)
self.assertEqual(M.get_at((50, 20)), 0)
self.assertEqual(M.get_at((127, 20)), 0)
self.assertEqual(M.get_at((128, 20)), 1)
self.assertEqual(M.get_at((129, 20)), 1)
self.assertEqual(M.get_at((200, 20)), 1)
self.assertEqual(M.get_at((21, 21)), 1)
# Different threshold
M = mask.from_surface(surf, 50)
self.assertEqual(M.get_at((50, 20)), 0)
self.assertEqual(M.get_at((51, 20)), 1)
self.assertEqual(M.get_at((127, 20)), 1)
self.assertEqual(M.get_at((128, 20)), 1)
self.assertEqual(M.get_at((129, 20)), 1)
################################################################################
if __name__ == '__main__':
unittest.main()
|
Add something of a pygame.mask test suite
|
Add something of a pygame.mask test suite
|
Python
|
lgpl-2.1
|
CTPUG/pygame_cffi,CTPUG/pygame_cffi,caseyc37/pygame_cffi,CTPUG/pygame_cffi,caseyc37/pygame_cffi,caseyc37/pygame_cffi
|
Add something of a pygame.mask test suite
|
#################################### IMPORTS ###################################
if __name__ == '__main__':
import sys
import os
pkg_dir = os.path.split(os.path.abspath(__file__))[0]
parent_dir, pkg_name = os.path.split(pkg_dir)
is_pygame_pkg = (pkg_name == 'tests' and
os.path.split(parent_dir)[1] == 'pygame')
if not is_pygame_pkg:
sys.path.insert(0, parent_dir)
else:
is_pygame_pkg = __name__.startswith('pygame.tests.')
if is_pygame_pkg:
from pygame.tests.test_utils import test_not_implemented, unittest
else:
from test.test_utils import test_not_implemented, unittest
from pygame import mask
# Stuff needed for tests
from pygame import surface
from pygame.locals import SRCALPHA
################################################################################
class MaskTestModule(unittest.TestCase):
def test_from_surface(self):
surf = surface.Surface((300, 100), depth=32, flags=SRCALPHA)
surf.fill((0, 0, 0, 0xff))
for x in range(200):
surf.set_at((x, 20), (0, 0, 0, x))
M = mask.from_surface(surf)
self.assertEqual(M.get_at((0, 0)), 1)
self.assertEqual(M.get_at((20, 20)), 0)
self.assertEqual(M.get_at((21, 20)), 0)
self.assertEqual(M.get_at((50, 20)), 0)
self.assertEqual(M.get_at((127, 20)), 0)
self.assertEqual(M.get_at((128, 20)), 1)
self.assertEqual(M.get_at((129, 20)), 1)
self.assertEqual(M.get_at((200, 20)), 1)
self.assertEqual(M.get_at((21, 21)), 1)
# Different threshold
M = mask.from_surface(surf, 50)
self.assertEqual(M.get_at((50, 20)), 0)
self.assertEqual(M.get_at((51, 20)), 1)
self.assertEqual(M.get_at((127, 20)), 1)
self.assertEqual(M.get_at((128, 20)), 1)
self.assertEqual(M.get_at((129, 20)), 1)
################################################################################
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add something of a pygame.mask test suite<commit_after>
|
#################################### IMPORTS ###################################
if __name__ == '__main__':
import sys
import os
pkg_dir = os.path.split(os.path.abspath(__file__))[0]
parent_dir, pkg_name = os.path.split(pkg_dir)
is_pygame_pkg = (pkg_name == 'tests' and
os.path.split(parent_dir)[1] == 'pygame')
if not is_pygame_pkg:
sys.path.insert(0, parent_dir)
else:
is_pygame_pkg = __name__.startswith('pygame.tests.')
if is_pygame_pkg:
from pygame.tests.test_utils import test_not_implemented, unittest
else:
from test.test_utils import test_not_implemented, unittest
from pygame import mask
# Stuff needed for tests
from pygame import surface
from pygame.locals import SRCALPHA
################################################################################
class MaskTestModule(unittest.TestCase):
def test_from_surface(self):
surf = surface.Surface((300, 100), depth=32, flags=SRCALPHA)
surf.fill((0, 0, 0, 0xff))
for x in range(200):
surf.set_at((x, 20), (0, 0, 0, x))
M = mask.from_surface(surf)
self.assertEqual(M.get_at((0, 0)), 1)
self.assertEqual(M.get_at((20, 20)), 0)
self.assertEqual(M.get_at((21, 20)), 0)
self.assertEqual(M.get_at((50, 20)), 0)
self.assertEqual(M.get_at((127, 20)), 0)
self.assertEqual(M.get_at((128, 20)), 1)
self.assertEqual(M.get_at((129, 20)), 1)
self.assertEqual(M.get_at((200, 20)), 1)
self.assertEqual(M.get_at((21, 21)), 1)
# Different threshold
M = mask.from_surface(surf, 50)
self.assertEqual(M.get_at((50, 20)), 0)
self.assertEqual(M.get_at((51, 20)), 1)
self.assertEqual(M.get_at((127, 20)), 1)
self.assertEqual(M.get_at((128, 20)), 1)
self.assertEqual(M.get_at((129, 20)), 1)
################################################################################
if __name__ == '__main__':
unittest.main()
|
Add something of a pygame.mask test suite
#################################### IMPORTS ###################################
if __name__ == '__main__':
import sys
import os
pkg_dir = os.path.split(os.path.abspath(__file__))[0]
parent_dir, pkg_name = os.path.split(pkg_dir)
is_pygame_pkg = (pkg_name == 'tests' and
os.path.split(parent_dir)[1] == 'pygame')
if not is_pygame_pkg:
sys.path.insert(0, parent_dir)
else:
is_pygame_pkg = __name__.startswith('pygame.tests.')
if is_pygame_pkg:
from pygame.tests.test_utils import test_not_implemented, unittest
else:
from test.test_utils import test_not_implemented, unittest
from pygame import mask
# Stuff needed for tests
from pygame import surface
from pygame.locals import SRCALPHA
################################################################################
class MaskTestModule(unittest.TestCase):
def test_from_surface(self):
surf = surface.Surface((300, 100), depth=32, flags=SRCALPHA)
surf.fill((0, 0, 0, 0xff))
for x in range(200):
surf.set_at((x, 20), (0, 0, 0, x))
M = mask.from_surface(surf)
self.assertEqual(M.get_at((0, 0)), 1)
self.assertEqual(M.get_at((20, 20)), 0)
self.assertEqual(M.get_at((21, 20)), 0)
self.assertEqual(M.get_at((50, 20)), 0)
self.assertEqual(M.get_at((127, 20)), 0)
self.assertEqual(M.get_at((128, 20)), 1)
self.assertEqual(M.get_at((129, 20)), 1)
self.assertEqual(M.get_at((200, 20)), 1)
self.assertEqual(M.get_at((21, 21)), 1)
# Different threshold
M = mask.from_surface(surf, 50)
self.assertEqual(M.get_at((50, 20)), 0)
self.assertEqual(M.get_at((51, 20)), 1)
self.assertEqual(M.get_at((127, 20)), 1)
self.assertEqual(M.get_at((128, 20)), 1)
self.assertEqual(M.get_at((129, 20)), 1)
################################################################################
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add something of a pygame.mask test suite<commit_after>#################################### IMPORTS ###################################
if __name__ == '__main__':
import sys
import os
pkg_dir = os.path.split(os.path.abspath(__file__))[0]
parent_dir, pkg_name = os.path.split(pkg_dir)
is_pygame_pkg = (pkg_name == 'tests' and
os.path.split(parent_dir)[1] == 'pygame')
if not is_pygame_pkg:
sys.path.insert(0, parent_dir)
else:
is_pygame_pkg = __name__.startswith('pygame.tests.')
if is_pygame_pkg:
from pygame.tests.test_utils import test_not_implemented, unittest
else:
from test.test_utils import test_not_implemented, unittest
from pygame import mask
# Stuff needed for tests
from pygame import surface
from pygame.locals import SRCALPHA
################################################################################
class MaskTestModule(unittest.TestCase):
def test_from_surface(self):
surf = surface.Surface((300, 100), depth=32, flags=SRCALPHA)
surf.fill((0, 0, 0, 0xff))
for x in range(200):
surf.set_at((x, 20), (0, 0, 0, x))
M = mask.from_surface(surf)
self.assertEqual(M.get_at((0, 0)), 1)
self.assertEqual(M.get_at((20, 20)), 0)
self.assertEqual(M.get_at((21, 20)), 0)
self.assertEqual(M.get_at((50, 20)), 0)
self.assertEqual(M.get_at((127, 20)), 0)
self.assertEqual(M.get_at((128, 20)), 1)
self.assertEqual(M.get_at((129, 20)), 1)
self.assertEqual(M.get_at((200, 20)), 1)
self.assertEqual(M.get_at((21, 21)), 1)
# Different threshold
M = mask.from_surface(surf, 50)
self.assertEqual(M.get_at((50, 20)), 0)
self.assertEqual(M.get_at((51, 20)), 1)
self.assertEqual(M.get_at((127, 20)), 1)
self.assertEqual(M.get_at((128, 20)), 1)
self.assertEqual(M.get_at((129, 20)), 1)
################################################################################
if __name__ == '__main__':
unittest.main()
|
|
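The assertions encode pygame's thresholding rule: a pixel is set in the mask only when its alpha strictly exceeds the threshold, which defaults to 127. A tiny standalone restatement of that rule:

def mask_bit(alpha, threshold=127):
    # Mirrors the behaviour the test asserts for mask.from_surface.
    return 1 if alpha > threshold else 0

assert mask_bit(127) == 0 and mask_bit(128) == 1   # default threshold
assert mask_bit(50, threshold=50) == 0 and mask_bit(51, threshold=50) == 1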
8882c77e6b96df7548357bce0a1c995fa2600e39
|
test/test_iebi.py
|
test/test_iebi.py
|
# -*- coding: utf-8 -*-
import numpy as np
import ibei
from astropy import units
import unittest
temp_sun = 5762.
temp_earth = 288.
bandgap = 1.15
class Issues(unittest.TestCase):
"""
Tests for reported issues that caused exceptions.
"""
def test_issue_1_devos_efficiency(self):
"""
Unit system needs to be specified for astropy.constants.e to work.
"""
try:
ibei.devos_efficiency(bandgap, temp_sun, temp_earth, 1.09)
except:
self.fail("Unit system not initialized.")
|
Add tests for issues causing exceptions
|
Add tests for issues causing exceptions
|
Python
|
mit
|
jrsmith3/tec,jrsmith3/tec,jrsmith3/ibei
|
Add tests for issues causing exceptions
|
# -*- coding: utf-8 -*-
import numpy as np
import ibei
from astropy import units
import unittest
temp_sun = 5762.
temp_earth = 288.
bandgap = 1.15
class Issues(unittest.TestCase):
"""
Tests for reported issues that caused exceptions.
"""
def test_issue_1_devos_efficiency(self):
"""
Unit system needs to be specified for astropy.constants.e to work.
"""
try:
ibei.devos_efficiency(bandgap, temp_sun, temp_earth, 1.09)
except:
self.fail("Unit system not initialized.")
|
<commit_before><commit_msg>Add tests for issues causing exceptions<commit_after>
|
# -*- coding: utf-8 -*-
import numpy as np
import ibei
from astropy import units
import unittest
temp_sun = 5762.
temp_earth = 288.
bandgap = 1.15
class Issues(unittest.TestCase):
"""
Tests for reported issues that caused exceptions.
"""
def test_issue_1_devos_efficiency(self):
"""
Unit system needs to be specified for astropy.constants.e to work.
"""
try:
ibei.devos_efficiency(bandgap, temp_sun, temp_earth, 1.09)
except:
self.fail("Unit system not initialized.")
|
Add tests for issues causing exceptions
# -*- coding: utf-8 -*-
import numpy as np
import ibei
from astropy import units
import unittest
temp_sun = 5762.
temp_earth = 288.
bandgap = 1.15
class Issues(unittest.TestCase):
"""
Tests for reported issues that caused exceptions.
"""
def test_issue_1_devos_efficiency(self):
"""
Unit system needs to be specified for astropy.constants.e to work.
"""
try:
ibei.devos_efficiency(bandgap, temp_sun, temp_earth, 1.09)
except:
self.fail("Unit system not initialized.")
|
<commit_before><commit_msg>Add tests for issues causing exceptions<commit_after># -*- coding: utf-8 -*-
import numpy as np
import ibei
from astropy import units
import unittest
temp_sun = 5762.
temp_earth = 288.
bandgap = 1.15
class Issues(unittest.TestCase):
"""
Tests for reported issues that caused exceptions.
"""
def test_issue_1_devos_efficiency(self):
"""
Unit system needs to be specified for astropy.constants.e to work.
"""
try:
ibei.devos_efficiency(bandgap, temp_sun, temp_earth, 1.09)
except:
self.fail("Unit system not initialized.")
|
|
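The regression test amounts to the direct call below succeeding; the argument meanings (bandgap, solar temperature, earth temperature, voltage) are inferred from the variable names in the test, and the call is only known to exist in the ibei version this commit targets.

import ibei

# Same call the test guards; should not raise once units are initialized.
efficiency = ibei.devos_efficiency(1.15, 5762., 288., 1.09)
print(efficiency)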
e0a363bce00dca2545fe33d947bbd7f04d1d7234
|
tests/test_dot.py
|
tests/test_dot.py
|
import pytest
from desmod.dot import component_to_dot
from desmod.component import Component
from desmod.simulation import SimEnvironment
@pytest.fixture
def top():
top = Top(parent=None, env=SimEnvironment(config={}))
top.elaborate()
return top
class Top(Component):
base_name = ''
def __init__(self, *args, **kwargs):
super(Top, self).__init__(*args, **kwargs)
self.a = A(self)
self.bs = [B(self, index=i) for i in range(5)]
def connect_children(self):
for b in self.bs:
self.connect(b, 'a')
class A(Component):
base_name = 'a'
class B(Component):
base_name = 'b'
def __init__(self, *args, **kwargs):
super(B, self).__init__(*args, **kwargs)
self.add_connections('a')
self.add_process(self.my_proc)
def my_proc(self):
yield self.env.timeout(1)
def test_hierarchy_only(top):
dot = component_to_dot(top, show_connections=False, show_processes=False)
assert '"a"' in dot
assert '"b0"' in dot
def test_connections_only(top):
dot = component_to_dot(top, show_hierarchy=False, show_processes=False)
assert '"b0" -> "a"' in dot
def test_processes_only(top):
dot = component_to_dot(top, show_hierarchy=False, show_connections=False)
assert 'my_proc' in dot
def test_all(top):
dot = component_to_dot(top, colorscheme='blues9')
assert 'my_proc' in dot
assert '"a"' in dot
assert '"b0"' in dot
assert '"b0" -> "a"' in dot
|
Add unit tests for desmod.dot
|
Add unit tests for desmod.dot
|
Python
|
mit
|
SanDisk-Open-Source/desmod,bgmerrell/desmod
|
Add unit tests for desmod.dot
|
import pytest
from desmod.dot import component_to_dot
from desmod.component import Component
from desmod.simulation import SimEnvironment
@pytest.fixture
def top():
top = Top(parent=None, env=SimEnvironment(config={}))
top.elaborate()
return top
class Top(Component):
base_name = ''
def __init__(self, *args, **kwargs):
super(Top, self).__init__(*args, **kwargs)
self.a = A(self)
self.bs = [B(self, index=i) for i in range(5)]
def connect_children(self):
for b in self.bs:
self.connect(b, 'a')
class A(Component):
base_name = 'a'
class B(Component):
base_name = 'b'
def __init__(self, *args, **kwargs):
super(B, self).__init__(*args, **kwargs)
self.add_connections('a')
self.add_process(self.my_proc)
def my_proc(self):
yield self.env.timeout(1)
def test_hierarchy_only(top):
dot = component_to_dot(top, show_connections=False, show_processes=False)
assert '"a"' in dot
assert '"b0"' in dot
def test_connections_only(top):
dot = component_to_dot(top, show_hierarchy=False, show_processes=False)
assert '"b0" -> "a"' in dot
def test_processes_only(top):
dot = component_to_dot(top, show_hierarchy=False, show_connections=False)
assert 'my_proc' in dot
def test_all(top):
dot = component_to_dot(top, colorscheme='blues9')
assert 'my_proc' in dot
assert '"a"' in dot
assert '"b0"' in dot
assert '"b0" -> "a"' in dot
|
<commit_before><commit_msg>Add unit tests for desmod.dot<commit_after>
|
import pytest
from desmod.dot import component_to_dot
from desmod.component import Component
from desmod.simulation import SimEnvironment
@pytest.fixture
def top():
top = Top(parent=None, env=SimEnvironment(config={}))
top.elaborate()
return top
class Top(Component):
base_name = ''
def __init__(self, *args, **kwargs):
super(Top, self).__init__(*args, **kwargs)
self.a = A(self)
self.bs = [B(self, index=i) for i in range(5)]
def connect_children(self):
for b in self.bs:
self.connect(b, 'a')
class A(Component):
base_name = 'a'
class B(Component):
base_name = 'b'
def __init__(self, *args, **kwargs):
super(B, self).__init__(*args, **kwargs)
self.add_connections('a')
self.add_process(self.my_proc)
def my_proc(self):
yield self.env.timeout(1)
def test_hierarchy_only(top):
dot = component_to_dot(top, show_connections=False, show_processes=False)
assert '"a"' in dot
assert '"b0"' in dot
def test_connections_only(top):
dot = component_to_dot(top, show_hierarchy=False, show_processes=False)
assert '"b0" -> "a"' in dot
def test_processes_only(top):
dot = component_to_dot(top, show_hierarchy=False, show_connections=False)
assert 'my_proc' in dot
def test_all(top):
dot = component_to_dot(top, colorscheme='blues9')
assert 'my_proc' in dot
assert '"a"' in dot
assert '"b0"' in dot
assert '"b0" -> "a"' in dot
|
Add unit tests for desmod.dot
import pytest
from desmod.dot import component_to_dot
from desmod.component import Component
from desmod.simulation import SimEnvironment
@pytest.fixture
def top():
top = Top(parent=None, env=SimEnvironment(config={}))
top.elaborate()
return top
class Top(Component):
base_name = ''
def __init__(self, *args, **kwargs):
super(Top, self).__init__(*args, **kwargs)
self.a = A(self)
self.bs = [B(self, index=i) for i in range(5)]
def connect_children(self):
for b in self.bs:
self.connect(b, 'a')
class A(Component):
base_name = 'a'
class B(Component):
base_name = 'b'
def __init__(self, *args, **kwargs):
super(B, self).__init__(*args, **kwargs)
self.add_connections('a')
self.add_process(self.my_proc)
def my_proc(self):
yield self.env.timeout(1)
def test_hierarchy_only(top):
dot = component_to_dot(top, show_connections=False, show_processes=False)
assert '"a"' in dot
assert '"b0"' in dot
def test_connections_only(top):
dot = component_to_dot(top, show_hierarchy=False, show_processes=False)
assert '"b0" -> "a"' in dot
def test_processes_only(top):
dot = component_to_dot(top, show_hierarchy=False, show_connections=False)
assert 'my_proc' in dot
def test_all(top):
dot = component_to_dot(top, colorscheme='blues9')
assert 'my_proc' in dot
assert '"a"' in dot
assert '"b0"' in dot
assert '"b0" -> "a"' in dot
|
<commit_before><commit_msg>Add unit tests for desmod.dot<commit_after>import pytest
from desmod.dot import component_to_dot
from desmod.component import Component
from desmod.simulation import SimEnvironment
@pytest.fixture
def top():
top = Top(parent=None, env=SimEnvironment(config={}))
top.elaborate()
return top
class Top(Component):
base_name = ''
def __init__(self, *args, **kwargs):
super(Top, self).__init__(*args, **kwargs)
self.a = A(self)
self.bs = [B(self, index=i) for i in range(5)]
def connect_children(self):
for b in self.bs:
self.connect(b, 'a')
class A(Component):
base_name = 'a'
class B(Component):
base_name = 'b'
def __init__(self, *args, **kwargs):
super(B, self).__init__(*args, **kwargs)
self.add_connections('a')
self.add_process(self.my_proc)
def my_proc(self):
yield self.env.timeout(1)
def test_hierarchy_only(top):
dot = component_to_dot(top, show_connections=False, show_processes=False)
assert '"a"' in dot
assert '"b0"' in dot
def test_connections_only(top):
dot = component_to_dot(top, show_hierarchy=False, show_processes=False)
assert '"b0" -> "a"' in dot
def test_processes_only(top):
dot = component_to_dot(top, show_hierarchy=False, show_connections=False)
assert 'my_proc' in dot
def test_all(top):
dot = component_to_dot(top, colorscheme='blues9')
assert 'my_proc' in dot
assert '"a"' in dot
assert '"b0"' in dot
assert '"b0" -> "a"' in dot
|
|
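Beyond asserting substrings, the natural end-to-end use of component_to_dot is writing a .dot file for Graphviz; a sketch follows (rendering with the external dot tool is assumed and not part of the commit).

from desmod.dot import component_to_dot

def write_dot(component, path="top.dot"):
    # Same call the tests exercise, persisted for e.g. `dot -Tpng top.dot`.
    with open(path, "w") as f:
        f.write(component_to_dot(component))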
3f0c1c24528d8ce818434d8b553136d315b0d548
|
tests/basics/try_finally2.py
|
tests/basics/try_finally2.py
|
# check that the Python stack does not overflow when the finally
# block itself uses more stack than the rest of the function
def f1(a, b):
pass
def test1():
val = 1
try:
raise ValueError()
finally:
f1(2, 2) # use some stack
print(val) # check that the local variable is the same
try:
test1()
except ValueError:
pass
# same as above but with 3 args instead of 2, to use an extra stack entry
def f2(a, b, c):
pass
def test2():
val = 1
try:
raise ValueError()
finally:
f2(2, 2, 2) # use some stack
print(val) # check that the local variable is the same
try:
test2()
except ValueError:
pass
|
Add test case for overflowing Py stack in try-finally.
|
tests/basics: Add test case for overflowing Py stack in try-finally.
|
Python
|
mit
|
blazewicz/micropython,adafruit/circuitpython,SHA2017-badge/micropython-esp32,trezor/micropython,toolmacher/micropython,mhoffma/micropython,tobbad/micropython,dxxb/micropython,deshipu/micropython,henriknelson/micropython,turbinenreiter/micropython,infinnovation/micropython,MrSurly/micropython,selste/micropython,pramasoul/micropython,adafruit/circuitpython,micropython/micropython-esp32,deshipu/micropython,tuc-osg/micropython,lowRISC/micropython,cwyark/micropython,chrisdearman/micropython,MrSurly/micropython-esp32,pfalcon/micropython,pozetroninc/micropython,pfalcon/micropython,tuc-osg/micropython,SHA2017-badge/micropython-esp32,lowRISC/micropython,dmazzella/micropython,AriZuu/micropython,tralamazza/micropython,chrisdearman/micropython,henriknelson/micropython,oopy/micropython,Peetz0r/micropython-esp32,kerneltask/micropython,lowRISC/micropython,kerneltask/micropython,hosaka/micropython,hiway/micropython,matthewelse/micropython,pozetroninc/micropython,selste/micropython,SHA2017-badge/micropython-esp32,pfalcon/micropython,MrSurly/micropython,alex-march/micropython,adafruit/micropython,toolmacher/micropython,pozetroninc/micropython,puuu/micropython,lowRISC/micropython,bvernoux/micropython,toolmacher/micropython,TDAbboud/micropython,matthewelse/micropython,mhoffma/micropython,chrisdearman/micropython,alex-robbins/micropython,bvernoux/micropython,alex-robbins/micropython,micropython/micropython-esp32,swegener/micropython,pfalcon/micropython,ryannathans/micropython,hosaka/micropython,dmazzella/micropython,kerneltask/micropython,TDAbboud/micropython,alex-robbins/micropython,lowRISC/micropython,Timmenem/micropython,bvernoux/micropython,torwag/micropython,selste/micropython,cwyark/micropython,pfalcon/micropython,Timmenem/micropython,adafruit/circuitpython,torwag/micropython,deshipu/micropython,adafruit/circuitpython,infinnovation/micropython,deshipu/micropython,TDAbboud/micropython,oopy/micropython,alex-robbins/micropython,AriZuu/micropython,oopy/micropython,tobbad/micropython,torwag/micropython,AriZuu/micropython,dxxb/micropython,tralamazza/micropython,Peetz0r/micropython-esp32,ryannathans/micropython,jmarcelino/pycom-micropython,tuc-osg/micropython,TDAbboud/micropython,mhoffma/micropython,puuu/micropython,AriZuu/micropython,toolmacher/micropython,adafruit/micropython,MrSurly/micropython,Timmenem/micropython,deshipu/micropython,swegener/micropython,cwyark/micropython,trezor/micropython,adafruit/micropython,tuc-osg/micropython,pramasoul/micropython,tralamazza/micropython,pramasoul/micropython,selste/micropython,turbinenreiter/micropython,hosaka/micropython,puuu/micropython,alex-march/micropython,Timmenem/micropython,blazewicz/micropython,selste/micropython,TDAbboud/micropython,adafruit/micropython,matthewelse/micropython,tralamazza/micropython,kerneltask/micropython,MrSurly/micropython-esp32,PappaPeppar/micropython,SHA2017-badge/micropython-esp32,pramasoul/micropython,tobbad/micropython,swegener/micropython,MrSurly/micropython-esp32,adafruit/micropython,PappaPeppar/micropython,hiway/micropython,infinnovation/micropython,turbinenreiter/micropython,micropython/micropython-esp32,toolmacher/micropython,MrSurly/micropython-esp32,jmarcelino/pycom-micropython,HenrikSolver/micropython,Peetz0r/micropython-esp32,tuc-osg/micropython,alex-robbins/micropython,infinnovation/micropython,henriknelson/micropython,alex-march/micropython,matthewelse/micropython,blazewicz/micropython,alex-march/micropython,PappaPeppar/micropython,HenrikSolver/micropython,bvernoux/micropython,pramasoul/micropython,HenrikSolver/micropython,S
HA2017-badge/micropython-esp32,HenrikSolver/micropython,hiway/micropython,matthewelse/micropython,jmarcelino/pycom-micropython,trezor/micropython,MrSurly/micropython,HenrikSolver/micropython,dmazzella/micropython,jmarcelino/pycom-micropython,oopy/micropython,chrisdearman/micropython,micropython/micropython-esp32,matthewelse/micropython,cwyark/micropython,dmazzella/micropython,henriknelson/micropython,hiway/micropython,dxxb/micropython,micropython/micropython-esp32,turbinenreiter/micropython,adafruit/circuitpython,swegener/micropython,turbinenreiter/micropython,torwag/micropython,jmarcelino/pycom-micropython,hosaka/micropython,chrisdearman/micropython,cwyark/micropython,torwag/micropython,MrSurly/micropython-esp32,tobbad/micropython,ryannathans/micropython,MrSurly/micropython,oopy/micropython,kerneltask/micropython,swegener/micropython,Peetz0r/micropython-esp32,puuu/micropython,trezor/micropython,pozetroninc/micropython,mhoffma/micropython,dxxb/micropython,Timmenem/micropython,mhoffma/micropython,henriknelson/micropython,hosaka/micropython,blazewicz/micropython,bvernoux/micropython,tobbad/micropython,Peetz0r/micropython-esp32,puuu/micropython,trezor/micropython,dxxb/micropython,ryannathans/micropython,hiway/micropython,PappaPeppar/micropython,PappaPeppar/micropython,ryannathans/micropython,AriZuu/micropython,pozetroninc/micropython,adafruit/circuitpython,blazewicz/micropython,infinnovation/micropython,alex-march/micropython
|
tests/basics: Add test case for overflowing Py stack in try-finally.
|
# check that the Python stack does not overflow when the finally
# block itself uses more stack than the rest of the function
def f1(a, b):
pass
def test1():
val = 1
try:
raise ValueError()
finally:
f1(2, 2) # use some stack
print(val) # check that the local variable is the same
try:
test1()
except ValueError:
pass
# same as above but with 3 args instead of 2, to use an extra stack entry
def f2(a, b, c):
pass
def test2():
val = 1
try:
raise ValueError()
finally:
f2(2, 2, 2) # use some stack
print(val) # check that the local variable is the same
try:
test2()
except ValueError:
pass
|
<commit_before><commit_msg>tests/basics: Add test case for overflowing Py stack in try-finally.<commit_after>
|
# check that the Python stack does not overflow when the finally
# block itself uses more stack than the rest of the function
def f1(a, b):
pass
def test1():
val = 1
try:
raise ValueError()
finally:
f1(2, 2) # use some stack
print(val) # check that the local variable is the same
try:
test1()
except ValueError:
pass
# same as above but with 3 args instead of 2, to use an extra stack entry
def f2(a, b, c):
pass
def test2():
val = 1
try:
raise ValueError()
finally:
f2(2, 2, 2) # use some stack
print(val) # check that the local variable is the same
try:
test2()
except ValueError:
pass
|
tests/basics: Add test case for overflowing Py stack in try-finally.
# check that the Python stack does not overflow when the finally
# block itself uses more stack than the rest of the function
def f1(a, b):
pass
def test1():
val = 1
try:
raise ValueError()
finally:
f1(2, 2) # use some stack
print(val) # check that the local variable is the same
try:
test1()
except ValueError:
pass
# same as above but with 3 args instead of 2, to use an extra stack entry
def f2(a, b, c):
pass
def test2():
val = 1
try:
raise ValueError()
finally:
f2(2, 2, 2) # use some stack
print(val) # check that the local variable is the same
try:
test2()
except ValueError:
pass
|
<commit_before><commit_msg>tests/basics: Add test case for overflowing Py stack in try-finally.<commit_after># check that the Python stack does not overflow when the finally
# block itself uses more stack than the rest of the function
def f1(a, b):
pass
def test1():
val = 1
try:
raise ValueError()
finally:
f1(2, 2) # use some stack
print(val) # check that the local variable is the same
try:
test1()
except ValueError:
pass
# same as above but with 3 args instead of 2, to use an extra stack entry
def f2(a, b, c):
pass
def test2():
val = 1
try:
raise ValueError()
finally:
f2(2, 2, 2) # use some stack
print(val) # check that the local variable is the same
try:
test2()
except ValueError:
pass
|
|
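The behaviour being pinned down also holds on CPython, which makes the expected output easy to sanity-check; a condensed version of test1:

def demo():
    val = 1
    try:
        raise ValueError()
    finally:
        print(val)  # runs before the exception propagates; prints 1

try:
    demo()
except ValueError:
    pass  # the ValueError still reaches the caller after the finally block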
12d90ff5a24bff3151219187b960ac7f5eecc146
|
tools/heapcheck/PRESUBMIT.py
|
tools/heapcheck/PRESUBMIT.py
|
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into gcl.
"""
def CheckChange(input_api, output_api):
"""Checks the memcheck suppressions files for bad data."""
errors = []
skip_next_line = False
func_re = input_api.re.compile('[a-z_.]+\(.+\)$')
for f, line_num, line in input_api.RightHandSideLines(lambda x:
x.LocalPath().endswith('.txt')):
line = line.lstrip()
if line.startswith('#') or not line:
continue
if skip_next_line:
skip_next_line = False
continue
if line == '{':
skip_next_line = True
continue
if (line.startswith('fun:') or line.startswith('obj:') or
line == 'Heapcheck:Leak' or line == '}' or
line == '...'):
continue
if func_re.match(line):
continue
errors.append('"%s" is probably wrong: %s line %s' % (line, f.LocalPath(),
line_num))
if errors:
return [output_api.PresubmitError('\n'.join(errors))]
return []
def CheckChangeOnUpload(input_api, output_api):
return CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return CheckChange(input_api, output_api)
|
Add presubmit checks for suppressions.
|
Heapchecker: Add presubmit checks for suppressions.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/3197014
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@57132 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
yitian134/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,adobe/chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,ropik/chromium,adobe/chromium,yitian134/chromium,gavinp/chromium,adobe/chromium,ropik/chromium,yitian134/chromium,yitian134/chromium,yitian134/chromium,ropik/chromium,ropik/chromium,yitian134/chromium,adobe/chromium,gavinp/chromium,adobe/chromium,ropik/chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,gavinp/chromium,gavinp/chromium,adobe/chromium,Crystalnix/house-of-life-chromium,adobe/chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,adobe/chromium,ropik/chromium,Crystalnix/house-of-life-chromium,adobe/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,adobe/chromium,ropik/chromium,Crystalnix/house-of-life-chromium,adobe/chromium,gavinp/chromium,ropik/chromium,yitian134/chromium,yitian134/chromium,ropik/chromium
|
Heapchecker: Add presubmit checks for suppressions.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/3197014
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@57132 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into gcl.
"""
def CheckChange(input_api, output_api):
"""Checks the memcheck suppressions files for bad data."""
errors = []
skip_next_line = False
func_re = input_api.re.compile('[a-z_.]+\(.+\)$')
for f, line_num, line in input_api.RightHandSideLines(lambda x:
x.LocalPath().endswith('.txt')):
line = line.lstrip()
if line.startswith('#') or not line:
continue
if skip_next_line:
skip_next_line = False
continue
if line == '{':
skip_next_line = True
continue
if (line.startswith('fun:') or line.startswith('obj:') or
line == 'Heapcheck:Leak' or line == '}' or
line == '...'):
continue
if func_re.match(line):
continue
errors.append('"%s" is probably wrong: %s line %s' % (line, f.LocalPath(),
line_num))
if errors:
return [output_api.PresubmitError('\n'.join(errors))]
return []
def CheckChangeOnUpload(input_api, output_api):
return CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return CheckChange(input_api, output_api)
|
<commit_before><commit_msg>Heapchecker: Add presubmit checks for suppressions.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/3197014
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@57132 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into gcl.
"""
def CheckChange(input_api, output_api):
"""Checks the memcheck suppressions files for bad data."""
errors = []
skip_next_line = False
func_re = input_api.re.compile('[a-z_.]+\(.+\)$')
for f, line_num, line in input_api.RightHandSideLines(lambda x:
x.LocalPath().endswith('.txt')):
line = line.lstrip()
if line.startswith('#') or not line:
continue
if skip_next_line:
skip_next_line = False
continue
if line == '{':
skip_next_line = True
continue
if (line.startswith('fun:') or line.startswith('obj:') or
line == 'Heapcheck:Leak' or line == '}' or
line == '...'):
continue
if func_re.match(line):
continue
errors.append('"%s" is probably wrong: %s line %s' % (line, f.LocalPath(),
line_num))
if errors:
return [output_api.PresubmitError('\n'.join(errors))]
return []
def CheckChangeOnUpload(input_api, output_api):
return CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return CheckChange(input_api, output_api)
|
Heapchecker: Add presubmit checks for suppressions.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/3197014
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@57132 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into gcl.
"""
def CheckChange(input_api, output_api):
"""Checks the memcheck suppressions files for bad data."""
errors = []
skip_next_line = False
func_re = input_api.re.compile('[a-z_.]+\(.+\)$')
for f, line_num, line in input_api.RightHandSideLines(lambda x:
x.LocalPath().endswith('.txt')):
line = line.lstrip()
if line.startswith('#') or not line:
continue
if skip_next_line:
skip_next_line = False
continue
if line == '{':
skip_next_line = True
continue
if (line.startswith('fun:') or line.startswith('obj:') or
line == 'Heapcheck:Leak' or line == '}' or
line == '...'):
continue
if func_re.match(line):
continue
errors.append('"%s" is probably wrong: %s line %s' % (line, f.LocalPath(),
line_num))
if errors:
return [output_api.PresubmitError('\n'.join(errors))]
return []
def CheckChangeOnUpload(input_api, output_api):
return CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return CheckChange(input_api, output_api)
|
<commit_before><commit_msg>Heapchecker: Add presubmit checks for suppressions.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/3197014
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@57132 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into gcl.
"""
def CheckChange(input_api, output_api):
"""Checks the memcheck suppressions files for bad data."""
errors = []
skip_next_line = False
  func_re = input_api.re.compile(r'[a-z_.]+\(.+\)$')
for f, line_num, line in input_api.RightHandSideLines(lambda x:
x.LocalPath().endswith('.txt')):
line = line.lstrip()
if line.startswith('#') or not line:
continue
if skip_next_line:
skip_next_line = False
continue
if line == '{':
skip_next_line = True
continue
if (line.startswith('fun:') or line.startswith('obj:') or
line == 'Heapcheck:Leak' or line == '}' or
line == '...'):
continue
if func_re.match(line):
continue
errors.append('"%s" is probably wrong: %s line %s' % (line, f.LocalPath(),
line_num))
if errors:
return [output_api.PresubmitError('\n'.join(errors))]
return []
def CheckChangeOnUpload(input_api, output_api):
return CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return CheckChange(input_api, output_api)
|
|
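For illustration, here is a minimal standalone sketch of the same validation logic outside the presubmit API; the `check_suppressions` helper and the sample lines are hypothetical, but the regex and the skip rules mirror the checker above.

```python
import re

FUNC_RE = re.compile(r'[a-z_.]+\(.+\)$')

def check_suppressions(lines):
    """Return error strings for suppression lines that look malformed."""
    errors = []
    skip_next = False
    for num, line in enumerate(lines, 1):
        line = line.lstrip()
        if line.startswith('#') or not line:
            continue
        if skip_next:
            skip_next = False
            continue
        if line == '{':
            skip_next = True  # the suppression name follows the brace
            continue
        if (line.startswith(('fun:', 'obj:')) or
                line in ('Heapcheck:Leak', '}', '...')):
            continue
        if FUNC_RE.match(line):
            continue
        errors.append('"%s" is probably wrong: line %d' % (line, num))
    return errors

# Hypothetical sample: one valid suppression plus one stray line.
sample = ['{', 'leak_in_foo', 'Heapcheck:Leak', 'fun:foo', '}', 'stray line']
print(check_suppressions(sample))  # ['"stray line" is probably wrong: line 6']
```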
17180021f96897a659d472555c3e2588ca94d41b
|
hours_slept_time_series.py
|
hours_slept_time_series.py
|
import plotly as py, plotly.graph_objs as go
from csvparser import parse
from os.path import basename, splitext
from sys import argv
data_file = argv[1]
raw_data = parse(data_file)
dates = list(raw_data.keys())
sleep_durations = []
for date, sleeps in raw_data.items():
total = 0
for s in sleeps:
sleep, wake, is_nap = s
delta_h = (wake - sleep).seconds / 3600
total += delta_h
sleep_durations.append(total)
data = [go.Scatter(x=dates, y=sleep_durations)]
fmt = '%m-%d-%y'
start = dates[0].strftime(fmt)
end = dates[-1].strftime(fmt)
name = splitext(basename(__file__))[0]
path = '{}_{}--{}.html'.format(name, start, end)
py.offline.plot(data, filename=path)
|
Add sleep duration time series plotter
|
Add sleep duration time series plotter
|
Python
|
mit
|
f-jiang/sleep-pattern-grapher
|
Add sleep duration time series plotter
|
import plotly as py, plotly.graph_objs as go
from csvparser import parse
from os.path import basename, splitext
from sys import argv
data_file = argv[1]
raw_data = parse(data_file)
dates = list(raw_data.keys())
sleep_durations = []
for date, sleeps in raw_data.items():
total = 0
for s in sleeps:
sleep, wake, is_nap = s
delta_h = (wake - sleep).seconds / 3600
total += delta_h
sleep_durations.append(total)
data = [go.Scatter(x=dates, y=sleep_durations)]
fmt = '%m-%d-%y'
start = dates[0].strftime(fmt)
end = dates[-1].strftime(fmt)
name = splitext(basename(__file__))[0]
path = '{}_{}--{}.html'.format(name, start, end)
py.offline.plot(data, filename=path)
|
<commit_before><commit_msg>Add sleep duration time series plotter<commit_after>
|
import plotly as py, plotly.graph_objs as go
from csvparser import parse
from os.path import basename, splitext
from sys import argv
data_file = argv[1]
raw_data = parse(data_file)
dates = list(raw_data.keys())
sleep_durations = []
for date, sleeps in raw_data.items():
total = 0
for s in sleeps:
sleep, wake, is_nap = s
delta_h = (wake - sleep).seconds / 3600
total += delta_h
sleep_durations.append(total)
data = [go.Scatter(x=dates, y=sleep_durations)]
fmt = '%m-%d-%y'
start = dates[0].strftime(fmt)
end = dates[-1].strftime(fmt)
name = splitext(basename(__file__))[0]
path = '{}_{}--{}.html'.format(name, start, end)
py.offline.plot(data, filename=path)
|
Add sleep duration time series plotter
import plotly as py, plotly.graph_objs as go
from csvparser import parse
from os.path import basename, splitext
from sys import argv
data_file = argv[1]
raw_data = parse(data_file)
dates = list(raw_data.keys())
sleep_durations = []
for date, sleeps in raw_data.items():
total = 0
for s in sleeps:
sleep, wake, is_nap = s
delta_h = (wake - sleep).seconds / 3600
total += delta_h
sleep_durations.append(total)
data = [go.Scatter(x=dates, y=sleep_durations)]
fmt = '%m-%d-%y'
start = dates[0].strftime(fmt)
end = dates[-1].strftime(fmt)
name = splitext(basename(__file__))[0]
path = '{}_{}--{}.html'.format(name, start, end)
py.offline.plot(data, filename=path)
|
<commit_before><commit_msg>Add sleep duration time series plotter<commit_after>import plotly as py, plotly.graph_objs as go
from csvparser import parse
from os.path import basename, splitext
from sys import argv
data_file = argv[1]
raw_data = parse(data_file)
dates = list(raw_data.keys())
sleep_durations = []
for date, sleeps in raw_data.items():
total = 0
for s in sleeps:
sleep, wake, is_nap = s
delta_h = (wake - sleep).seconds / 3600
total += delta_h
sleep_durations.append(total)
data = [go.Scatter(x=dates, y=sleep_durations)]
fmt = '%m-%d-%y'
start = dates[0].strftime(fmt)
end = dates[-1].strftime(fmt)
name = splitext(basename(__file__))[0]
path = '{}_{}--{}.html'.format(name, start, end)
py.offline.plot(data, filename=path)
|
|
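The plotter leans on `csvparser.parse()` returning a mapping of date to `(sleep, wake, is_nap)` tuples, which the loop above implies. A self-contained sketch of the duration math on synthetic data (the sample values are made up):

```python
from datetime import datetime, date

# Synthetic stand-in for csvparser.parse(): date -> [(sleep, wake, is_nap), ...],
# the shape the plotting script assumes.
raw_data = {
    date(2017, 1, 1): [(datetime(2017, 1, 1, 23, 0),
                        datetime(2017, 1, 2, 7, 30), False)],
    date(2017, 1, 2): [(datetime(2017, 1, 2, 23, 30),
                        datetime(2017, 1, 3, 6, 30), False),
                       (datetime(2017, 1, 3, 14, 0),
                        datetime(2017, 1, 3, 14, 45), True)],
}

sleep_durations = []
for sleeps in raw_data.values():
    # .seconds mirrors the original; it is only safe for spans under 24 hours.
    sleep_durations.append(sum((wake - sleep).seconds / 3600
                               for sleep, wake, _is_nap in sleeps))

print(sleep_durations)  # [8.5, 7.75]
```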
db6871ee917cccf9f9d9010f60521e3454f66ea8
|
scripts/create-user.py
|
scripts/create-user.py
|
#!/usr/bin/python
# This is a small helper script to create a CATMAID user.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
# Requires the file .catmaid-db to be present in your
# home directory, with the following format:
#
# host: localhost
# database: catmaid
# username: catmaid_user
# password: password_of_your_catmaid_user
import sys
import os
from common import db_connection
from subprocess import check_call
import getpass
from psycopg2 import IntegrityError
if len(sys.argv) != 3:
print >> sys.stderr, "Usage: create-project.py <USERNAME> <LONG-NAME>"
sys.exit(1)
username = sys.argv[1]
full_name = sys.argv[2]
# Now get a password from the user:
p1 = getpass.getpass()
p2 = getpass.getpass("Confirm password: ")
if p1 != p2:
print >> sys.stderr, "The passwords didn't match."
sys.exit(2)
c = db_connection.cursor()
try:
c.execute('INSERT INTO "user" (name, pwd, longname) VALUES (%s, md5(%s), %s) RETURNING id',
(username, p1, full_name))
except IntegrityError, e:
print >> sys.stderr, "There is already a user called '%s'" % (username,)
sys.exit(3)
user_id = c.fetchone()[0]
print "Created the user '%s' with ID: %d" % (username, user_id)
db_connection.commit()
c.close()
db_connection.close()
|
Add a script for creating a new user
|
Add a script for creating a new user
|
Python
|
agpl-3.0
|
fzadow/CATMAID,htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID,htem/CATMAID,htem/CATMAID,htem/CATMAID,fzadow/CATMAID
|
Add a script for creating a new user
|
#!/usr/bin/python
# This is a small helper script to create a CATMAID user.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
# Requires the file .catmaid-db to be present in your
# home directory, with the following format:
#
# host: localhost
# database: catmaid
# username: catmaid_user
# password: password_of_your_catmaid_user
import sys
import os
from common import db_connection
from subprocess import check_call
import getpass
from psycopg2 import IntegrityError
if len(sys.argv) != 3:
print >> sys.stderr, "Usage: create-project.py <USERNAME> <LONG-NAME>"
sys.exit(1)
username = sys.argv[1]
full_name = sys.argv[2]
# Now get a password from the user:
p1 = getpass.getpass()
p2 = getpass.getpass("Confirm password: ")
if p1 != p2:
print >> sys.stderr, "The passwords didn't match."
sys.exit(2)
c = db_connection.cursor()
try:
c.execute('INSERT INTO "user" (name, pwd, longname) VALUES (%s, md5(%s), %s) RETURNING id',
(username, p1, full_name))
except IntegrityError, e:
print >> sys.stderr, "There is already a user called '%s'" % (username,)
sys.exit(3)
user_id = c.fetchone()[0]
print "Created the user '%s' with ID: %d" % (username, user_id)
db_connection.commit()
c.close()
db_connection.close()
|
<commit_before><commit_msg>Add a script for creating a new user<commit_after>
|
#!/usr/bin/python
# This is a small helper script to create a CATMAID user.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
# Requires the file .catmaid-db to be present in your
# home directory, with the following format:
#
# host: localhost
# database: catmaid
# username: catmaid_user
# password: password_of_your_catmaid_user
import sys
import os
from common import db_connection
from subprocess import check_call
import getpass
from psycopg2 import IntegrityError
if len(sys.argv) != 3:
print >> sys.stderr, "Usage: create-project.py <USERNAME> <LONG-NAME>"
sys.exit(1)
username = sys.argv[1]
full_name = sys.argv[2]
# Now get a password from the user:
p1 = getpass.getpass()
p2 = getpass.getpass("Confirm password: ")
if p1 != p2:
print >> sys.stderr, "The passwords didn't match."
sys.exit(2)
c = db_connection.cursor()
try:
c.execute('INSERT INTO "user" (name, pwd, longname) VALUES (%s, md5(%s), %s) RETURNING id',
(username, p1, full_name))
except IntegrityError, e:
print >> sys.stderr, "There is already a user called '%s'" % (username,)
sys.exit(3)
user_id = c.fetchone()[0]
print "Created the user '%s' with ID: %d" % (username, user_id)
db_connection.commit()
c.close()
db_connection.close()
|
Add a script for creating a new user
#!/usr/bin/python
# This is a small helper script to create a CATMAID user.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
# Requires the file .catmaid-db to be present in your
# home directory, with the following format:
#
# host: localhost
# database: catmaid
# username: catmaid_user
# password: password_of_your_catmaid_user
import sys
import os
from common import db_connection
from subprocess import check_call
import getpass
from psycopg2 import IntegrityError
if len(sys.argv) != 3:
print >> sys.stderr, "Usage: create-project.py <USERNAME> <LONG-NAME>"
sys.exit(1)
username = sys.argv[1]
full_name = sys.argv[2]
# Now get a password from the user:
p1 = getpass.getpass()
p2 = getpass.getpass("Confirm password: ")
if p1 != p2:
print >> sys.stderr, "The passwords didn't match."
sys.exit(2)
c = db_connection.cursor()
try:
c.execute('INSERT INTO "user" (name, pwd, longname) VALUES (%s, md5(%s), %s) RETURNING id',
(username, p1, full_name))
except IntegrityError, e:
print >> sys.stderr, "There is already a user called '%s'" % (username,)
sys.exit(3)
user_id = c.fetchone()[0]
print "Created the user '%s' with ID: %d" % (username, user_id)
db_connection.commit()
c.close()
db_connection.close()
|
<commit_before><commit_msg>Add a script for creating a new user<commit_after>#!/usr/bin/python
# This is a small helper script to create a CATMAID user.
# You may need to install psycopg2, e.g. with:
# sudo apt-get install python-psycopg2
# Requires the file .catmaid-db to be present in your
# home directory, with the following format:
#
# host: localhost
# database: catmaid
# username: catmaid_user
# password: password_of_your_catmaid_user
import sys
import os
from common import db_connection
from subprocess import check_call
import getpass
from psycopg2 import IntegrityError
if len(sys.argv) != 3:
print >> sys.stderr, "Usage: create-project.py <USERNAME> <LONG-NAME>"
sys.exit(1)
username = sys.argv[1]
full_name = sys.argv[2]
# Now get a password from the user:
p1 = getpass.getpass()
p2 = getpass.getpass("Confirm password: ")
if p1 != p2:
print >> sys.stderr, "The passwords didn't match."
sys.exit(2)
c = db_connection.cursor()
try:
c.execute('INSERT INTO "user" (name, pwd, longname) VALUES (%s, md5(%s), %s) RETURNING id',
(username, p1, full_name))
except IntegrityError, e:
print >> sys.stderr, "There is already a user called '%s'" % (username,)
sys.exit(3)
user_id = c.fetchone()[0]
print "Created the user '%s' with ID: %d" % (username, user_id)
db_connection.commit()
c.close()
db_connection.close()
|
|
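The script is Python 2 (`print >>` statements, `except IntegrityError, e`). A rough Python 3 port of the same flow, assuming the same `common.db_connection` helper; psycopg2's cursor API is unchanged:

```python
#!/usr/bin/env python3
import sys
import getpass

from common import db_connection  # same helper the script above relies on
from psycopg2 import IntegrityError

if len(sys.argv) != 3:
    sys.exit("Usage: create-user.py <USERNAME> <LONG-NAME>")
username, full_name = sys.argv[1], sys.argv[2]

p1 = getpass.getpass()
p2 = getpass.getpass("Confirm password: ")
if p1 != p2:
    sys.exit("The passwords didn't match.")

c = db_connection.cursor()
try:
    # md5() is kept only to match the original schema; a real deployment
    # would use a stronger password hash.
    c.execute('INSERT INTO "user" (name, pwd, longname) '
              'VALUES (%s, md5(%s), %s) RETURNING id',
              (username, p1, full_name))
except IntegrityError:
    sys.exit("There is already a user called '%s'" % username)
user_id = c.fetchone()[0]
print("Created the user '%s' with ID: %d" % (username, user_id))
db_connection.commit()
c.close()
db_connection.close()
```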
fba9cb1278afab3c16ec4077770a33c09950e1a7
|
waterbutler/core/path.py
|
waterbutler/core/path.py
|
import os
class WaterButlerPathPart:
DECODE = lambda x: x
ENCODE = lambda x: x
def __init__(self, part, _id=None):
self._id = _id
self._part = part
@property
def identifier(self):
return self._id
@property
def value(self):
return self.__class__.DECODE(self._part)
@property
def raw(self):
return self._part
def __repr__(self):
return '{}({!r})'.format(self.__class__.__name__, self._part)
class WaterButlerPath:
"""
A standardized and validated immutable WaterButler path.
"""
PART_CLASS = WaterButlerPathPart
@classmethod
def from_parts(cls, parts, folder=False):
_ids, _parts = [], ['']
for part in parts:
_ids.append(part.identifier)
_parts.append(part.raw)
return cls('/'.join(_parts), _ids=_ids, folder=folder)
def __init__(self, path, _ids=(), prepend=None, folder=None):
self._generic_path_validation(path)
self._orig_path = path
path = path.strip('/').split('/')
        # Pad _ids with None so it matches the number of path parts.
        _ids = [None] * (len(path) - len(_ids)) + list(_ids)
self._parts = [
self.PART_CLASS(part, _id)
for part, _id
in zip(path, _ids)
]
if folder is not None:
self.is_folder = bool(folder)
else:
self.is_folder = self.path.endswith('/')
@property
def parts(self):
return self._parts
@property
def name(self):
return self._parts[-1].value
@property
def identifier(self):
return self._parts[-1].identifier
@property
def path(self):
return '/'.join([x.value for x in self.parts])
@property
def parent(self):
return self.__class__.from_parts(self.parts[:-1])
def _generic_path_validation(self, path):
pass
def __str__(self):
return '/'.join([''] + [x.raw for x in self.parts]) + ('/' if self.is_folder else '')
def __repr__(self):
return '{}({!r})'.format(self.__class__.__name__, self._orig_path)
|
Add a new variant of WaterButlerPath
|
Add a new variant of WaterButlerPath
|
Python
|
apache-2.0
|
rdhyee/waterbutler,rafaeldelucena/waterbutler,CenterForOpenScience/waterbutler,TomBaxter/waterbutler,felliott/waterbutler,Johnetordoff/waterbutler,cosenal/waterbutler,icereval/waterbutler,chrisseto/waterbutler,hmoco/waterbutler,RCOSDP/waterbutler,Ghalko/waterbutler,kwierman/waterbutler
|
Add a new variant of WaterButlerPath
|
import os
class WaterButlerPathPart:
DECODE = lambda x: x
ENCODE = lambda x: x
def __init__(self, part, _id=None):
self._id = _id
self._part = part
@property
def identifier(self):
return self._id
@property
def value(self):
return self.__class__.DECODE(self._part)
@property
def raw(self):
return self._part
def __repr__(self):
return '{}({!r})'.format(self.__class__.__name__, self._part)
class WaterButlerPath:
"""
A standardized and validated immutable WaterButler path.
"""
PART_CLASS = WaterButlerPathPart
@classmethod
def from_parts(cls, parts, folder=False):
_ids, _parts = [], ['']
for part in parts:
_ids.append(part.identifier)
_parts.append(part.raw)
return cls('/'.join(_parts), _ids=_ids, folder=folder)
def __init__(self, path, _ids=(), prepend=None, folder=None):
self._generic_path_validation(path)
self._orig_path = path
path = path.strip('/').split('/')
        # Pad _ids with None so it matches the number of path parts.
        _ids = [None] * (len(path) - len(_ids)) + list(_ids)
self._parts = [
self.PART_CLASS(part, _id)
for part, _id
in zip(path, _ids)
]
if folder is not None:
self.is_folder = bool(folder)
else:
self.is_folder = self.path.endswith('/')
@property
def parts(self):
return self._parts
@property
def name(self):
return self._parts[-1].value
@property
def identifier(self):
return self._parts[-1].identifier
@property
def path(self):
return '/'.join([x.value for x in self.parts])
@property
def parent(self):
return self.__class__.from_parts(self.parts[:-1])
def _generic_path_validation(self, path):
pass
def __str__(self):
return '/'.join([''] + [x.raw for x in self.parts]) + ('/' if self.is_folder else '')
def __repr__(self):
return '{}({!r})'.format(self.__class__.__name__, self._orig_path)
|
<commit_before><commit_msg>Add a new variant of WaterButlerPath<commit_after>
|
import os
class WaterButlerPathPart:
DECODE = lambda x: x
ENCODE = lambda x: x
def __init__(self, part, _id=None):
self._id = _id
self._part = part
@property
def identifier(self):
return self._id
@property
def value(self):
return self.__class__.DECODE(self._part)
@property
def raw(self):
return self._part
def __repr__(self):
return '{}({!r})'.format(self.__class__.__name__, self._part)
class WaterButlerPath:
"""
A standardized and validated immutable WaterButler path.
"""
PART_CLASS = WaterButlerPathPart
@classmethod
def from_parts(cls, parts, folder=False):
_ids, _parts = [], ['']
for part in parts:
_ids.append(part.identifier)
_parts.append(part.raw)
return cls('/'.join(_parts), _ids=_ids, folder=folder)
def __init__(self, path, _ids=(), prepend=None, folder=None):
self._generic_path_validation(path)
self._orig_path = path
path = path.strip('/').split('/')
        # Pad _ids with None so it matches the number of path parts.
        _ids = [None] * (len(path) - len(_ids)) + list(_ids)
self._parts = [
self.PART_CLASS(part, _id)
for part, _id
in zip(path, _ids)
]
if folder is not None:
self.is_folder = bool(folder)
else:
self.is_folder = self.path.endswith('/')
@property
def parts(self):
return self._parts
@property
def name(self):
return self._parts[-1].value
@property
def identifier(self):
return self._parts[-1].identifier
@property
def path(self):
return '/'.join([x.value for x in self.parts])
@property
def parent(self):
return self.__class__.from_parts(self.parts[:-1])
def _generic_path_validation(self, path):
pass
def __str__(self):
return '/'.join([''] + [x.raw for x in self.parts]) + ('/' if self.is_folder else '')
def __repr__(self):
return '{}({!r})'.format(self.__class__.__name__, self._orig_path)
|
Add a new variant of WaterButlerPath
import os
class WaterButlerPathPart:
DECODE = lambda x: x
ENCODE = lambda x: x
def __init__(self, part, _id=None):
self._id = _id
self._part = part
@property
def identifier(self):
return self._id
@property
def value(self):
return self.__class__.DECODE(self._part)
@property
def raw(self):
return self._part
def __repr__(self):
return '{}({!r})'.format(self.__class__.__name__, self._part)
class WaterButlerPath:
"""
A standardized and validated immutable WaterButler path.
"""
PART_CLASS = WaterButlerPathPart
@classmethod
def from_parts(cls, parts, folder=False):
_ids, _parts = [], ['']
for part in parts:
_ids.append(part.identifier)
_parts.append(part.raw)
return cls('/'.join(_parts), _ids=_ids, folder=folder)
def __init__(self, path, _ids=(), prepend=None, folder=None):
self._generic_path_validation(path)
self._orig_path = path
path = path.strip('/').split('/')
        # Pad _ids with None so it matches the number of path parts.
        _ids = [None] * (len(path) - len(_ids)) + list(_ids)
self._parts = [
self.PART_CLASS(part, _id)
for part, _id
in zip(path, _ids)
]
if folder is not None:
self.is_folder = bool(folder)
else:
self.is_folder = self.path.endswith('/')
@property
def parts(self):
return self._parts
@property
def name(self):
return self._parts[-1].value
@property
def identifier(self):
return self._parts[-1].identifier
@property
def path(self):
return '/'.join([x.value for x in self.parts])
@property
def parent(self):
return self.__class__.from_parts(self.parts[:-1])
def _generic_path_validation(self, path):
pass
def __str__(self):
return '/'.join([''] + [x.raw for x in self.parts]) + ('/' if self.is_folder else '')
def __repr__(self):
return '{}({!r})'.format(self.__class__.__name__, self._orig_path)
|
<commit_before><commit_msg>Add a new variant of WaterButlerPath<commit_after>import os
class WaterButlerPathPart:
DECODE = lambda x: x
ENCODE = lambda x: x
def __init__(self, part, _id=None):
self._id = _id
self._part = part
@property
def identifier(self):
return self._id
@property
def value(self):
return self.__class__.DECODE(self._part)
@property
def raw(self):
return self._part
def __repr__(self):
return '{}({!r})'.format(self.__class__.__name__, self._part)
class WaterButlerPath:
"""
A standardized and validated immutable WaterButler path.
"""
PART_CLASS = WaterButlerPathPart
@classmethod
def from_parts(cls, parts, folder=False):
_ids, _parts = [], ['']
for part in parts:
_ids.append(part.identifier)
_parts.append(part.raw)
return cls('/'.join(_parts), _ids=_ids, folder=folder)
def __init__(self, path, _ids=(), prepend=None, folder=None):
self._generic_path_validation(path)
self._orig_path = path
path = path.strip('/').split('/')
        # Pad _ids with None so it matches the number of path parts.
        _ids = [None] * (len(path) - len(_ids)) + list(_ids)
self._parts = [
self.PART_CLASS(part, _id)
for part, _id
in zip(path, _ids)
]
if folder is not None:
self.is_folder = bool(folder)
else:
self.is_folder = self.path.endswith('/')
@property
def parts(self):
return self._parts
@property
def name(self):
return self._parts[-1].value
@property
def identifier(self):
return self._parts[-1].identifier
@property
def path(self):
return '/'.join([x.value for x in self.parts])
@property
def parent(self):
return self.__class__.from_parts(self.parts[:-1])
def _generic_path_validation(self, path):
pass
def __str__(self):
return '/'.join([''] + [x.raw for x in self.parts]) + ('/' if self.is_folder else '')
def __repr__(self):
return '{}({!r})'.format(self.__class__.__name__, self._orig_path)
|
|
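A short usage sketch against the class as written (with the `_ids` padding fix noted above); the expected values follow directly from the definitions:

```python
path = WaterButlerPath('/folder/file.txt')

print(path.name)         # file.txt
print(path.identifier)   # None -- no _ids were supplied
print(path.is_folder)    # False -- no trailing slash and folder not forced
print(str(path))         # /folder/file.txt
print(str(path.parent))  # /folder
```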
78fa145bca4416726acb7c279754589ba0b2d1c0
|
ProcessGCodeJob.py
|
ProcessGCodeJob.py
|
from UM.Job import Job
from UM.Application import Application
import os
class ProcessGCodeJob(Job):
def __init__(self, message):
super().__init__()
self._message = message
def run(self):
with open(self._message.filename) as f:
            data = f.read()
Application.getInstance().getController().getScene().gcode = data
os.remove(self._message.filename)
|
Add a job to handle processing of GCode from the backend
|
Add a job to handle processing of GCode from the backend
|
Python
|
agpl-3.0
|
markwal/Cura,totalretribution/Cura,derekhe/Cura,lo0ol/Ultimaker-Cura,totalretribution/Cura,fieldOfView/Cura,bq/Ultimaker-Cura,hmflash/Cura,hmflash/Cura,Curahelper/Cura,fxtentacle/Cura,quillford/Cura,fieldOfView/Cura,markwal/Cura,fxtentacle/Cura,DeskboxBrazil/Cura,ynotstartups/Wanhao,ynotstartups/Wanhao,senttech/Cura,quillford/Cura,senttech/Cura,lo0ol/Ultimaker-Cura,ad1217/Cura,bq/Ultimaker-Cura,Curahelper/Cura,ad1217/Cura,derekhe/Cura,DeskboxBrazil/Cura
|
Add a job to handle processing of GCode from the backend
|
from UM.Job import Job
from UM.Application import Application
import os
class ProcessGCodeJob(Job):
def __init__(self, message):
super().__init__()
self._message = message
def run(self):
with open(self._message.filename) as f:
            data = f.read()
Application.getInstance().getController().getScene().gcode = data
os.remove(self._message.filename)
|
<commit_before><commit_msg>Add a job to handle processing of GCode from the backend<commit_after>
|
from UM.Job import Job
from UM.Application import Application
import os
class ProcessGCodeJob(Job):
def __init__(self, message):
super().__init__()
self._message = message
def run(self):
with open(self._message.filename) as f:
            data = f.read()
Application.getInstance().getController().getScene().gcode = data
os.remove(self._message.filename)
|
Add a job to handle processing of GCode from the backend
from UM.Job import Job
from UM.Application import Application
import os
class ProcessGCodeJob(Job):
def __init__(self, message):
super().__init__()
self._message = message
def run(self):
with open(self._message.filename) as f:
            data = f.read()
Application.getInstance().getController().getScene().gcode = data
os.remove(self._message.filename)
|
<commit_before><commit_msg>Add a job to handle processing of GCode from the backend<commit_after>from UM.Job import Job
from UM.Application import Application
import os
class ProcessGCodeJob(Job):
def __init__(self, message):
super().__init__()
self._message = message
def run(self):
with open(self._message.filename) as f:
            data = f.read()
Application.getInstance().getController().getScene().gcode = data
os.remove(self._message.filename)
|
|
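Stripped of the Uranium framework, `run()` is a read-then-delete handoff: slurp the temporary file the backend wrote, stash the text, remove the file. A framework-free sketch of the same pattern (the `Scene` stand-in is hypothetical):

```python
import os
import tempfile

class Scene:
    """Hypothetical stand-in for the application's scene object."""
    gcode = None

def process_gcode_file(filename, scene):
    # Read the whole file, hand the text to the scene, then clean up
    # the temporary file the backend left behind.
    with open(filename) as f:
        scene.gcode = f.read()
    os.remove(filename)

# Demo: write a fake G-code file, process it, confirm the cleanup.
fd, path = tempfile.mkstemp(suffix='.gcode')
with os.fdopen(fd, 'w') as f:
    f.write('G28 ; home all axes\n')

scene = Scene()
process_gcode_file(path, scene)
print(scene.gcode.strip())   # G28 ; home all axes
print(os.path.exists(path))  # False
```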
ae3a567c695477013d34316e160ab09055f8feeb
|
src/MultiProcessing.py
|
src/MultiProcessing.py
|
import multiprocessing
import time
class Consumer(multiprocessing.Process):
def __init__(self, task_queue, result_queue):
multiprocessing.Process.__init__(self)
self.task_queue = task_queue
self.result_queue = result_queue
def run(self):
proc_name = self.name
while True:
next_task = self.task_queue.get()
if next_task is None: # exiting condition for the process.
print '%s: Exiting' % proc_name
self.task_queue.task_done()
break
print '%s: %s' % (proc_name, next_task)
answer = next_task()
self.task_queue.task_done()
self.result_queue.put(answer)
return
class Task(object):
def __init__(self, a, b):
self.a = a
self.b = b
def __call__(self):
time.sleep(1) # pretend to take some time to do the work
return '%s * %s = %s' % (self.a, self.b, self.a * self.b)
def __str__(self):
return '%s * %s' % (self.a, self.b)
if __name__ == '__main__':
# Establish communication queues
tasks = multiprocessing.JoinableQueue()
results = multiprocessing.Queue()
# Start consumers
num_consumers = multiprocessing.cpu_count() * 2
print 'Creating %d consumers' % num_consumers
consumers = [ Consumer(tasks, results)
for i in xrange(num_consumers) ]
for w in consumers:
w.start()
# Enqueue jobs
num_jobs = 10
print 'Creating %d jobs'%num_jobs
for i in xrange(num_jobs):
tasks.put( Task(i, i) )
# Wait for all of the tasks to finish
tasks.join()
print 'Will add termination flags ...'
time.sleep(10)
# Start printing results
while num_jobs:
result = results.get()
print 'Result:', result
num_jobs -= 1
# Add termination flag for processes.
for i in xrange(num_consumers):
tasks.put( None )
|
Make changes to generate ts.csv and only one csv for each market for the day.
|
Make changes to generate ts.csv and only one csv for each market for the day.
|
Python
|
mit
|
aviatorBeijing/CapMarket,aviatorBeijing/CapMarket
|
Make changes to generate ts.csv and only one csv for each market for the day.
|
import multiprocessing
import time
class Consumer(multiprocessing.Process):
def __init__(self, task_queue, result_queue):
multiprocessing.Process.__init__(self)
self.task_queue = task_queue
self.result_queue = result_queue
def run(self):
proc_name = self.name
while True:
next_task = self.task_queue.get()
if next_task is None: # exiting condition for the process.
print '%s: Exiting' % proc_name
self.task_queue.task_done()
break
print '%s: %s' % (proc_name, next_task)
answer = next_task()
self.task_queue.task_done()
self.result_queue.put(answer)
return
class Task(object):
def __init__(self, a, b):
self.a = a
self.b = b
def __call__(self):
time.sleep(1) # pretend to take some time to do the work
return '%s * %s = %s' % (self.a, self.b, self.a * self.b)
def __str__(self):
return '%s * %s' % (self.a, self.b)
if __name__ == '__main__':
# Establish communication queues
tasks = multiprocessing.JoinableQueue()
results = multiprocessing.Queue()
# Start consumers
num_consumers = multiprocessing.cpu_count() * 2
print 'Creating %d consumers' % num_consumers
consumers = [ Consumer(tasks, results)
for i in xrange(num_consumers) ]
for w in consumers:
w.start()
# Enqueue jobs
num_jobs = 10
print 'Creating %d jobs'%num_jobs
for i in xrange(num_jobs):
tasks.put( Task(i, i) )
# Wait for all of the tasks to finish
tasks.join()
print 'Will add termination flags ...'
time.sleep(10)
# Start printing results
while num_jobs:
result = results.get()
print 'Result:', result
num_jobs -= 1
# Add termination flag for processes.
for i in xrange(num_consumers):
tasks.put( None )
|
<commit_before><commit_msg>Make changes to generate ts.csv and only one csv for each market for the day.<commit_after>
|
import multiprocessing
import time
class Consumer(multiprocessing.Process):
def __init__(self, task_queue, result_queue):
multiprocessing.Process.__init__(self)
self.task_queue = task_queue
self.result_queue = result_queue
def run(self):
proc_name = self.name
while True:
next_task = self.task_queue.get()
if next_task is None: # exiting condition for the process.
print '%s: Exiting' % proc_name
self.task_queue.task_done()
break
print '%s: %s' % (proc_name, next_task)
answer = next_task()
self.task_queue.task_done()
self.result_queue.put(answer)
return
class Task(object):
def __init__(self, a, b):
self.a = a
self.b = b
def __call__(self):
time.sleep(1) # pretend to take some time to do the work
return '%s * %s = %s' % (self.a, self.b, self.a * self.b)
def __str__(self):
return '%s * %s' % (self.a, self.b)
if __name__ == '__main__':
# Establish communication queues
tasks = multiprocessing.JoinableQueue()
results = multiprocessing.Queue()
# Start consumers
num_consumers = multiprocessing.cpu_count() * 2
print 'Creating %d consumers' % num_consumers
consumers = [ Consumer(tasks, results)
for i in xrange(num_consumers) ]
for w in consumers:
w.start()
# Enqueue jobs
num_jobs = 10
print 'Creating %d jobs'%num_jobs
for i in xrange(num_jobs):
tasks.put( Task(i, i) )
# Wait for all of the tasks to finish
tasks.join()
print 'Will add termination flags ...'
time.sleep(10)
# Start printing results
while num_jobs:
result = results.get()
print 'Result:', result
num_jobs -= 1
# Add termination flag for processes.
for i in xrange(num_consumers):
tasks.put( None )
|
Make changes to generate ts.csv and only one csv for each market for the day.
import multiprocessing
import time
class Consumer(multiprocessing.Process):
def __init__(self, task_queue, result_queue):
multiprocessing.Process.__init__(self)
self.task_queue = task_queue
self.result_queue = result_queue
def run(self):
proc_name = self.name
while True:
next_task = self.task_queue.get()
if next_task is None: # exiting condition for the process.
print '%s: Exiting' % proc_name
self.task_queue.task_done()
break
print '%s: %s' % (proc_name, next_task)
answer = next_task()
self.task_queue.task_done()
self.result_queue.put(answer)
return
class Task(object):
def __init__(self, a, b):
self.a = a
self.b = b
def __call__(self):
time.sleep(1) # pretend to take some time to do the work
return '%s * %s = %s' % (self.a, self.b, self.a * self.b)
def __str__(self):
return '%s * %s' % (self.a, self.b)
if __name__ == '__main__':
# Establish communication queues
tasks = multiprocessing.JoinableQueue()
results = multiprocessing.Queue()
# Start consumers
num_consumers = multiprocessing.cpu_count() * 2
print 'Creating %d consumers' % num_consumers
consumers = [ Consumer(tasks, results)
for i in xrange(num_consumers) ]
for w in consumers:
w.start()
# Enqueue jobs
num_jobs = 10
print 'Creating %d jobs'%num_jobs
for i in xrange(num_jobs):
tasks.put( Task(i, i) )
# Wait for all of the tasks to finish
tasks.join()
print 'Will add termination flags ...'
time.sleep(10)
# Start printing results
while num_jobs:
result = results.get()
print 'Result:', result
num_jobs -= 1
# Add termination flag for processes.
for i in xrange(num_consumers):
tasks.put( None )
|
<commit_before><commit_msg>Make changes to generate ts.csv and only one csv for each market for the day.<commit_after>import multiprocessing
import time
class Consumer(multiprocessing.Process):
def __init__(self, task_queue, result_queue):
multiprocessing.Process.__init__(self)
self.task_queue = task_queue
self.result_queue = result_queue
def run(self):
proc_name = self.name
while True:
next_task = self.task_queue.get()
if next_task is None: # exiting condition for the process.
print '%s: Exiting' % proc_name
self.task_queue.task_done()
break
print '%s: %s' % (proc_name, next_task)
answer = next_task()
self.task_queue.task_done()
self.result_queue.put(answer)
return
class Task(object):
def __init__(self, a, b):
self.a = a
self.b = b
def __call__(self):
time.sleep(1) # pretend to take some time to do the work
return '%s * %s = %s' % (self.a, self.b, self.a * self.b)
def __str__(self):
return '%s * %s' % (self.a, self.b)
if __name__ == '__main__':
# Establish communication queues
tasks = multiprocessing.JoinableQueue()
results = multiprocessing.Queue()
# Start consumers
num_consumers = multiprocessing.cpu_count() * 2
print 'Creating %d consumers' % num_consumers
consumers = [ Consumer(tasks, results)
for i in xrange(num_consumers) ]
for w in consumers:
w.start()
# Enqueue jobs
num_jobs = 10
print 'Creating %d jobs'%num_jobs
for i in xrange(num_jobs):
tasks.put( Task(i, i) )
# Wait for all of the tasks to finish
tasks.join()
print 'Will add termination flags ...'
time.sleep(10)
# Start printing results
while num_jobs:
result = results.get()
print 'Result:', result
num_jobs -= 1
# Add termination flag for processes.
for i in xrange(num_consumers):
tasks.put( None )
|
|
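The snippet is Python 2 (`print` statements, `xrange`). A compact Python 3 port of the same poison-pill pattern that also enqueues the `None` flags before joining, so nothing depends on a `sleep()`:

```python
import multiprocessing

def consumer(task_queue, result_queue):
    while True:
        task = task_queue.get()
        if task is None:          # poison pill: exit the worker
            task_queue.task_done()
            break
        a, b = task
        result_queue.put('%s * %s = %s' % (a, b, a * b))
        task_queue.task_done()

if __name__ == '__main__':
    tasks = multiprocessing.JoinableQueue()
    results = multiprocessing.Queue()
    num_consumers = multiprocessing.cpu_count()

    workers = [multiprocessing.Process(target=consumer, args=(tasks, results))
               for _ in range(num_consumers)]
    for w in workers:
        w.start()

    num_jobs = 10
    for i in range(num_jobs):
        tasks.put((i, i))
    for _ in range(num_consumers):
        tasks.put(None)           # one pill per worker

    tasks.join()                  # wait until every task is marked done
    for _ in range(num_jobs):
        print(results.get())
```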
aef0765b2d76f371bda6e7ce3575bbede1b2a2a2
|
tests/test_init.py
|
tests/test_init.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
class TestInit(TestCase):
def test_centerline_import__import_successful(self):
"""ImportError should not be raised!"""
from centerline import Centerline
|
Test the import of the Centerline class
|
Test the import of the Centerline class
|
Python
|
mit
|
fitodic/polygon-centerline,fitodic/centerline,fitodic/centerline
|
Test the import of the Centerline class
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
class TestInit(TestCase):
def test_centerline_import__import_successful(self):
"""ImportError should not be raised!"""
from centerline import Centerline
|
<commit_before><commit_msg>Test the import of the Centerline class<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
class TestInit(TestCase):
def test_centerline_import__import_successful(self):
"""ImportError should not be raised!"""
from centerline import Centerline
|
Test the import of the Centerline class
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
class TestInit(TestCase):
def test_centerline_import__import_successful(self):
"""ImportError should not be raised!"""
from centerline import Centerline
|
<commit_before><commit_msg>Test the import of the Centerline class<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
class TestInit(TestCase):
def test_centerline_import__import_successful(self):
"""ImportError should not be raised!"""
from centerline import Centerline
|
|
b53d1b632d14904800f74d4c31f7a42b30e3ef7a
|
decorators.py
|
decorators.py
|
#!/usr/bin/env python
class RequiresType(object):
"""
    Checks that the first argument (or the one at the position given by the
    keyword argument 'position') to the function is an instance of one of the
    types given in the positional decorator arguments.
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
self.returnvalue = False
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
if 'returnvalue' in kwargs:
self.returnvalue = kwargs['returnvalue']
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
return self.returnvalue
return f(*args, **kwargs)
return wrapped_f
|
Add a decorator for requiring a specific type
|
Add a decorator for requiring a specific type
Preparation for moving the rule matching into a class so adding more
rules is simpler, and applying rules to more than two types makes sense.
|
Python
|
bsd-3-clause
|
rasher/reddit-modbot
|
Add a decorator for requiring a specific type
Preparation for moving the rule matching into a class so adding more
rules is simpler, and applying rules to more than two types makes sense.
|
#!/usr/bin/env python
class RequiresType(object):
"""
    Checks that the first argument (or the one at the position given by the
    keyword argument 'position') to the function is an instance of one of the
    types given in the positional decorator arguments.
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
self.returnvalue = False
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
if 'returnvalue' in kwargs:
self.returnvalue = kwargs['returnvalue']
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
return self.returnvalue
return f(*args, **kwargs)
return wrapped_f
|
<commit_before><commit_msg>Add a decorator for requiring a specific type
Preparation for moving the rule matching into a class so adding more
rules is simpler, and applying rules to more than two types makes sense.<commit_after>
|
#!/usr/bin/env python
class RequiresType(object):
"""
    Checks that the first argument (or the one at the position given by the
    keyword argument 'position') to the function is an instance of one of the
    types given in the positional decorator arguments.
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
self.returnvalue = False
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
if 'returnvalue' in kwargs:
self.returnvalue = kwargs['returnvalue']
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
return self.returnvalue
return f(*args, **kwargs)
return wrapped_f
|
Add a decorator for requiring a specific type
Preparation for moving the rule matching into a class so adding more
rules is simpler, and applying rules to more than two types makes sense.
#!/usr/bin/env python
class RequiresType(object):
"""
    Checks that the first argument (or the one at the position given by the
    keyword argument 'position') to the function is an instance of one of the
    types given in the positional decorator arguments.
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
self.returnvalue = False
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
if 'returnvalue' in kwargs:
self.returnvalue = kwargs['returnvalue']
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
return self.returnvalue
return f(*args, **kwargs)
return wrapped_f
|
<commit_before><commit_msg>Add a decorator for requiring a specific type
Preparation for moving the rule matching into a class so adding more
rules is simpler, and applying rules to more than two types makes sense.<commit_after>#!/usr/bin/env python
class RequiresType(object):
"""
    Checks that the first argument (or the one at the position given by the
    keyword argument 'position') to the function is an instance of one of the
    types given in the positional decorator arguments.
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
self.returnvalue = False
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
if 'returnvalue' in kwargs:
self.returnvalue = kwargs['returnvalue']
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
return self.returnvalue
return f(*args, **kwargs)
return wrapped_f
|
|
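Applying the decorator as defined above; `returnvalue` supplies the fallback when the type check fails, and `position` is 1-based:

```python
@RequiresType(int, float, returnvalue=None)
def double(x):
    return x * 2

@RequiresType(str, position=2, returnvalue='n/a')
def label(obj, name):
    return '%s: %r' % (name, obj)

print(double(21))         # 42
print(double('21'))       # None -- str is not an accepted type
print(label(7, 'seven'))  # seven: 7
print(label(7, 42))       # n/a -- the second argument must be a str
```

Note the check uses `type(...) not in self.types`, so subclasses of an accepted type are rejected; `isinstance` would be the looser alternative.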
a2fba4f0f90f069c0fcacc597fc1690f6d9d7609
|
atlantic/bathy/get_bathy.py
|
atlantic/bathy/get_bathy.py
|
#!/usr/bin/env python
"""Simple implementation of a file fetcher"""
import sys
import os
import urllib
import subprocess
def get_bathy(url, destination=os.getcwd(), force=False):
r"""Get bathymetry file located at `url`
Will check downloaded file's suffix to see if the file needs to be extracted
"""
file_name = os.path.basename(url)
output_path = os.path.join(destination, file_name)
if not os.path.exists(output_path) or force:
print "Downloading %s to %s..." % (url, output_path)
urllib.urlretrieve(url, output_path)
print "Finished downloading."
else:
print "Skipping %s, file already exists." % file_name
tar = False
gunzip = False
split_file_name = file_name.split('.')
if split_file_name[-1] == 'gz':
gunzip = True
if split_file_name[-2] == 'tar':
tar = True
if split_file_name[-1] == 'tgz':
gunzip = True
tar = True
if gunzip or tar:
print "Extracting %s" % file_name
if gunzip and tar:
subprocess.Popen('tar xvzf %s' % output_path, shell=True)
elif gunzip:
subprocess.Popen('gunzip %s' % output_path, shell=True)
elif tar:
subprocess.Popen('tar xvf %s' % output_path, shell=True)
if __name__ == "__main__":
# Default URLs
base_url = "http://users.ices.utexas.edu/~kyle/bathy/"
# Override base_url
if len(sys.argv) > 1:
base_url = sys.argv[1]
urls = [os.path.join(base_url, 'atlantic_2min.tt3'),
os.path.join(base_url, 'newyork_3s.tt3')]
for url in urls:
get_bathy(url)
|
Add bathy download script for atlantic
|
Add bathy download script for atlantic
|
Python
|
mit
|
mandli/surge-examples
|
Add bathy download script for atlantic
|
#!/usr/bin/env python
"""Simple implementation of a file fetcher"""
import sys
import os
import urllib
import subprocess
def get_bathy(url, destination=os.getcwd(), force=False):
r"""Get bathymetry file located at `url`
Will check downloaded file's suffix to see if the file needs to be extracted
"""
file_name = os.path.basename(url)
output_path = os.path.join(destination, file_name)
if not os.path.exists(output_path) or force:
print "Downloading %s to %s..." % (url, output_path)
urllib.urlretrieve(url, output_path)
print "Finished downloading."
else:
print "Skipping %s, file already exists." % file_name
tar = False
gunzip = False
split_file_name = file_name.split('.')
if split_file_name[-1] == 'gz':
gunzip = True
if split_file_name[-2] == 'tar':
tar = True
if split_file_name[-1] == 'tgz':
gunzip = True
tar = True
if gunzip or tar:
print "Extracting %s" % file_name
if gunzip and tar:
subprocess.Popen('tar xvzf %s' % output_path, shell=True)
elif gunzip:
subprocess.Popen('gunzip %s' % output_path, shell=True)
elif tar:
subprocess.Popen('tar xvf %s' % output_path, shell=True)
if __name__ == "__main__":
# Default URLs
base_url = "http://users.ices.utexas.edu/~kyle/bathy/"
# Override base_url
if len(sys.argv) > 1:
base_url = sys.argv[1]
urls = [os.path.join(base_url, 'atlantic_2min.tt3'),
os.path.join(base_url, 'newyork_3s.tt3')]
for url in urls:
get_bathy(url)
|
<commit_before><commit_msg>Add bathy download script for atlantic<commit_after>
|
#!/usr/bin/env python
"""Simple implementation of a file fetcher"""
import sys
import os
import urllib
import subprocess
def get_bathy(url, destination=os.getcwd(), force=False):
r"""Get bathymetry file located at `url`
Will check downloaded file's suffix to see if the file needs to be extracted
"""
file_name = os.path.basename(url)
output_path = os.path.join(destination, file_name)
if not os.path.exists(output_path) or force:
print "Downloading %s to %s..." % (url, output_path)
urllib.urlretrieve(url, output_path)
print "Finished downloading."
else:
print "Skipping %s, file already exists." % file_name
tar = False
gunzip = False
split_file_name = file_name.split('.')
if split_file_name[-1] == 'gz':
gunzip = True
if split_file_name[-2] == 'tar':
tar = True
if split_file_name[-1] == 'tgz':
gunzip = True
tar = True
if gunzip or tar:
print "Extracting %s" % file_name
if gunzip and tar:
subprocess.Popen('tar xvzf %s' % output_path, shell=True)
elif gunzip:
subprocess.Popen('gunzip %s' % output_path, shell=True)
elif tar:
subprocess.Popen('tar xvf %s' % output_path, shell=True)
if __name__ == "__main__":
# Default URLs
base_url = "http://users.ices.utexas.edu/~kyle/bathy/"
# Override base_url
if len(sys.argv) > 1:
base_url = sys.argv[1]
urls = [os.path.join(base_url, 'atlantic_2min.tt3'),
os.path.join(base_url, 'newyork_3s.tt3')]
for url in urls:
get_bathy(url)
|
Add bathy download script for atlantic
#!/usr/bin/env python
"""Simple implementation of a file fetcher"""
import sys
import os
import urllib
import subprocess
def get_bathy(url, destination=os.getcwd(), force=False):
r"""Get bathymetry file located at `url`
Will check downloaded file's suffix to see if the file needs to be extracted
"""
file_name = os.path.basename(url)
output_path = os.path.join(destination, file_name)
if not os.path.exists(output_path) or force:
print "Downloading %s to %s..." % (url, output_path)
urllib.urlretrieve(url, output_path)
print "Finished downloading."
else:
print "Skipping %s, file already exists." % file_name
tar = False
gunzip = False
split_file_name = file_name.split('.')
if split_file_name[-1] == 'gz':
gunzip = True
if split_file_name[-2] == 'tar':
tar = True
if split_file_name[-1] == 'tgz':
gunzip = True
tar = True
if gunzip or tar:
print "Extracting %s" % file_name
if gunzip and tar:
subprocess.Popen('tar xvzf %s' % output_path, shell=True)
elif gunzip:
subprocess.Popen('gunzip %s' % output_path, shell=True)
elif tar:
subprocess.Popen('tar xvf %s' % output_path, shell=True)
if __name__ == "__main__":
# Default URLs
base_url = "http://users.ices.utexas.edu/~kyle/bathy/"
# Override base_url
if len(sys.argv) > 1:
base_url = sys.argv[1]
urls = [os.path.join(base_url, 'atlantic_2min.tt3'),
os.path.join(base_url, 'newyork_3s.tt3')]
for url in urls:
get_bathy(url)
|
<commit_before><commit_msg>Add bathy download script for atlantic<commit_after>#!/usr/bin/env python
"""Simple implementation of a file fetcher"""
import sys
import os
import urllib
import subprocess
def get_bathy(url, destination=os.getcwd(), force=False):
r"""Get bathymetry file located at `url`
Will check downloaded file's suffix to see if the file needs to be extracted
"""
file_name = os.path.basename(url)
output_path = os.path.join(destination, file_name)
if not os.path.exists(output_path) or force:
print "Downloading %s to %s..." % (url, output_path)
urllib.urlretrieve(url, output_path)
print "Finished downloading."
else:
print "Skipping %s, file already exists." % file_name
tar = False
gunzip = False
split_file_name = file_name.split('.')
if split_file_name[-1] == 'gz':
gunzip = True
if split_file_name[-2] == 'tar':
tar = True
if split_file_name[-1] == 'tgz':
gunzip = True
tar = True
if gunzip or tar:
print "Extracting %s" % file_name
if gunzip and tar:
subprocess.Popen('tar xvzf %s' % output_path, shell=True)
elif gunzip:
subprocess.Popen('gunzip %s' % output_path, shell=True)
elif tar:
subprocess.Popen('tar xvf %s' % output_path, shell=True)
if __name__ == "__main__":
# Default URLs
base_url = "http://users.ices.utexas.edu/~kyle/bathy/"
# Override base_url
if len(sys.argv) > 1:
base_url = sys.argv[1]
urls = [os.path.join(base_url, 'atlantic_2min.tt3'),
os.path.join(base_url, 'newyork_3s.tt3')]
for url in urls:
get_bathy(url)
|
|
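A Python 3 variant of the fetch-and-extract flow, swapping the shelled-out `tar`/`gunzip` calls for the standard library (`tarfile` autodetects gzip compression); a sketch, not a drop-in replacement:

```python
import gzip
import os
import shutil
import tarfile
import urllib.request

def get_bathy(url, destination=None, force=False):
    """Fetch a bathymetry file and extract it if it is an archive."""
    destination = destination or os.getcwd()
    file_name = os.path.basename(url)
    output_path = os.path.join(destination, file_name)

    if force or not os.path.exists(output_path):
        print("Downloading %s to %s..." % (url, output_path))
        urllib.request.urlretrieve(url, output_path)
    else:
        print("Skipping %s, file already exists." % file_name)

    if file_name.endswith(('.tar.gz', '.tgz', '.tar')):
        with tarfile.open(output_path) as tar:  # handles gzipped tars too
            tar.extractall(destination)
    elif file_name.endswith('.gz'):
        with gzip.open(output_path, 'rb') as src, \
                open(output_path[:-3], 'wb') as dst:
            shutil.copyfileobj(src, dst)
```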
816c87ba5f38760adfc87877a1c403da3f0bc054
|
test/test_pips.py
|
test/test_pips.py
|
import pytest
@pytest.mark.parametrize("name", [
("ansible"),
("docker-py"),
("mkdocs"),
("mkdocs-material"),
])
def test_pips(host, name):
assert name in host.pip_package.get_packages()
|
Add tests for pip packages
|
Add tests for pip packages
|
Python
|
mit
|
wicksy/CV,wicksy/CV,wicksy/CV
|
Add tests for pip packages
|
import pytest
@pytest.mark.parametrize("name", [
("ansible"),
("docker-py"),
("mkdocs"),
("mkdocs-material"),
])
def test_pips(host, name):
assert name in host.pip_package.get_packages()
|
<commit_before><commit_msg>Add tests for pip packages<commit_after>
|
import pytest
@pytest.mark.parametrize("name", [
("ansible"),
("docker-py"),
("mkdocs"),
("mkdocs-material"),
])
def test_pips(host, name):
assert name in host.pip_package.get_packages()
|
Add tests for pip packages
import pytest
@pytest.mark.parametrize("name", [
("ansible"),
("docker-py"),
("mkdocs"),
("mkdocs-material"),
])
def test_pips(host, name):
assert name in host.pip_package.get_packages()
|
<commit_before><commit_msg>Add tests for pip packages<commit_after>import pytest
@pytest.mark.parametrize("name", [
("ansible"),
("docker-py"),
("mkdocs"),
("mkdocs-material"),
])
def test_pips(host, name):
assert name in host.pip_package.get_packages()
|
|
ca8151832647864cd6e63dc84cbed8c77955d91e
|
recipes/solaryears.py
|
recipes/solaryears.py
|
"""
A tropical solar year is the length from spring equinox
to the following spring equinox.
This recipe was implemented to reply to a topic opened
at http://skyscript.co.uk/forums/viewtopic.php?t=8563
and shows that the length of the solar year varies by more
than 25 minutes around the average year of 365.2425 days.
To plot the graphs you must have matplotlib installed.
"""
from flatlib.datetime import Datetime
from flatlib.ephem import ephem
def plot(hdiff, title):
""" Plots the tropical solar length
by year.
"""
import matplotlib.pyplot as plt
years = [elem[0] for elem in hdiff]
diffs = [elem[1] for elem in hdiff]
plt.plot(years, diffs)
plt.ylabel('Distance in minutes')
plt.xlabel('Year')
plt.title(title)
plt.axhline(y=0, c='red')
plt.show()
# Set the starting year
sYear = 1980
# Get successive spring equinox dates
equinoxes = []
span = 100
for year in range(sYear, sYear + span):
# Get the spring equinox date for the year
dt = Datetime('%s/01/01' % year, '00:00')
sr = ephem.nextSolarReturn(dt, 0.00)
equinoxes.append([year, sr.jd])
# Compute successive differences
diffs = []
for i in range(len(equinoxes) - 1):
year1, jd1 = equinoxes[i]
year2, jd2 = equinoxes[i+1]
diffs.append([year1, (jd2 - jd1 - 365.2425) * 24 * 60])
print(diffs)
title = 'Solar year length from %s to %s\n' % (sYear , sYear + span)
title+= '(Compared to average year of 365.2425)'
plot(diffs, title)
|
Add solar years to recipes
|
Add solar years to recipes
|
Python
|
mit
|
flatangle/flatlib
|
Add solar years to recipes
|
"""
A tropical solar year is the length from spring equinox
to the following spring equinox.
This recipe was implemented to reply to a topic opened
at http://skyscript.co.uk/forums/viewtopic.php?t=8563
and shows that the length of the solar year varies by more
than 25 minutes around the average year of 365.2425 days.
To plot the graphs you must have matplotlib installed.
"""
from flatlib.datetime import Datetime
from flatlib.ephem import ephem
def plot(hdiff, title):
""" Plots the tropical solar length
by year.
"""
import matplotlib.pyplot as plt
years = [elem[0] for elem in hdiff]
diffs = [elem[1] for elem in hdiff]
plt.plot(years, diffs)
plt.ylabel('Distance in minutes')
plt.xlabel('Year')
plt.title(title)
plt.axhline(y=0, c='red')
plt.show()
# Set the starting year
sYear = 1980
# Get successive spring equinox dates
equinoxes = []
span = 100
for year in range(sYear, sYear + span):
# Get the spring equinox date for the year
dt = Datetime('%s/01/01' % year, '00:00')
sr = ephem.nextSolarReturn(dt, 0.00)
equinoxes.append([year, sr.jd])
# Compute successive differences
diffs = []
for i in range(len(equinoxes) - 1):
year1, jd1 = equinoxes[i]
year2, jd2 = equinoxes[i+1]
diffs.append([year1, (jd2 - jd1 - 365.2425) * 24 * 60])
print(diffs)
title = 'Solar year length from %s to %s\n' % (sYear , sYear + span)
title+= '(Compared to average year of 365.2425)'
plot(diffs, title)
|
<commit_before><commit_msg>Add solar years to recipes<commit_after>
|
"""
A tropical solar year is the length from spring equinox
to the following spring equinox.
This recipe was implemented to reply to a topic opened
at http://skyscript.co.uk/forums/viewtopic.php?t=8563
and shows that the length of the solar year varies by more
than 25 minutes around the average year of 365.2425 days.
To plot the graphs you must have matplotlib installed.
"""
from flatlib.datetime import Datetime
from flatlib.ephem import ephem
def plot(hdiff, title):
""" Plots the tropical solar length
by year.
"""
import matplotlib.pyplot as plt
years = [elem[0] for elem in hdiff]
diffs = [elem[1] for elem in hdiff]
plt.plot(years, diffs)
plt.ylabel('Distance in minutes')
plt.xlabel('Year')
plt.title(title)
plt.axhline(y=0, c='red')
plt.show()
# Set the starting year
sYear = 1980
# Get successive spring equinox dates
equinoxes = []
span = 100
for year in range(sYear, sYear + span):
# Get the spring equinox date for the year
dt = Datetime('%s/01/01' % year, '00:00')
sr = ephem.nextSolarReturn(dt, 0.00)
equinoxes.append([year, sr.jd])
# Compute successive differences
diffs = []
for i in range(len(equinoxes) - 1):
year1, jd1 = equinoxes[i]
year2, jd2 = equinoxes[i+1]
diffs.append([year1, (jd2 - jd1 - 365.2425) * 24 * 60])
print(diffs)
title = 'Solar year length from %s to %s\n' % (sYear , sYear + span)
title+= '(Compared to average year of 365.2425)'
plot(diffs, title)
|
Add solar years to recipes"""
A tropical solar year is the length from spring equinox
to the following spring equinox.
This recipe was implemented to reply to a topic opened
at http://skyscript.co.uk/forums/viewtopic.php?t=8563
and shows that the length of the solar year varies by more
than 25 minutes around the average year of 365.2425 days.
To plot the graphs you must have matplotlib installed.
"""
from flatlib.datetime import Datetime
from flatlib.ephem import ephem
def plot(hdiff, title):
""" Plots the tropical solar length
by year.
"""
import matplotlib.pyplot as plt
years = [elem[0] for elem in hdiff]
diffs = [elem[1] for elem in hdiff]
plt.plot(years, diffs)
plt.ylabel('Distance in minutes')
plt.xlabel('Year')
plt.title(title)
plt.axhline(y=0, c='red')
plt.show()
# Set the starting year
sYear = 1980
# Get successive spring equinox dates
equinoxes = []
span = 100
for year in range(sYear, sYear + span):
# Get the spring equinox date for the year
dt = Datetime('%s/01/01' % year, '00:00')
sr = ephem.nextSolarReturn(dt, 0.00)
equinoxes.append([year, sr.jd])
# Compute successive differences
diffs = []
for i in range(len(equinoxes) - 1):
year1, jd1 = equinoxes[i]
year2, jd2 = equinoxes[i+1]
diffs.append([year1, (jd2 - jd1 - 365.2425) * 24 * 60])
print(diffs)
title = 'Solar year length from %s to %s\n' % (sYear , sYear + span)
title+= '(Compared to average year of 365.2425)'
plot(diffs, title)
|
<commit_before><commit_msg>Add solar years to recipes<commit_after>"""
A tropical solar year is the length from spring equinox
to the following spring equinox.
This recipe was implemented to reply to a topic opened
at http://skyscript.co.uk/forums/viewtopic.php?t=8563
and shows that the length of the solar year varies by more
than 25 minutes around the average year of 365.2425 days.
To plot the graphs you must have matplotlib installed.
"""
from flatlib.datetime import Datetime
from flatlib.ephem import ephem
def plot(hdiff, title):
""" Plots the tropical solar length
by year.
"""
import matplotlib.pyplot as plt
years = [elem[0] for elem in hdiff]
diffs = [elem[1] for elem in hdiff]
plt.plot(years, diffs)
plt.ylabel('Distance in minutes')
plt.xlabel('Year')
plt.title(title)
plt.axhline(y=0, c='red')
plt.show()
# Set the starting year
sYear = 1980
# Get successive spring equinox dates
equinoxes = []
span = 100
for year in range(sYear, sYear + span):
# Get the spring equinox date for the year
dt = Datetime('%s/01/01' % year, '00:00')
sr = ephem.nextSolarReturn(dt, 0.00)
equinoxes.append([year, sr.jd])
# Compute successive differences
diffs = []
for i in range(len(equinoxes) - 1):
year1, jd1 = equinoxes[i]
year2, jd2 = equinoxes[i+1]
diffs.append([year1, (jd2 - jd1 - 365.2425) * 24 * 60])
print(diffs)
title = 'Solar year length from %s to %s\n' % (sYear , sYear + span)
title+= '(Compared to average year of 365.2425)'
plot(diffs, title)
|
|
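The 365.2425-day baseline is the Gregorian calendar's mean year, 97 leap years per 400. A two-line check:

```python
# Gregorian cycle: 400 years, 97 of them leap (divisible by 4,
# excluding centuries unless divisible by 400).
mean_year = (303 * 365 + 97 * 366) / 400
print(mean_year)  # 365.2425
```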
80bcce3208941df6b51d97fabdacaa93bb764376
|
clbundler/buildtools.py
|
clbundler/buildtools.py
|
import os
import system
def cmake_generator(toolchain, arch):
"""Return name of CMake generator for toolchain"""
generator = ""
if toolchain.startswith("vc"):
if toolchain == "vc9":
generator = "Visual Studio 9 2008"
else:
generator = "Visual Studio " + vc_version(toolchain)
if arch == "x64":
generator = generator + " Win64"
else:
generator = "Unix Makefiles"
return generator
def cmake(context, options={}, build_dir=""):
"""Configure a CMake based project
The current directory is assumed to be the top level source directory
CMAKE_INSTALL_PREFIX will be set to context.install_dir
Arguments:
context -- BuildContext instance
options -- dictionary of CMake cache variables
build_dir -- the directory used for the build (defaults to ./cmake_build)
"""
if not build_dir:
build_dir = "cmake_build"
if not os.path.exists(build_dir):
os.mkdir(build_dir)
os.chdir(build_dir)
args = ["-D", "CMAKE_INSTALL_PREFIX=" + context.install_dir]
for i in options.iteritems():
args.extend(["-D", "=".join(i)])
args.extend(["-G", cmake_generator(context.toolchain, context.arch)])
args.append(os.path.relpath(".", build_dir))
system.run_cmd("cmake", args)
os.chdir("..")
def vc_version(toolchain):
"""Return the Visual C++ version from the toolchain string"""
if toolchain.startswith("vc"):
return toolchain[2:]
def vcproj_ext(version):
"""Return file extension for Visual C++ projects"""
if int(version) > 9:
return ".vcxproj"
else:
return ".vcproj"
def vcbuild(context, filepath, config, extras=[]):
"""Build a Visual C++ project file or solution
Uses vcbuild for vc9 and older, msbuild otherwise
Arguments:
context -- BuildContext instance
filepath -- path to project or solution
config -- the solution configuration to use
extras -- extra command line options to pass to vcbuild or msbuild
"""
if context.arch == "x64":
platform = "Win64"
else:
platform = "Win32"
if int(vc_version(context.toolchain)) > 9:
system.run_cmd("msbuild", [filepath, "/m", "/nologo", "/verbosity:minimal",
"/p:Configuration=" + config,
"/p:Platform=" + platform] + extras)
else:
system.run_cmd("vcbuild", [filepath, "{0}|{1}".format(config, platform)] + extras)
|
Add functions for running cmake and vcbuild
|
Add functions for running cmake and vcbuild
|
Python
|
mit
|
peterl94/CLbundler,peterl94/CLbundler
|
Add functions for running cmake and vcbuild
|
import os
import system
def cmake_generator(toolchain, arch):
"""Return name of CMake generator for toolchain"""
generator = ""
if toolchain.startswith("vc"):
if toolchain == "vc9":
generator = "Visual Studio 9 2008"
else:
generator = "Visual Studio " + vc_version(toolchain)
if arch == "x64":
generator = generator + " Win64"
else:
generator = "Unix Makefiles"
return generator
def cmake(context, options={}, build_dir=""):
"""Configure a CMake based project
The current directory is assumed to be the top level source directory
CMAKE_INSTALL_PREFIX will be set to context.install_dir
Arguments:
context -- BuildContext instance
options -- dictionary of CMake cache variables
build_dir -- the directory used for the build (defaults to ./cmake_build)
"""
if not build_dir:
build_dir = "cmake_build"
if not os.path.exists(build_dir):
os.mkdir(build_dir)
os.chdir(build_dir)
args = ["-D", "CMAKE_INSTALL_PREFIX=" + context.install_dir]
for i in options.iteritems():
args.extend(["-D", "=".join(i)])
args.extend(["-G", cmake_generator(context.toolchain, context.arch)])
args.append(os.path.relpath(".", build_dir))
system.run_cmd("cmake", args)
os.chdir("..")
def vc_version(toolchain):
"""Return the Visual C++ version from the toolchain string"""
if toolchain.startswith("vc"):
return toolchain[2:]
def vcproj_ext(version):
"""Return file extension for Visual C++ projects"""
if int(version) > 9:
return ".vcxproj"
else:
return ".vcproj"
def vcbuild(context, filepath, config, extras=[]):
"""Build a Visual C++ project file or solution
Uses vcbuild for vc9 and older, msbuild otherwise
Arguments:
context -- BuildContext instance
filepath -- path to project or solution
config -- the solution configuration to use
extras -- extra command line options to pass to vcbuild or msbuild
"""
if context.arch == "x64":
platform = "Win64"
else:
platform = "Win32"
if int(vc_version(context.toolchain)) > 9:
system.run_cmd("msbuild", [filepath, "/m", "/nologo", "/verbosity:minimal",
"/p:Configuration=" + config,
"/p:Platform=" + platform] + extras)
else:
system.run_cmd("vcbuild", [filepath, "{0}|{1}".format(config, platform)] + extras)
|
<commit_before><commit_msg>Add functions for running cmake and vcbuild<commit_after>
|
import os
import system
def cmake_generator(toolchain, arch):
"""Return name of CMake generator for toolchain"""
generator = ""
if toolchain.startswith("vc"):
if toolchain == "vc9":
generator = "Visual Studio 9 2008"
else:
generator = "Visual Studio " + vc_version(toolchain)
if arch == "x64":
generator = generator + " Win64"
else:
generator = "Unix Makefiles"
return generator
def cmake(context, options={}, build_dir=""):
"""Configure a CMake based project
The current directory is assumed to be the top level source directory
CMAKE_INSTALL_PREFIX will be set to context.install_dir
Arguments:
context -- BuildContext instance
options -- dictionary of CMake cache variables
build_dir -- the directory used for the build (defaults to ./cmake_build)
"""
if not build_dir:
build_dir = "cmake_build"
if not os.path.exists(build_dir):
os.mkdir(build_dir)
os.chdir(build_dir)
args = ["-D", "CMAKE_INSTALL_PREFIX=" + context.install_dir]
for i in options.iteritems():
args.extend(["-D", "=".join(i)])
args.extend(["-G", cmake_generator(context.toolchain, context.arch)])
args.append(os.path.relpath(".", build_dir))
system.run_cmd("cmake", args)
os.chdir("..")
def vc_version(toolchain):
"""Return the Visual C++ version from the toolchain string"""
if toolchain.startswith("vc"):
return toolchain[2:]
def vcproj_ext(version):
"""Return file extension for Visual C++ projects"""
if int(version) > 9:
return ".vcxproj"
else:
return ".vcproj"
def vcbuild(context, filepath, config, extras=[]):
"""Build a Visual C++ project file or solution
Uses vcbuild for vc9 and older, msbuild otherwise
Arguments:
context -- BuildContext instance
filepath -- path to project or solution
config -- the solution configuration to use
extras -- extra command line options to pass to vcbuild or msbuild
"""
if context.arch == "x64":
platform = "Win64"
else:
platform = "Win32"
if int(vc_version(context.toolchain)) > 9:
system.run_cmd("msbuild", [filepath, "/m", "/nologo", "/verbosity:minimal",
"/p:Configuration=" + config,
"/p:Platform=" + platform] + extras)
else:
system.run_cmd("vcbuild", [filepath, "{0}|{1}".format(config, platform)] + extras)
|
Add functions for running cmake and vcbuildimport os
import system
def cmake_generator(toolchain, arch):
"""Return name of CMake generator for toolchain"""
generator = ""
if toolchain.startswith("vc"):
if toolchain == "vc9":
generator = "Visual Studio 9 2008"
else:
generator = "Visual Studio " + vc_version(toolchain)
if arch == "x64":
generator = generator + " Win64"
else:
generator = "Unix Makefiles"
return generator
def cmake(context, options={}, build_dir=""):
"""Configure a CMake based project
The current directory is assumed to be the top level source directory
CMAKE_INSTALL_PREFIX will be set to context.install_dir
Arguments:
context -- BuildContext instance
options -- dictionary of CMake cache variables
build_dir -- the directory used for the build (defaults to ./cmake_build)
"""
if not build_dir:
build_dir = "cmake_build"
if not os.path.exists(build_dir):
os.mkdir(build_dir)
os.chdir(build_dir)
args = ["-D", "CMAKE_INSTALL_PREFIX=" + context.install_dir]
for i in options.iteritems():
args.extend(["-D", "=".join(i)])
args.extend(["-G", cmake_generator(context.toolchain, context.arch)])
args.append(os.path.relpath(".", build_dir))
system.run_cmd("cmake", args)
os.chdir("..")
def vc_version(toolchain):
"""Return the Visual C++ version from the toolchain string"""
if toolchain.startswith("vc"):
return toolchain[2:]
def vcproj_ext(version):
"""Return file extension for Visual C++ projects"""
if int(version) > 9:
return ".vcxproj"
else:
return ".vcproj"
def vcbuild(context, filepath, config, extras=[]):
"""Build a Visual C++ project file or solution
Uses vcbuild for vc9 and older, msbuild otherwise
Arguments:
context -- BuildContext instance
filepath -- path to project or solution
config -- the solution configuration to use
extras -- extra command line options to pass to vcbuild or msbuild
"""
if context.arch == "x64":
platform = "Win64"
else:
platform = "Win32"
if int(vc_version(context.toolchain)) > 9:
system.run_cmd("msbuild", [filepath, "/m", "/nologo", "/verbosity:minimal",
"/p:Configuration=" + config,
"/p:Platform=" + platform] + extras)
else:
system.run_cmd("vcbuild", [filepath, "{0}|{1}".format(config, platform)] + extras)
|
<commit_before><commit_msg>Add functions for running cmake and vcbuild<commit_after>import os
import system
def cmake_generator(toolchain, arch):
"""Return name of CMake generator for toolchain"""
generator = ""
if toolchain.startswith("vc"):
if toolchain == "vc9":
generator = "Visual Studio 9 2008"
else:
generator = "Visual Studio " + vc_version(toolchain)
if arch == "x64":
generator = generator + " Win64"
else:
generator = "Unix Makefiles"
return generator
def cmake(context, options={}, build_dir=""):
"""Configure a CMake based project
The current directory is assumed to be the top level source directory
CMAKE_INSTALL_PREFIX will be set to context.install_dir
Arguments:
context -- BuildContext instance
options -- dictionary of CMake cache variables
build_dir -- the directory used for the build (defaults to ./cmake_build)
"""
if not build_dir:
build_dir = "cmake_build"
if not os.path.exists(build_dir):
os.mkdir(build_dir)
os.chdir(build_dir)
args = ["-D", "CMAKE_INSTALL_PREFIX=" + context.install_dir]
for i in options.iteritems():
args.extend(["-D", "=".join(i)])
args.extend(["-G", cmake_generator(context.toolchain, context.arch)])
args.append(os.path.relpath(".", build_dir))
system.run_cmd("cmake", args)
os.chdir("..")
def vc_version(toolchain):
"""Return the Visual C++ version from the toolchain string"""
if toolchain.startswith("vc"):
return toolchain[2:]
def vcproj_ext(version):
"""Return file extension for Visual C++ projects"""
if int(version) > 9:
return ".vcxproj"
else:
return ".vcproj"
def vcbuild(context, filepath, config, extras=[]):
"""Build a Visual C++ project file or solution
Uses vcbuild for vc9 and older, msbuild otherwise
Arguments:
context -- BuildContext instance
filepath -- path to project or solution
config -- the solution configuration to use
extras -- extra command line options to pass to vcbuild or msbuild
"""
if context.arch == "x64":
platform = "Win64"
else:
platform = "Win32"
if int(vc_version(context.toolchain)) > 9:
system.run_cmd("msbuild", [filepath, "/m", "/nologo", "/verbosity:minimal",
"/p:Configuration=" + config,
"/p:Platform=" + platform] + extras)
else:
system.run_cmd("vcbuild", [filepath, "{0}|{1}".format(config, platform)] + extras)
|
|
3b146038ca6aebfdc11920cc688903124ccc2b3a
|
src/ggrc/converters/handlers/document.py
|
src/ggrc/converters/handlers/document.py
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
pass
def set_obj_attr(self):
pass
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
documents = []
for line in self.raw_value.splitlines():
link, title = line.split(None, 1) if " " in line else (line, line)
documents.append(models.Document(
link=link,
title=title,
modified_by_id=get_current_user_id(),
context=self.row_converter.obj.context,
))
return documents
def set_obj_attr(self):
self.value = self.parse_item()
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
|
Add import parser for url and evidence
|
Add import parser for url and evidence
|
Python
|
apache-2.0
|
selahssea/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
pass
def set_obj_attr(self):
pass
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
Add import parser for url and evidence
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
documents = []
for line in self.raw_value.splitlines():
link, title = line.split(None, 1) if " " in line else (line, line)
documents.append(models.Document(
link=link,
title=title,
modified_by_id=get_current_user_id(),
context=self.row_converter.obj.context,
))
return documents
def set_obj_attr(self):
self.value = self.parse_item()
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
|
<commit_before># Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
pass
def set_obj_attr(self):
pass
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
<commit_msg>Add import parser for url and evidence<commit_after>
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
documents = []
for line in self.raw_value.splitlines():
link, title = line.split(None, 1) if " " in line else (line, line)
documents.append(models.Document(
link=link,
title=title,
modified_by_id=get_current_user_id(),
context=self.row_converter.obj.context,
))
return documents
def set_obj_attr(self):
self.value = self.parse_item()
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
pass
def set_obj_attr(self):
pass
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
Add import parser for url and evidence# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
documents = []
for line in self.raw_value.splitlines():
link, title = line.split(None, 1) if " " in line else (line, line)
documents.append(models.Document(
link=link,
title=title,
modified_by_id=get_current_user_id(),
context=self.row_converter.obj.context,
))
return documents
def set_obj_attr(self):
self.value = self.parse_item()
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
|
<commit_before># Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
pass
def set_obj_attr(self):
pass
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
<commit_msg>Add import parser for url and evidence<commit_after># Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Handlers for special object mappings."""
from flask import current_app
from ggrc import models
from ggrc.login import get_current_user_id
from ggrc.converters import errors
from ggrc.converters import get_importables
from ggrc.converters.handlers import handlers
class RequestLinkHandler(handlers.ColumnHandler):
def parse_item(self):
documents = []
for line in self.raw_value.splitlines():
link, title = line.split(None, 1) if " " in line else (line, line)
documents.append(models.Document(
link=link,
title=title,
modified_by_id=get_current_user_id(),
context=self.row_converter.obj.context,
))
return documents
def set_obj_attr(self):
self.value = self.parse_item()
class RequestEvidenceHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
class RequestUrlHandler(RequestLinkHandler):
def get_value(self):
pass
def insert_object(self):
pass
def set_value(self):
"""This should be ignored with second class attributes."""
|
e101f2687a6857f218b9dab277e323edf377a0fb
|
tests/test_partitioned_model.py
|
tests/test_partitioned_model.py
|
from psqlextra.models import PostgresPartitionedModel
from psqlextra.types import PostgresPartitioningMethod
from .fake_model import define_fake_partitioning_model
def test_partitioned_model_abstract():
"""Tests whether :see:PostgresPartitionedModel is abstract."""
assert PostgresPartitionedModel._meta.abstract
def test_partitioning_model_options_meta():
"""Tests whether the `_partitioning_meta` attribute is available on the
class (created by the meta class) and not just created when the model is
instantiated."""
assert PostgresPartitionedModel._partitioning_meta
def test_partitioned_model_default_options():
"""Tests whether the default partitioning options are set as expected on
:see:PostgresPartitionedModel."""
model = define_fake_partitioning_model()
assert model._partitioning_meta.method == PostgresPartitioningMethod.RANGE
assert model._partitioning_meta.key == []
def test_partitioned_model_method_option():
"""Tests whether the `method` partitioning option is properly copied onto
the options object."""
model = define_fake_partitioning_model(
partitioning_options=dict(method=PostgresPartitioningMethod.LIST)
)
assert model._partitioning_meta.method == PostgresPartitioningMethod.LIST
def test_partitioned_model_method_option_none():
"""Tests whether setting the `method` partitioning option results in the
default being set."""
model = define_fake_partitioning_model(
partitioning_options=dict(method=None)
)
assert model._partitioning_meta.method == PostgresPartitioningMethod.RANGE
def test_partitioned_model_key_option():
"""Tests whether the `key` partitioning option is properly copied onto the
options object."""
model = define_fake_partitioning_model(
partitioning_options=dict(key=["timestamp"])
)
assert model._partitioning_meta.key == ["timestamp"]
def test_partitioned_model_key_option_none():
"""Tests whether setting the `key` partitioning option results in the
default being set."""
model = define_fake_partitioning_model(partitioning_options=dict(key=None))
assert model._partitioning_meta.key == []
|
Add tests for PostgresPartitionedModel meta options
|
Add tests for PostgresPartitionedModel meta options
|
Python
|
mit
|
SectorLabs/django-postgres-extra
|
Add tests for PostgresPartitionedModel meta options
|
from psqlextra.models import PostgresPartitionedModel
from psqlextra.types import PostgresPartitioningMethod
from .fake_model import define_fake_partitioning_model
def test_partitioned_model_abstract():
"""Tests whether :see:PostgresPartitionedModel is abstract."""
assert PostgresPartitionedModel._meta.abstract
def test_partitioning_model_options_meta():
"""Tests whether the `_partitioning_meta` attribute is available on the
class (created by the meta class) and not just created when the model is
instantiated."""
assert PostgresPartitionedModel._partitioning_meta
def test_partitioned_model_default_options():
"""Tests whether the default partitioning options are set as expected on
:see:PostgresPartitionedModel."""
model = define_fake_partitioning_model()
assert model._partitioning_meta.method == PostgresPartitioningMethod.RANGE
assert model._partitioning_meta.key == []
def test_partitioned_model_method_option():
"""Tests whether the `method` partitioning option is properly copied onto
the options object."""
model = define_fake_partitioning_model(
partitioning_options=dict(method=PostgresPartitioningMethod.LIST)
)
assert model._partitioning_meta.method == PostgresPartitioningMethod.LIST
def test_partitioned_model_method_option_none():
"""Tests whether setting the `method` partitioning option results in the
default being set."""
model = define_fake_partitioning_model(
partitioning_options=dict(method=None)
)
assert model._partitioning_meta.method == PostgresPartitioningMethod.RANGE
def test_partitioned_model_key_option():
"""Tests whether the `key` partitioning option is properly copied onto the
options object."""
model = define_fake_partitioning_model(
partitioning_options=dict(key=["timestamp"])
)
assert model._partitioning_meta.key == ["timestamp"]
def test_partitioned_model_key_option_none():
"""Tests whether setting the `key` partitioning option results in the
default being set."""
model = define_fake_partitioning_model(partitioning_options=dict(key=None))
assert model._partitioning_meta.key == []
|
<commit_before><commit_msg>Add tests for PostgresPartitionedModel meta options<commit_after>
|
from psqlextra.models import PostgresPartitionedModel
from psqlextra.types import PostgresPartitioningMethod
from .fake_model import define_fake_partitioning_model
def test_partitioned_model_abstract():
"""Tests whether :see:PostgresPartitionedModel is abstract."""
assert PostgresPartitionedModel._meta.abstract
def test_partitioning_model_options_meta():
"""Tests whether the `_partitioning_meta` attribute is available on the
class (created by the meta class) and not just created when the model is
instantiated."""
assert PostgresPartitionedModel._partitioning_meta
def test_partitioned_model_default_options():
"""Tests whether the default partitioning options are set as expected on
:see:PostgresPartitionedModel."""
model = define_fake_partitioning_model()
assert model._partitioning_meta.method == PostgresPartitioningMethod.RANGE
assert model._partitioning_meta.key == []
def test_partitioned_model_method_option():
"""Tests whether the `method` partitioning option is properly copied onto
the options object."""
model = define_fake_partitioning_model(
partitioning_options=dict(method=PostgresPartitioningMethod.LIST)
)
assert model._partitioning_meta.method == PostgresPartitioningMethod.LIST
def test_partitioned_model_method_option_none():
"""Tests whether setting the `method` partitioning option results in the
default being set."""
model = define_fake_partitioning_model(
partitioning_options=dict(method=None)
)
assert model._partitioning_meta.method == PostgresPartitioningMethod.RANGE
def test_partitioned_model_key_option():
"""Tests whether the `key` partitioning option is properly copied onto the
options object."""
model = define_fake_partitioning_model(
partitioning_options=dict(key=["timestamp"])
)
assert model._partitioning_meta.key == ["timestamp"]
def test_partitioned_model_key_option_none():
"""Tests whether setting the `key` partitioning option results in the
default being set."""
model = define_fake_partitioning_model(partitioning_options=dict(key=None))
assert model._partitioning_meta.key == []
|
Add tests for PostgresPartitionedModel meta options
from psqlextra.models import PostgresPartitionedModel
from psqlextra.types import PostgresPartitioningMethod
from .fake_model import define_fake_partitioning_model
def test_partitioned_model_abstract():
"""Tests whether :see:PostgresPartitionedModel is abstract."""
assert PostgresPartitionedModel._meta.abstract
def test_partitioning_model_options_meta():
"""Tests whether the `_partitioning_meta` attribute is available on the
class (created by the meta class) and not just created when the model is
instantiated."""
assert PostgresPartitionedModel._partitioning_meta
def test_partitioned_model_default_options():
"""Tests whether the default partitioning options are set as expected on
:see:PostgresPartitionedModel."""
model = define_fake_partitioning_model()
assert model._partitioning_meta.method == PostgresPartitioningMethod.RANGE
assert model._partitioning_meta.key == []
def test_partitioned_model_method_option():
"""Tests whether the `method` partitioning option is properly copied onto
the options object."""
model = define_fake_partitioning_model(
partitioning_options=dict(method=PostgresPartitioningMethod.LIST)
)
assert model._partitioning_meta.method == PostgresPartitioningMethod.LIST
def test_partitioned_model_method_option_none():
"""Tests whether setting the `method` partitioning option results in the
default being set."""
model = define_fake_partitioning_model(
partitioning_options=dict(method=None)
)
assert model._partitioning_meta.method == PostgresPartitioningMethod.RANGE
def test_partitioned_model_key_option():
"""Tests whether the `key` partitioning option is properly copied onto the
options object."""
model = define_fake_partitioning_model(
partitioning_options=dict(key=["timestamp"])
)
assert model._partitioning_meta.key == ["timestamp"]
def test_partitioned_model_key_option_none():
"""Tests whether setting the `key` partitioning option results in the
default being set."""
model = define_fake_partitioning_model(partitioning_options=dict(key=None))
assert model._partitioning_meta.key == []
|
<commit_before><commit_msg>Add tests for PostgresPartitionedModel meta options<commit_after>
from psqlextra.models import PostgresPartitionedModel
from psqlextra.types import PostgresPartitioningMethod
from .fake_model import define_fake_partitioning_model
def test_partitioned_model_abstract():
"""Tests whether :see:PostgresPartitionedModel is abstract."""
assert PostgresPartitionedModel._meta.abstract
def test_partitioning_model_options_meta():
"""Tests whether the `_partitioning_meta` attribute is available on the
class (created by the meta class) and not just created when the model is
instantiated."""
assert PostgresPartitionedModel._partitioning_meta
def test_partitioned_model_default_options():
"""Tests whether the default partitioning options are set as expected on
:see:PostgresPartitionedModel."""
model = define_fake_partitioning_model()
assert model._partitioning_meta.method == PostgresPartitioningMethod.RANGE
assert model._partitioning_meta.key == []
def test_partitioned_model_method_option():
"""Tests whether the `method` partitioning option is properly copied onto
the options object."""
model = define_fake_partitioning_model(
partitioning_options=dict(method=PostgresPartitioningMethod.LIST)
)
assert model._partitioning_meta.method == PostgresPartitioningMethod.LIST
def test_partitioned_model_method_option_none():
"""Tests whether setting the `method` partitioning option results in the
default being set."""
model = define_fake_partitioning_model(
partitioning_options=dict(method=None)
)
assert model._partitioning_meta.method == PostgresPartitioningMethod.RANGE
def test_partitioned_model_key_option():
"""Tests whether the `key` partitioning option is properly copied onto the
options object."""
model = define_fake_partitioning_model(
partitioning_options=dict(key=["timestamp"])
)
assert model._partitioning_meta.key == ["timestamp"]
def test_partitioned_model_key_option_none():
"""Tests whether setting the `key` partitioning option results in the
default being set."""
model = define_fake_partitioning_model(partitioning_options=dict(key=None))
assert model._partitioning_meta.key == []
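For readers unfamiliar with the pattern these tests exercise, here is an illustrative sketch (not psqlextra's actual implementation) of how a metaclass can attach a `_partitioning_meta` options object to the class itself, applying defaults when options are omitted or None:
class PartitioningMeta:
    def __init__(self, method=None, key=None):
        self.method = method or "range"  # stand-in for PostgresPartitioningMethod.RANGE
        self.key = key or []

class ModelBase(type):
    def __new__(mcs, name, bases, attrs):
        opts = attrs.pop("partitioning_options", {})
        cls = super().__new__(mcs, name, bases, attrs)
        cls._partitioning_meta = PartitioningMeta(**opts)  # set at class creation time
        return cls

class FakeModel(metaclass=ModelBase):
    partitioning_options = dict(method=None, key=["timestamp"])

print(FakeModel._partitioning_meta.method)  # -> "range" (default applied for None)
print(FakeModel._partitioning_meta.key)     # -> ['timestamp']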
|
|
db23070f9b740d559b84724ad0ed61e38eb15dec
|
multigraph.py
|
multigraph.py
|
"""
An extension of a standard cligraph for plotting
graphs with subplots using gridspec
"""
import matplotlib
import matplotlib.gridspec as gridspec
from cligraph import CLIgraph
class MultiGraph(CLIgraph):
def __init__(self, num_plots_x, num_plots_y, **kwargs):
super(MultiGraph, self).__init__(**kwargs)
self.num_plots_x = num_plots_x
self.num_plots_y = num_plots_y
def get_parser(self):
parser = super(MultiGraph, self).get_parser()
parser.add_argument('--multigraph-specific')
return parser
def format_axes(self, axes, cli_args):
"""
Apply formatting to the axes by iterating over our axes
"""
for ax in axes:
super(MultiGraph, self).format_axes(ax, cli_args)
def create_axes(self, fig, cli_args):
"""
Create the axes for this graph using gridspec for subplots
"""
self.grid_spec = gridspec.GridSpec(self.num_plots_x, self.num_plots_y)
axes = [fig.add_subplot(sp) for sp in self.grid_spec]
return axes
def apply_lables_and_titles(self, fig, axes, cli_args):
"""
Set graph titles and labels. With multiple plots, grid_spec dimensions are adjusted
to make space
"""
title, x_label, y_label = map(lambda s: s.decode('string_escape'), [
cli_args.title, cli_args.x_label, cli_args.y_label])
# For multiple axes, we make room and apply text(); this keeps things working
# with tight layout
if title:
self.gs_top -= 0.02
self.plt.suptitle(title)
if x_label:
# Adjust rather than set, children can then make space for other graphics
self.gs_bottom += 0.02
fig.text(0.5, self.gs_bottom, cli_args.x_label, ha='center', va='center')
if y_label:
self.gs_left += 0.02
fig.text(self.gs_left, 0.5, cli_args.y_label,
ha='center', va='center', rotation='vertical')
def finalise(self, fig, cli_args):
"""
Set final graph attributes then show and/or save
"""
self.grid_spec.tight_layout(
fig, rect=[self.gs_left, self.gs_bottom, self.gs_right, self.gs_top])
if cli_args.save is not None:
for format in cli_args.save_formats.split(','):
self.plt.savefig(cli_args.save + '.' + format)
if not cli_args.quiet:
self.plt.show()
|
Split support for subplots into a MultiGraph subclass
|
Split support for subplots into a MultiGraph subclass
|
Python
|
agpl-3.0
|
bsmithers/CLIgraphs
|
Split support for subplots into a MultiGraph subclass
|
"""
An extension of a standard cligraph for plotting
graphs with subplots using gridspec
"""
import matplotlib
import matplotlib.gridspec as gridspec
from cligraph import CLIgraph
class MultiGraph(CLIgraph):
def __init__(self, num_plots_x, num_plots_y, **kwargs):
super(MultiGraph, self).__init__(**kwargs)
self.num_plots_x = num_plots_x
self.num_plots_y = num_plots_y
def get_parser(self):
parser = super(MultiGraph, self).get_parser()
parser.add_argument('--multigraph-specific')
return parser
def format_axes(self, axes, cli_args):
"""
Apply formatting to the axes by iterating over our axes
"""
for ax in axes:
super(MultiGraph, self).format_axes(ax, cli_args)
def create_axes(self, fig, cli_args):
"""
Create the axes for this graph using gridspec for subplots
"""
self.grid_spec = gridspec.GridSpec(self.num_plots_x, self.num_plots_y)
axes = [fig.add_subplot(sp) for sp in self.grid_spec]
return axes
def apply_lables_and_titles(self, fig, axes, cli_args):
"""
Set graph titles and labels. With multiple plots, grid_spec dimensions are adjusted
to make space
"""
title, x_label, y_label = map(lambda s: s.decode('string_escape'), [
cli_args.title, cli_args.x_label, cli_args.y_label])
# For multiple axes, we make room and apply text(); this keeps things working
# with tight layout
if title:
self.gs_top -= 0.02
self.plt.suptitle(title)
if x_label:
# Adjust rather than set, children can then make space for other graphics
self.gs_bottom += 0.02
fig.text(0.5, self.gs_bottom, cli_args.x_label, ha='center', va='center')
if y_label:
self.gs_left += 0.02
fig.text(self.gs_left, 0.5, cli_args.y_label,
ha='center', va='center', rotation='vertical')
def finalise(self, fig, cli_args):
"""
Set final graph attributes then show and/or save
"""
self.grid_spec.tight_layout(
fig, rect=[self.gs_left, self.gs_bottom, self.gs_right, self.gs_top])
if cli_args.save is not None:
for format in cli_args.save_formats.split(','):
self.plt.savefig(cli_args.save + '.' + format)
if not cli_args.quiet:
self.plt.show()
|
<commit_before><commit_msg>Split support for subplots into a MultiGraph subclass<commit_after>
|
"""
An extension of a standard cligraph for plotting
graphs with subplots using gridspec
"""
import matplotlib
import matplotlib.gridspec as gridspec
from cligraph import CLIgraph
class MultiGraph(CLIgraph):
def __init__(self, num_plots_x, num_plots_y, **kwargs):
super(MultiGraph, self).__init__(**kwargs)
self.num_plots_x = num_plots_x
self.num_plots_y = num_plots_y
def get_parser(self):
parser = super(MultiGraph, self).get_parser()
parser.add_argument('--multigraph-specific')
return parser
def format_axes(self, axes, cli_args):
"""
Apply formatting to the axes by iterating over our axes
"""
for ax in axes:
super(MultiGraph, self).format_axes(ax, cli_args)
def create_axes(self, fig, cli_args):
"""
Create the axes for this graph using gridspec for subplots
"""
self.grid_spec = gridspec.GridSpec(self.num_plots_x, self.num_plots_y)
axes = [fig.add_subplot(sp) for sp in self.grid_spec]
return axes
def apply_lables_and_titles(self, fig, axes, cli_args):
"""
Set graph titles and labels. With multiple plots, grid_spec dimensions are adjusted
to make space
"""
title, x_label, y_label = map(lambda s: s.decode('string_escape'), [
cli_args.title, cli_args.x_label, cli_args.y_label])
# For multiple axes, we make room and apply text(); this keeps things working
# with tight layout
if title:
self.gs_top -= 0.02
self.plt.suptitle(title)
if x_label:
# Adjust rather than set, children can then make space for other graphics
self.gs_bottom += 0.02
fig.text(0.5, self.gs_bottom, cli_args.x_label, ha='center', va='center')
if y_label:
self.gs_left += 0.02
fig.text(self.gs_left, 0.5, cli_args.y_label,
ha='center', va='center', rotation='vertical')
def finalise(self, fig, cli_args):
"""
Set final graph attributes then show and/or save
"""
self.grid_spec.tight_layout(
fig, rect=[self.gs_left, self.gs_bottom, self.gs_right, self.gs_top])
if cli_args.save is not None:
for format in cli_args.save_formats.split(','):
self.plt.savefig(cli_args.save + '.' + format)
if not cli_args.quiet:
self.plt.show()
|
Split support for subplots into a MultiGraph subclass"""
An extension of a standard cligraph for plotting
graphs with subplots using gridspec
"""
import matplotlib
import matplotlib.gridspec as gridspec
from cligraph import CLIgraph
class MultiGraph(CLIgraph):
def __init__(self, num_plots_x, num_plots_y, **kwargs):
super(MultiGraph, self).__init__(**kwargs)
self.num_plots_x = num_plots_x
self.num_plots_y = num_plots_y
def get_parser(self):
parser = super(MultiGraph, self).get_parser()
parser.add_argument('--multigraph-specific')
return parser
def format_axes(self, axes, cli_args):
"""
Apply formatting to the axes by iterating over our axes
"""
for ax in axes:
super(MultiGraph, self).format_axes(ax, cli_args)
def create_axes(self, fig, cli_args):
"""
Create the axes for this graph using gridspec for subplots
"""
self.grid_spec = gridspec.GridSpec(self.num_plots_x, self.num_plots_y)
axes = [fig.add_subplot(sp) for sp in self.grid_spec]
return axes
def apply_lables_and_titles(self, fig, axes, cli_args):
"""
Set graph titles and labels. With multiple plots, grid_spec dimensions are adjusted
to make space
"""
title, x_label, y_label = map(lambda s: s.decode('string_escape'), [
cli_args.title, cli_args.x_label, cli_args.y_label])
# For multiple axes, we make room and apply text(); this keeps things working
# with tight layout
if title:
self.gs_top -= 0.02
self.plt.suptitle(title)
if x_label:
# Adjust rather than set, children can then make space for other graphics
self.gs_bottom += 0.02
fig.text(0.5, self.gs_bottom, cli_args.x_label, ha='center', va='center')
if y_label:
self.gs_left += 0.02
fig.text(self.gs_left, 0.5, cli_args.y_label,
ha='center', va='center', rotation='vertical')
def finalise(self, fig, cli_args):
"""
Set final graph attributes then show and/or save
"""
self.grid_spec.tight_layout(
fig, rect=[self.gs_left, self.gs_bottom, self.gs_right, self.gs_top])
if cli_args.save is not None:
for format in cli_args.save_formats.split(','):
self.plt.savefig(cli_args.save + '.' + format)
if not cli_args.quiet:
self.plt.show()
|
<commit_before><commit_msg>Split support for subplots into a MultiGraph subclass<commit_after>"""
An extension of a standard cligraph for plotting
graphs with subplots using gridspec
"""
import matplotlib
import matplotlib.gridspec as gridspec
from cligraph import CLIgraph
class MultiGraph(CLIgraph):
def __init__(self, num_plots_x, num_plots_y, **kwargs):
super(MultiGraph, self).__init__(**kwargs)
self.num_plots_x = num_plots_x
self.num_plots_y = num_plots_y
def get_parser(self):
parser = super(MultiGraph, self).get_parser()
parser.add_argument('--multigraph-specific')
return parser
def format_axes(self, axes, cli_args):
"""
Apply formatting to the axes by iterating over our axes
"""
for ax in axes:
super(MultiGraph, self).format_axes(ax, cli_args)
def create_axes(self, fig, cli_args):
"""
Create the axes for this graph using gridspec for subplots
"""
self.grid_spec = gridspec.GridSpec(self.num_plots_x, self.num_plots_y)
axes = [fig.add_subplot(sp) for sp in self.grid_spec]
return axes
def apply_lables_and_titles(self, fig, axes, cli_args):
"""
Set graph titles and labels. With multiple plots, grid_spec dimensions are adjusted
to make space
"""
title, x_label, y_label = map(lambda s: s.decode('string_escape'), [
cli_args.title, cli_args.x_label, cli_args.y_label])
# For multiple axes, we make room and apply text(); this keeps things working
# with tight layout
if title:
self.gs_top -= 0.02
self.plt.suptitle(title)
if x_label:
# Adjust rather than set, children can then make space for other graphics
self.gs_bottom += 0.02
fig.text(0.5, self.gs_bottom, cli_args.x_label, ha='center', va='center')
if y_label:
self.gs_left += 0.02
fig.text(self.gs_left, 0.5, cli_args.y_label,
ha='center', va='center', rotation='vertical')
def finalise(self, fig, cli_args):
"""
Set final graph attributes then show and/or save
"""
self.grid_spec.tight_layout(
fig, rect=[self.gs_left, self.gs_bottom, self.gs_right, self.gs_top])
if cli_args.save is not None:
for format in cli_args.save_formats.split(','):
self.plt.savefig(cli_args.save + '.' + format)
if not cli_args.quiet:
self.plt.show()
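The gridspec layout trick above (shared labels via fig.text plus a tight_layout rect) can be tried in isolation with plain matplotlib, no CLIgraph base class required; the margin values here are illustrative:
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec

fig = plt.figure()
gs = gridspec.GridSpec(2, 2)
axes = [fig.add_subplot(sp) for sp in gs]  # one axes per grid cell, as above
fig.text(0.5, 0.02, "shared x label", ha="center", va="center")
fig.text(0.02, 0.5, "shared y label", ha="center", va="center", rotation="vertical")
gs.tight_layout(fig, rect=[0.04, 0.04, 1, 0.96])  # leave room for the shared labels
plt.show()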
|
|
de57486712d60995dd4e75ace21b9ce4f824a552
|
WeeklyLogParser.py
|
WeeklyLogParser.py
|
import sys
import re
"""
To do:
- Add web interaction to fetch the input string
directly from the web instead of having to pass it
manually as a command-line argument
"""
try:
dates = sys.argv[1]
listDates = dates.split('\n')
xfConsumptionRegex = re.compile(r'[\d,]+$')
xfConsumptionWeekly = [int(re.sub(r',', '', re.search(xfConsumptionRegex, i).group())) for i in listDates]
sumXFConsumption = sum(xfConsumptionWeekly)
#print("List of Dates")
#print(listDates)
print("XF Consumption List")
print(xfConsumptionWeekly)
print("Sum of XF Consumption")
print(sumXFConsumption)
except IndexError:
print("""At least one argument required.
Example:
05 Jul 2015 1,546 10,260 0 11,806
06 Jul 2015 1,520 10,260 0 11,780
07 Jul 2015 1,530 10,247 0 11,777
08 Jul 2015 1,524 10,257 0 11,781
09 Jul 2015 1,516 10,261 0 11,777
10 Jul 2015 1,494 10,209 0 11,703
11 Jul 2015 1,506 10,272 0 11,778
12 Jul 2015 1,470 10,193 0 11,663""")
|
Add parser for weekly log
|
Add parser for weekly log
|
Python
|
mit
|
josecolella/Dynatrace-Resources
|
Add parser for weekly log
|
import sys
import re
"""
To do:
- Add web interaction to fetch the input string
directly from the web instead of having to pass it
manually as a command-line argument
"""
try:
dates = sys.argv[1]
listDates = dates.split('\n')
xfConsumptionRegex = re.compile(r'[\d,]+$')
xfConsumptionWeekly = [int(re.sub(r',', '', re.search(xfConsumptionRegex, i).group())) for i in listDates]
sumXFConsumption = sum(xfConsumptionWeekly)
#print("List of Dates")
#print(listDates)
print("XF Consumption List")
print(xfConsumptionWeekly)
print("Sum of XF Consumption")
print(sumXFConsumption)
except IndexError:
print("""At least one argument required.
Example:
05 Jul 2015 1,546 10,260 0 11,806
06 Jul 2015 1,520 10,260 0 11,780
07 Jul 2015 1,530 10,247 0 11,777
08 Jul 2015 1,524 10,257 0 11,781
09 Jul 2015 1,516 10,261 0 11,777
10 Jul 2015 1,494 10,209 0 11,703
11 Jul 2015 1,506 10,272 0 11,778
12 Jul 2015 1,470 10,193 0 11,663""")
|
<commit_before><commit_msg>Add parser for weekly log<commit_after>
|
import sys
import re
"""
To do:
- Add web interaction to fetch the input string
directly from the web instead of having to pass it
manually as a command-line argument
"""
try:
dates = sys.argv[1]
listDates = dates.split('\n')
xfConsumptionRegex = re.compile(r'[\d,]+$')
xfConsumptionWeekly = [int(re.sub(r',', '', re.search(xfConsumptionRegex, i).group())) for i in listDates]
sumXFConsumption = sum(xfConsumptionWeekly)
#print("List of Dates")
#print(listDates)
print("XF Consumption List")
print(xfConsumptionWeekly)
print("Sum of XF Consumption")
print(sumXFConsumption)
except IndexError:
print("""At least one argument required.
Example:
05 Jul 2015 1,546 10,260 0 11,806
06 Jul 2015 1,520 10,260 0 11,780
07 Jul 2015 1,530 10,247 0 11,777
08 Jul 2015 1,524 10,257 0 11,781
09 Jul 2015 1,516 10,261 0 11,777
10 Jul 2015 1,494 10,209 0 11,703
11 Jul 2015 1,506 10,272 0 11,778
12 Jul 2015 1,470 10,193 0 11,663""")
|
Add parser for weekly logimport sys
import re
"""
To do:
- Add web interaction to fetch the input string
directly from the web instead of having to pass it
manually as a command-line argument
"""
try:
dates = sys.argv[1]
listDates = dates.split('\n')
xfConsumptionRegex = re.compile(r'[\d,]+$')
xfConsumptionWeekly = [int(re.sub(r',', '', re.search(xfConsumptionRegex, i).group())) for i in listDates]
sumXFConsumption = sum(xfConsumptionWeekly)
#print("List of Dates")
#print(listDates)
print("XF Consumption List")
print(xfConsumptionWeekly)
print("Sum of XF Consumption")
print(sumXFConsumption)
except IndexError:
print("""At least one argument required.
Example:
05 Jul 2015 1,546 10,260 0 11,806
06 Jul 2015 1,520 10,260 0 11,780
07 Jul 2015 1,530 10,247 0 11,777
08 Jul 2015 1,524 10,257 0 11,781
09 Jul 2015 1,516 10,261 0 11,777
10 Jul 2015 1,494 10,209 0 11,703
11 Jul 2015 1,506 10,272 0 11,778
12 Jul 2015 1,470 10,193 0 11,663""")
|
<commit_before><commit_msg>Add parser for weekly log<commit_after>import sys
import re
"""
To do:
- Add web interaction to fetch the input string
directly from the web instead of having to pass it
manually as a command-line argument
"""
try:
dates = sys.argv[1]
listDates = dates.split('\n')
xfConsumptionRegex = re.compile(r'[\d,]+$')
xfConsumptionWeekly = [int(re.sub(r',', '', re.search(xfConsumptionRegex, i).group())) for i in listDates]
sumXFConsumption = sum(xfConsumptionWeekly)
#print("List of Dates")
#print(listDates)
print("XF Consumption List")
print(xfConsumptionWeekly)
print("Sum of XF Consumption")
print(sumXFConsumption)
except IndexError:
print("""At least one argument required.
Example:
05 Jul 2015 1,546 10,260 0 11,806
06 Jul 2015 1,520 10,260 0 11,780
07 Jul 2015 1,530 10,247 0 11,777
08 Jul 2015 1,524 10,257 0 11,781
09 Jul 2015 1,516 10,261 0 11,777
10 Jul 2015 1,494 10,209 0 11,703
11 Jul 2015 1,506 10,272 0 11,778
12 Jul 2015 1,470 10,193 0 11,663""")
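A quick check of the extraction logic on one sample line from the usage text above: the trailing comma-separated number is captured, commas are stripped, and the result is converted to an int before summing:
import re

line = "05 Jul 2015 1,546 10,260 0 11,806"
match = re.search(r'[\d,]+$', line)           # grab the trailing number, commas included
value = int(re.sub(r',', '', match.group()))  # strip commas before converting
print(value)  # -> 11806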
|
|
833d0ee1622530200ebd2614bc6939abba30493c
|
setup/bin/swc-nano-installer.py
|
setup/bin/swc-nano-installer.py
|
#!/usr/bin/env python
"""Software Carpentry Nano Installer for Windows
Installs nano and makes it the default editor in msysgit
To use:
1. Install Python
2. Install msysgit
http://code.google.com/p/msysgit/downloads/list?q=full+installer+official+git
3. Run swc_nano_installer.py
You should be able to simply double click the file in Windows
This is a stripped down version of swc_windows_installer.py
originally written by Ethan White and W. Trevor Price.
"""
try: # Python 3
from io import BytesIO as _BytesIO
except ImportError: # Python 2
from StringIO import StringIO as _BytesIO
import os
try: # Python 3
from urllib.request import urlopen as _urlopen
except ImportError: # Python 2
from urllib2 import urlopen as _urlopen
import zipfile
def install_nano(install_dir):
"""Download and install the nano text editor"""
url = "http://www.nano-editor.org/dist/v2.2/NT/nano-2.2.6.zip"
r = _urlopen(url)
nano_zip_content = _BytesIO(r.read())
nano_zip = zipfile.ZipFile(nano_zip_content)
nano_files = ['nano.exe', 'cygwin1.dll', 'cygintl-8.dll',
'cygiconv-2.dll', 'cyggcc_s-1.dll']
for file_name in nano_files:
nano_zip.extract(file_name, install_dir)
def make_bashrc(home_dir, nano_dir):
"""Creates a .bashrc file for nano setup
Adds nano to the path and sets the default editor to nano
"""
nano_path = make_posix_path(nano_dir)
contents = '\n'.join([
'export PATH=$PATH:%s' % nano_path,
'export EDITOR=nano',
'',
])
with open(os.path.join(home_dir, '.bashrc'), 'w') as f:
f.write(contents)
def make_posix_path(windows_path):
"""Convert a Windows path to a posix path"""
return windows_path.replace('\\', '/').replace('C:', '/c')
def main():
home_dir = os.path.expanduser("~")
nano_dir = os.path.join(home_dir, '.nano')
#home_dir = "/home/ethan/swc-nano-test"
if not os.path.exists(nano_dir):
os.makedirs(nano_dir)
install_nano(nano_dir)
make_bashrc(home_dir, nano_dir)
if __name__ == '__main__':
main()
|
Add a Nano installer for Windows
|
Add a Nano installer for Windows
1. Downloads and installs Nano into the users home directory
2. Adds Nano to the path
3. Makes Nano the default editor
|
Python
|
bsd-2-clause
|
selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,selimnairb/2014-02-25-swctest,swcarpentry/windows-installer,wking/swc-windows-installer,ethanwhite/windows-installer
|
Add a Nano installer for Windows
1. Downloads and installs Nano into the users home directory
2. Adds Nano to the path
3. Makes Nano the default editor
|
#!/usr/bin/env python
"""Software Carpentry Nano Installer for Windows
Installs nano and makes it the default editor in msysgit
To use:
1. Install Python
2. Install msysgit
http://code.google.com/p/msysgit/downloads/list?q=full+installer+official+git
3. Run swc_nano_installer.py
You should be able to simply double click the file in Windows
This is a stripped down version of swc_windows_installer.py
originally written by Ethan White and W. Trevor Price.
"""
try: # Python 3
from io import BytesIO as _BytesIO
except ImportError: # Python 2
from StringIO import StringIO as _BytesIO
import os
try: # Python 3
from urllib.request import urlopen as _urlopen
except ImportError: # Python 2
from urllib2 import urlopen as _urlopen
import zipfile
def install_nano(install_dir):
"""Download and install the nano text editor"""
url = "http://www.nano-editor.org/dist/v2.2/NT/nano-2.2.6.zip"
r = _urlopen(url)
nano_zip_content = _BytesIO(r.read())
nano_zip = zipfile.ZipFile(nano_zip_content)
nano_files = ['nano.exe', 'cygwin1.dll', 'cygintl-8.dll',
'cygiconv-2.dll', 'cyggcc_s-1.dll']
for file_name in nano_files:
nano_zip.extract(file_name, install_dir)
def make_bashrc(home_dir, nano_dir):
"""Creates a .bashrc file for nano setup
Adds nano to the path and sets the default editor to nano
"""
nano_path = make_posix_path(nano_dir)
contents = '\n'.join([
'export PATH=$PATH:%s' % nano_path,
'export EDITOR=nano',
'',
])
with open(os.path.join(home_dir, '.bashrc'), 'w') as f:
f.write(contents)
def make_posix_path(windows_path):
"""Convert a Windows path to a posix path"""
return windows_path.replace('\\', '/').replace('C:', '/c')
def main():
home_dir = os.path.expanduser("~")
nano_dir = os.path.join(home_dir, '.nano')
#home_dir = "/home/ethan/swc-nano-test"
if not os.path.exists(nano_dir):
os.makedirs(nano_dir)
install_nano(nano_dir)
make_bashrc(home_dir, nano_dir)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a Nano installer for Windows
1. Downloads and installs Nano into the users home directory
2. Adds Nano to the path
3. Makes Nano the default editor<commit_after>
|
#!/usr/bin/env python
"""Software Carpentry Nano Installer for Windows
Installs nano and makes it the default editor in msysgit
To use:
1. Install Python
2. Install msysgit
http://code.google.com/p/msysgit/downloads/list?q=full+installer+official+git
3. Run swc_nano_installer.py
You should be able to simply double click the file in Windows
This is a stripped down version of swc_windows_installer.py
originally written by Ethan White and W. Trevor Price.
"""
try: # Python 3
from io import BytesIO as _BytesIO
except ImportError: # Python 2
from StringIO import StringIO as _BytesIO
import os
try: # Python 3
from urllib.request import urlopen as _urlopen
except ImportError: # Python 2
from urllib2 import urlopen as _urlopen
import zipfile
def install_nano(install_dir):
"""Download and install the nano text editor"""
url = "http://www.nano-editor.org/dist/v2.2/NT/nano-2.2.6.zip"
r = _urlopen(url)
nano_zip_content = _BytesIO(r.read())
nano_zip = zipfile.ZipFile(nano_zip_content)
nano_files = ['nano.exe', 'cygwin1.dll', 'cygintl-8.dll',
'cygiconv-2.dll', 'cyggcc_s-1.dll']
for file_name in nano_files:
nano_zip.extract(file_name, install_dir)
def make_bashrc(home_dir, nano_dir):
"""Creates a .bashrc file for nano setup
Adds nano to the path and sets the default editor to nano
"""
nano_path = make_posix_path(nano_dir)
contents = '\n'.join([
'export PATH=$PATH:%s' % nano_path,
'export EDITOR=nano',
'',
])
with open(os.path.join(home_dir, '.bashrc'), 'w') as f:
f.write(contents)
def make_posix_path(windows_path):
"""Convert a Windows path to a posix path"""
return windows_path.replace('\\', '/').replace('C:', '/c')
def main():
home_dir = os.path.expanduser("~")
nano_dir = os.path.join(home_dir, '.nano')
#home_dir = "/home/ethan/swc-nano-test"
if not os.path.exists(nano_dir):
os.makedirs(nano_dir)
install_nano(nano_dir)
make_bashrc(home_dir, nano_dir)
if __name__ == '__main__':
main()
|
Add a Nano installer for Windows
1. Downloads and installs Nano into the users home directory
2. Adds Nano to the path
3. Makes Nano the default editor#!/usr/bin/env python
"""Software Carpentry Nano Installer for Windows
Installs nano and makes it the default editor in msysgit
To use:
1. Install Python
2. Install msysgit
http://code.google.com/p/msysgit/downloads/list?q=full+installer+official+git
3. Run swc_nano_installer.py
You should be able to simply double click the file in Windows
This is a stripped down version of swc_windows_installer.py
originally written by Ethan White and W. Trevor Price.
"""
try: # Python 3
from io import BytesIO as _BytesIO
except ImportError: # Python 2
from StringIO import StringIO as _BytesIO
import os
try: # Python 3
from urllib.request import urlopen as _urlopen
except ImportError: # Python 2
from urllib2 import urlopen as _urlopen
import zipfile
def install_nano(install_dir):
"""Download and install the nano text editor"""
url = "http://www.nano-editor.org/dist/v2.2/NT/nano-2.2.6.zip"
r = _urlopen(url)
nano_zip_content = _BytesIO(r.read())
nano_zip = zipfile.ZipFile(nano_zip_content)
nano_files = ['nano.exe', 'cygwin1.dll', 'cygintl-8.dll',
'cygiconv-2.dll', 'cyggcc_s-1.dll']
for file_name in nano_files:
nano_zip.extract(file_name, install_dir)
def make_bashrc(home_dir, nano_dir):
"""Creates a .bashrc file for nano setup
Adds nano to the path and sets the default editor to nano
"""
nano_path = make_posix_path(nano_dir)
contents = '\n'.join([
'export PATH=$PATH:%s' % nano_path,
'export EDITOR=nano',
'',
])
with open(os.path.join(home_dir, '.bashrc'), 'w') as f:
f.write(contents)
def make_posix_path(windows_path):
"""Convert a Windows path to a posix path"""
return windows_path.replace('\\', '/').replace('C:', '/c')
def main():
home_dir = os.path.expanduser("~")
nano_dir = os.path.join(home_dir, '.nano')
#home_dir = "/home/ethan/swc-nano-test"
if not os.path.exists(nano_dir):
os.makedirs(nano_dir)
install_nano(nano_dir)
make_bashrc(home_dir, nano_dir)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a Nano installer for Windows
1. Downloads and installs Nano into the users home directory
2. Adds Nano to the path
3. Makes Nano the default editor<commit_after>#!/usr/bin/env python
"""Software Carpentry Nano Installer for Windows
Installs nano and makes it the default editor in msysgit
To use:
1. Install Python
2. Install msysgit
http://code.google.com/p/msysgit/downloads/list?q=full+installer+official+git
3. Run swc_nano_installer.py
You should be able to simply double click the file in Windows
This is a stripped down version of swc_windows_installer.py
originally written by Ethan White and W. Trevor Price.
"""
try: # Python 3
from io import BytesIO as _BytesIO
except ImportError: # Python 2
from StringIO import StringIO as _BytesIO
import os
try: # Python 3
from urllib.request import urlopen as _urlopen
except ImportError: # Python 2
from urllib2 import urlopen as _urlopen
import zipfile
def install_nano(install_dir):
"""Download and install the nano text editor"""
url = "http://www.nano-editor.org/dist/v2.2/NT/nano-2.2.6.zip"
r = _urlopen(url)
nano_zip_content = _BytesIO(r.read())
nano_zip = zipfile.ZipFile(nano_zip_content)
nano_files = ['nano.exe', 'cygwin1.dll', 'cygintl-8.dll',
'cygiconv-2.dll', 'cyggcc_s-1.dll']
for file_name in nano_files:
nano_zip.extract(file_name, install_dir)
def make_bashrc(home_dir, nano_dir):
"""Creates a .bashrc file for nano setup
Adds nano to the path and sets the default editor to nano
"""
nano_path = make_posix_path(nano_dir)
contents = '\n'.join([
'export PATH=$PATH:%s' % nano_path,
'export EDITOR=nano',
'',
])
with open(os.path.join(home_dir, '.bashrc'), 'w') as f:
f.write(contents)
def make_posix_path(windows_path):
"""Convert a Windows path to a posix path"""
return windows_path.replace('\\', '/').replace('C:', '/c')
def main():
home_dir = os.path.expanduser("~")
nano_dir = os.path.join(home_dir, '.nano')
#home_dir = "/home/ethan/swc-nano-test"
if not os.path.exists(nano_dir):
os.makedirs(nano_dir)
install_nano(nano_dir)
make_bashrc(home_dir, nano_dir)
if __name__ == '__main__':
main()
|
|
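A quick sanity check of the path conversion in the record above; the two-line helper is copied verbatim so the sketch runs on its own:
def make_posix_path(windows_path):
    """Convert a Windows path to a posix path"""
    return windows_path.replace('\\', '/').replace('C:', '/c')
# msysgit-style path: drive letter becomes /c, backslashes become slashes
assert make_posix_path('C:\\Users\\me\\.nano') == '/c/Users/me/.nano'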
2ab1e008ab5626b96767bb40a6e365c464019d0f
|
skrt/text.py
|
skrt/text.py
|
FG_COLORS = {
'black' : '30',
'red' : '31',
'green' : '32',
'yellow' : '33',
'blue' : '34',
'purple' : '35',
'cyan' : '36',
'white' : '37',
}
FXS = {
'normal' : '0',
'bold' : '1',
'underline': '4',
}
BG_COLORS = {
'black' : '40',
'red' : '41',
'green' : '42',
    'yellow' : '43',
'blue' : '44',
'purple' : '45',
'cyan' : '46',
'white' : '47',
}
ESCAPE = '\033['
def color(string, fg=None, fx=None, bg=None):
keys = (fg, fx, bg)
tables = (FG_COLORS, FXS, BG_COLORS)
codes = [table[key] for table, key in zip(tables, keys) if key is not None]
return ESCAPE + ';'.join(codes) + 'm' + string + ESCAPE + '0m'
|
Add utility for terminal colors
|
Add utility for terminal colors
|
Python
|
mit
|
nvander1/skrt
|
Add utility for terminal colors
|
FG_COLORS = {
'black' : '30',
'red' : '31',
'green' : '32',
'yellow' : '33',
'blue' : '34',
'purple' : '35',
'cyan' : '36',
'white' : '37',
}
FXS = {
'normal' : '0',
'bold' : '1',
'underline': '4',
}
BG_COLORS = {
'black' : '40',
'red' : '41',
'green' : '42',
    'yellow' : '43',
'blue' : '44',
'purple' : '45',
'cyan' : '46',
'white' : '47',
}
ESCAPE = '\033['
def color(string, fg=None, fx=None, bg=None):
keys = (fg, fx, bg)
tables = (FG_COLORS, FXS, BG_COLORS)
codes = [table[key] for table, key in zip(tables, keys) if key is not None]
return ESCAPE + ';'.join(codes) + 'm' + string + ESCAPE + '0m'
|
<commit_before><commit_msg>Add utility for terminal colors<commit_after>
|
FG_COLORS = {
'black' : '30',
'red' : '31',
'green' : '32',
'yellow' : '33',
'blue' : '34',
'purple' : '35',
'cyan' : '36',
'white' : '37',
}
FXS = {
'normal' : '0',
'bold' : '1',
'underline': '4',
}
BG_COLORS = {
'black' : '40',
'red' : '41',
'green' : '42',
    'yellow' : '43',
'blue' : '44',
'purple' : '45',
'cyan' : '46',
'white' : '47',
}
ESCAPE = '\033['
def color(string, fg=None, fx=None, bg=None):
keys = (fg, fx, bg)
tables = (FG_COLORS, FXS, BG_COLORS)
codes = [table[key] for table, key in zip(tables, keys) if key is not None]
return ESCAPE + ';'.join(codes) + 'm' + string + ESCAPE + '0m'
|
Add utility for terminal colorsFG_COLORS = {
'black' : '30',
'red' : '31',
'green' : '32',
'yellow' : '33',
'blue' : '34',
'purple' : '35',
'cyan' : '36',
'white' : '37',
}
FXS = {
'normal' : '0',
'bold' : '1',
'underline': '4',
}
BG_COLORS = {
'black' : '40',
'red' : '41',
'green' : '42',
    'yellow' : '43',
'blue' : '44',
'purple' : '45',
'cyan' : '46',
'white' : '47',
}
ESCAPE = '\033['
def color(string, fg=None, fx=None, bg=None):
keys = (fg, fx, bg)
tables = (FG_COLORS, FXS, BG_COLORS)
codes = [table[key] for table, key in zip(tables, keys) if key is not None]
return ESCAPE + ';'.join(codes) + 'm' + string + ESCAPE + '0m'
|
<commit_before><commit_msg>Add utility for terminal colors<commit_after>FG_COLORS = {
'black' : '30',
'red' : '31',
'green' : '32',
'yellow' : '33',
'blue' : '34',
'purple' : '35',
'cyan' : '36',
'white' : '37',
}
FXS = {
'normal' : '0',
'bold' : '1',
'underline': '4',
}
BG_COLORS = {
'black' : '40',
'red' : '41',
'green' : '42',
    'yellow' : '43',
'blue' : '44',
'purple' : '45',
'cyan' : '46',
'white' : '47',
}
ESCAPE = '\033['
def color(string, fg=None, fx=None, bg=None):
keys = (fg, fx, bg)
tables = (FG_COLORS, FXS, BG_COLORS)
codes = [table[key] for table, key in zip(tables, keys) if key is not None]
return ESCAPE + ';'.join(codes) + 'm' + string + ESCAPE + '0m'
|
|
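A usage sketch for the color() helper in the record above, assuming its definitions are in scope and the terminal honors ANSI SGR escape codes:
print(color('WARNING', fg='yellow', fx='bold'))   # bold yellow foreground
print(color('ok', fg='green', bg='black'))        # green on black
print(color('note', fx='underline'))              # effect only, default colors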
33f651c3f6e697b2a9a2bf30006b1d2facaba103
|
fmn/rules/fas.py
|
fmn/rules/fas.py
|
def fas_group_create(config, message):
""" Fas: New group created.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.create')
def fas_group_member_apply(config, message):
""" Fas: A member requested to join a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.apply')
def fas_group_member_remove(config, message):
""" Fas: A user was removed from a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.remove')
def fas_group_member_sponsor(config, message):
""" Fas: A user has been sponsored by an authorized user into a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.sponsor')
def fas_group_update(config, message):
""" Fas: A group's properties have been modified.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.update')
def fas_role_update(config, message):
""" A user's role in a particular group has been updated.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.role.update')
def fas_user_create(config, message):
""" Fas: A new user account has been created.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.user.create')
def fas_user_update(config, message):
""" Fas: A user updated his/her account.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.user.update')
|
Add filters for the FAS messages
|
Add filters for the FAS messages
|
Python
|
lgpl-2.1
|
jeremycline/fmn,jeremycline/fmn,jeremycline/fmn
|
Add filters for the FAS messages
|
def fas_group_create(config, message):
""" Fas: New group created.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.create')
def fas_group_member_apply(config, message):
""" Fas: A member requested to join a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.apply')
def fas_group_member_remove(config, message):
""" Fas: A user was removed from a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.remove')
def fas_group_member_sponsor(config, message):
""" Fas: A user has been sponsored by an authorized user into a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.sponsor')
def fas_group_update(config, message):
""" Fas: A group's properties have been modified.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.update')
def fas_role_update(config, message):
""" A user's role in a particular group has been updated.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.role.update')
def fas_user_create(config, message):
""" Fas: A new user account has been created.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.user.create')
def fas_user_update(config, message):
""" Fas: A user updated his/her account.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.user.update')
|
<commit_before><commit_msg>Add filters for the FAS messages<commit_after>
|
def fas_group_create(config, message):
""" Fas: New group created.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.create')
def fas_group_member_apply(config, message):
""" Fas: A member requested to join a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.apply')
def fas_group_member_remove(config, message):
""" Fas: A user was removed from a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.remove')
def fas_group_member_sponsor(config, message):
""" Fas: A user has been sponsored by an authorized user into a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.sponsor')
def fas_group_update(config, message):
""" Fas: A group's properties have been modified.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.update')
def fas_role_update(config, message):
""" A user's role in a particular group has been updated.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.role.update')
def fas_user_create(config, message):
""" Fas: A new user account has been created.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.user.create')
def fas_user_update(config, message):
""" Fas: A user updated his/her account.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.user.update')
|
Add filters for the FAS messagesdef fas_group_create(config, message):
""" Fas: New group created.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.create')
def fas_group_member_apply(config, message):
""" Fas: A member requested to join a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.apply')
def fas_group_member_remove(config, message):
""" Fas: A user was removed from a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.remove')
def fas_group_member_sponsor(config, message):
""" Fas: A user has been sponsored by an authorized user into a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.sponsor')
def fas_group_update(config, message):
""" Fas: A group's properties have been modified.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.update')
def fas_role_update(config, message):
""" A user's role in a particular group has been updated.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.role.update')
def fas_user_create(config, message):
""" Fas: A new user account has been created.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.user.create')
def fas_user_update(config, message):
""" Fas: A user updated his/her account.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.user.update')
|
<commit_before><commit_msg>Add filters for the FAS messages<commit_after>def fas_group_create(config, message):
""" Fas: New group created.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.create')
def fas_group_member_apply(config, message):
""" Fas: A member requested to join a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.apply')
def fas_group_member_remove(config, message):
""" Fas: A user was removed from a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.remove')
def fas_group_member_sponsor(config, message):
""" Fas: A user has been sponsored by an authorized user into a group.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.member.sponsor')
def fas_group_update(config, message):
""" Fas: A group's properties have been modified.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.group.update')
def fas_role_update(config, message):
""" A user's role in a particular group has been updated.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.role.update')
def fas_user_create(config, message):
""" Fas: A new user account has been created.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.user.create')
def fas_user_update(config, message):
""" Fas: A user updated his/her account.
TODO description for the web interface goes here
"""
return message['topic'].endswith('fas.user.update')
|
|
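A minimal sketch of how the filters in the record above match, assuming the filter functions are in scope; the topic prefix is a hypothetical fedmsg-style value, since only the suffix is checked:
msg = {'topic': 'org.fedoraproject.prod.fas.user.create'}  # hypothetical topic
assert fas_user_create({}, msg)       # suffix matches 'fas.user.create'
assert not fas_user_update({}, msg)   # different suffix, no match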
069e6a31208e98d565811216aea1b5a4b18f4391
|
tests/api.py
|
tests/api.py
|
import unittest
import carseour
"""
Run tests from the main carseour directory: python -m unittest tests.api
"""
class TestAPI(unittest.TestCase):
def setUp(self):
self.data = carseour.live()
def tearDown(self):
pass
def test_valid_api(self):
self.assertEqual(self.data.mVersion, carseour.definitions.SHARED_MEMORY_VERSION)
def test_wheels(self):
wheels = self.data.wheels()
idx = 0
for wheel in wheels:
self.assertEqual(wheel['tyre']['rps'], self.data.mTyreRPS[idx])
idx += 1
def test_players(self):
players = self.data.players()
self.assertGreater(len(players), 0)
self.assertEqual(len(players), self.data.mNumParticipants)
|
Add a few tests to see that we actually can retrieve information from the API and that our helper methods seem to work
|
Add a few tests to see that we actually can retrieve information from the API and that our helper methods seem to work
|
Python
|
mit
|
matslindh/carseour,matslindh/carseour
|
Add a few tests to see that we actually can retrieve information from the API and that our helper methods seem to work
|
import unittest
import carseour
"""
Run tests from the main carseour directory: python -m unittest tests.api
"""
class TestAPI(unittest.TestCase):
def setUp(self):
self.data = carseour.live()
def tearDown(self):
pass
def test_valid_api(self):
self.assertEqual(self.data.mVersion, carseour.definitions.SHARED_MEMORY_VERSION)
def test_wheels(self):
wheels = self.data.wheels()
idx = 0
for wheel in wheels:
self.assertEqual(wheel['tyre']['rps'], self.data.mTyreRPS[idx])
idx += 1
def test_players(self):
players = self.data.players()
self.assertGreater(len(players), 0)
self.assertEqual(len(players), self.data.mNumParticipants)
|
<commit_before><commit_msg>Add a few tests to see that we actually can retrieve information from the API and that our helper methods seem to work<commit_after>
|
import unittest
import carseour
"""
Run tests from the main carseour directory: python -m unittest tests.api
"""
class TestAPI(unittest.TestCase):
def setUp(self):
self.data = carseour.live()
def tearDown(self):
pass
def test_valid_api(self):
self.assertEqual(self.data.mVersion, carseour.definitions.SHARED_MEMORY_VERSION)
def test_wheels(self):
wheels = self.data.wheels()
idx = 0
for wheel in wheels:
self.assertEqual(wheel['tyre']['rps'], self.data.mTyreRPS[idx])
idx += 1
def test_players(self):
players = self.data.players()
self.assertGreater(len(players), 0)
self.assertEqual(len(players), self.data.mNumParticipants)
|
Add a few tests to see that we actually can retrieve information from the API and that our helper methods seem to workimport unittest
import carseour
"""
Run tests from the main carseour directory: python -m unittest tests.api
"""
class TestAPI(unittest.TestCase):
def setUp(self):
self.data = carseour.live()
def tearDown(self):
pass
def test_valid_api(self):
self.assertEqual(self.data.mVersion, carseour.definitions.SHARED_MEMORY_VERSION)
def test_wheels(self):
wheels = self.data.wheels()
idx = 0
for wheel in wheels:
self.assertEqual(wheel['tyre']['rps'], self.data.mTyreRPS[idx])
idx += 1
def test_players(self):
players = self.data.players()
self.assertGreater(len(players), 0)
self.assertEqual(len(players), self.data.mNumParticipants)
|
<commit_before><commit_msg>Add a few tests to see that we actually can retrieve information from the API and that our helper methods seem to work<commit_after>import unittest
import carseour
"""
Run tests from the main carseour directory: python -m unittest tests.api
"""
class TestAPI(unittest.TestCase):
def setUp(self):
self.data = carseour.live()
def tearDown(self):
pass
def test_valid_api(self):
self.assertEqual(self.data.mVersion, carseour.definitions.SHARED_MEMORY_VERSION)
def test_wheels(self):
wheels = self.data.wheels()
idx = 0
for wheel in wheels:
self.assertEqual(wheel['tyre']['rps'], self.data.mTyreRPS[idx])
idx += 1
def test_players(self):
players = self.data.players()
self.assertGreater(len(players), 0)
self.assertEqual(len(players), self.data.mNumParticipants)
|
|
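The manual idx counter in test_wheels above can be expressed with enumerate; an equivalent sketch of that loop body:
for idx, wheel in enumerate(wheels):
    self.assertEqual(wheel['tyre']['rps'], self.data.mTyreRPS[idx])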
be82e8070e01c24ca909171e1b6f0bac4edeafb6
|
broad_scan.py
|
broad_scan.py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 25 19:43:13 2015
@author: jensv
Do a broad scan of skin geometry, epsilon, and spline knots.
"""
import numpy as np
import skin_core_scanner_simple as scss
lambda_bar_space = [0.01, 6., 75]
k_bar_space = [0.01, 3., 75]
for skin_width in np.logspace(0.001, 0.9, 25):
for transition_width in np.logspace(0.001, (1. - skin_width)/2., 25):
core_radius = 1. - 2.*transition_width - skin_width
for epsilon in np.logspace(0.01, 1., 25):
for points_skin in np.linspace(20, 200, 5):
for points_transition in np.linspace(50, 500, 5):
scss.scan_lambda_k_space(lambda_bar_space, k_bar_space,
skin_width=skin_width,
transition_width=transition_width,
core_radius=core_radius,
epsilon=epsilon,
points_skin=points_skin,
points_transition=points_transition)
|
Add a script to run a broad scan.
|
Add a script to run a broad scan.
A broad scan of skin geometry, epsilon, and spline knots.
|
Python
|
mit
|
jensv/fluxtubestability,jensv/fluxtubestability
|
Add a script to run a broad scan.
A broad scan of skin geometry, epsilon, and spline knots.
|
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 25 19:43:13 2015
@author: jensv
Do a broad scan of skin geometry, epsilon, and spline knots.
"""
import numpy as np
import skin_core_scanner_simple as scss
lambda_bar_space = [0.01, 6., 75]
k_bar_space = [0.01, 3., 75]
for skin_width in np.logspace(0.001, 0.9, 25):
for transition_width in np.logspace(0.001, (1. - skin_width)/2., 25):
core_radius = 1. - 2.*transition_width - skin_width
for epsilon in np.logspace(0.01, 1., 25):
for points_skin in np.linspace(20, 200, 5):
for points_transition in np.linspace(50, 500, 5):
scss.scan_lambda_k_space(lambda_bar_space, k_bar_space,
skin_width=skin_width,
transition_width=transition_width,
core_radius=core_radius,
epsilon=epsilon,
points_skin=points_skin,
points_transition=points_transition)
|
<commit_before><commit_msg>Add a script to run a broad scan.
A broad scan of skin geometry, epsilon, and spline knots.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 25 19:43:13 2015
@author: jensv
Do a broad scan of skin geometry, epsilon, and spline knots.
"""
import numpy as np
import skin_core_scanner_simple as scss
lambda_bar_space = [0.01, 6., 75]
k_bar_space = [0.01, 3., 75]
for skin_width in np.logspace(0.001, 0.9, 25):
for transition_width in np.logspace(0.001, (1. - skin_width)/2., 25):
core_radius = 1. - 2.*transition_width - skin_width
for epsilon in np.logspace(0.01, 1., 25):
for points_skin in np.linspace(20, 200, 5):
for points_transition in np.linspace(50, 500, 5):
scss.scan_lambda_k_space(lambda_bar_space, k_bar_space,
skin_width=skin_width,
transition_width=transition_width,
core_radius=core_radius,
epsilon=epsilon,
points_skin=points_skin,
points_transition=points_transition)
|
Add a script to run a broad scan.
A broad scan of skin geometry, epsilon, and spline knots.# -*- coding: utf-8 -*-
"""
Created on Sun Oct 25 19:43:13 2015
@author: jensv
Do a broad scan of skin geometry, epsilon, and spline knots.
"""
import numpy as np
import skin_core_scanner_simple as scss
lambda_bar_space = [0.01, 6., 75]
k_bar_space = [0.01, 3., 75]
for skin_width in np.logspace(0.001, 0.9, 25):
for transition_width in np.logspace(0.001, (1. - skin_width)/2., 25):
core_radius = 1. - 2.*transition_width - skin_width
for epsilon in np.logspace(0.01, 1., 25):
for points_skin in np.linspace(20, 200, 5):
for points_transition in np.linspace(50, 500, 5):
scss.scan_lambda_k_space(lambda_bar_space, k_bar_space,
skin_width=skin_width,
transition_width=transition_width,
core_radius=core_radius,
epsilon=epsilon,
points_skin=points_skin,
points_transition=points_transition)
|
<commit_before><commit_msg>Add a script to run a broad scan.
A broad scan of skin geometry, epsilon, and spline knots.<commit_after># -*- coding: utf-8 -*-
"""
Created on Sun Oct 25 19:43:13 2015
@author: jensv
Do a broad scan of skin geometry, epsilon, and spline knots.
"""
import numpy as np
import skin_core_scanner_simple as scss
lambda_bar_space = [0.01, 6., 75]
k_bar_space = [0.01, 3., 75]
for skin_width in np.logspace(0.001, 0.9, 25):
for transition_width in np.logspace(0.001, (1. - skin_width)/2., 25):
core_radius = 1. - 2.*transition_width - skin_width
for epsilon in np.logspace(0.01, 1., 25):
for points_skin in np.linspace(20, 200, 5):
for points_transition in np.linspace(50, 500, 5):
scss.scan_lambda_k_space(lambda_bar_space, k_bar_space,
skin_width=skin_width,
transition_width=transition_width,
core_radius=core_radius,
epsilon=epsilon,
points_skin=points_skin,
points_transition=points_transition)
|
|
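One caution on the sweep above: np.logspace treats its endpoints as base-10 exponents, so np.logspace(0.001, 0.9, 25) spans roughly 1.002 to 7.94 rather than 0.001 to 0.9. If the literal range was intended (an assumption about intent), np.geomspace produces it directly:
import numpy as np

widths = np.geomspace(0.001, 0.9, 25)  # log-spaced between the literal endpoints
print(widths[0], widths[-1])           # 0.001 ... 0.9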
eb6d70524bf68cbd151958fbd82689c7e7f4abd1
|
test/test_service.py
|
test/test_service.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.synthesis.service import Service
try:
__import__('PyQt5')
from PyQt5.QtWidgets import QApplication, QPushButton
from PyQt5.QtWidgets import QWidget, QLabel
except ImportError:
from PyQt4.Qt import QApplication, QPushButton
from PyQt4.Qt import QWidget, QLabel
class TestServicesView(unittest2.TestCase):
"""
    This file tests the Service class.
"""
service = {
'name': 'My Service',
'ls_state': 'OK',
'ls_last_check': 0.0,
'ls_output': 'Output of the service'
}
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
cls.widget = QWidget()
except:
pass
def test_init_view(self):
"""Initialize Service"""
under_test = Service()
self.assertIsNotNone(under_test.acknowledged)
self.assertIsNotNone(under_test.downtimed)
self.assertIsNone(under_test.service)
self.assertIsNone(under_test.acknowledge_btn)
self.assertIsNone(under_test.downtime_btn)
under_test.initialize(self.service)
self.assertIsNotNone(under_test.service)
self.assertIsNotNone(under_test.acknowledge_btn)
self.assertIsNotNone(under_test.downtime_btn)
|
Add Unit Tests for Service
|
Add Unit Tests for Service
Ref #127
|
Python
|
agpl-3.0
|
Alignak-monitoring-contrib/alignak-app,Alignak-monitoring-contrib/alignak-app
|
Add Unit Tests for Service
Ref #127
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.synthesis.service import Service
try:
__import__('PyQt5')
from PyQt5.QtWidgets import QApplication, QPushButton
from PyQt5.QtWidgets import QWidget, QLabel
except ImportError:
from PyQt4.Qt import QApplication, QPushButton
from PyQt4.Qt import QWidget, QLabel
class TestServicesView(unittest2.TestCase):
"""
    This file tests the Service class.
"""
service = {
'name': 'My Service',
'ls_state': 'OK',
'ls_last_check': 0.0,
'ls_output': 'Output of the service'
}
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
cls.widget = QWidget()
except:
pass
def test_init_view(self):
"""Initialize Service"""
under_test = Service()
self.assertIsNotNone(under_test.acknowledged)
self.assertIsNotNone(under_test.downtimed)
self.assertIsNone(under_test.service)
self.assertIsNone(under_test.acknowledge_btn)
self.assertIsNone(under_test.downtime_btn)
under_test.initialize(self.service)
self.assertIsNotNone(under_test.service)
self.assertIsNotNone(under_test.acknowledge_btn)
self.assertIsNotNone(under_test.downtime_btn)
|
<commit_before><commit_msg>Add Unit Tests for Service
Ref #127<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.synthesis.service import Service
try:
__import__('PyQt5')
from PyQt5.QtWidgets import QApplication, QPushButton
from PyQt5.QtWidgets import QWidget, QLabel
except ImportError:
from PyQt4.Qt import QApplication, QPushButton
from PyQt4.Qt import QWidget, QLabel
class TestServicesView(unittest2.TestCase):
"""
    This file tests the Service class.
"""
service = {
'name': 'My Service',
'ls_state': 'OK',
'ls_last_check': 0.0,
'ls_output': 'Output of the service'
}
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
cls.widget = QWidget()
except:
pass
def test_init_view(self):
"""Initialize Service"""
under_test = Service()
self.assertIsNotNone(under_test.acknowledged)
self.assertIsNotNone(under_test.downtimed)
self.assertIsNone(under_test.service)
self.assertIsNone(under_test.acknowledge_btn)
self.assertIsNone(under_test.downtime_btn)
under_test.initialize(self.service)
self.assertIsNotNone(under_test.service)
self.assertIsNotNone(under_test.acknowledge_btn)
self.assertIsNotNone(under_test.downtime_btn)
|
Add Unit Tests for Service
Ref #127#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.synthesis.service import Service
try:
__import__('PyQt5')
from PyQt5.QtWidgets import QApplication, QPushButton
from PyQt5.QtWidgets import QWidget, QLabel
except ImportError:
from PyQt4.Qt import QApplication, QPushButton
from PyQt4.Qt import QWidget, QLabel
class TestServicesView(unittest2.TestCase):
"""
    This file tests the Service class.
"""
service = {
'name': 'My Service',
'ls_state': 'OK',
'ls_last_check': 0.0,
'ls_output': 'Output of the service'
}
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
cls.widget = QWidget()
except:
pass
def test_init_view(self):
"""Initialize Service"""
under_test = Service()
self.assertIsNotNone(under_test.acknowledged)
self.assertIsNotNone(under_test.downtimed)
self.assertIsNone(under_test.service)
self.assertIsNone(under_test.acknowledge_btn)
self.assertIsNone(under_test.downtime_btn)
under_test.initialize(self.service)
self.assertIsNotNone(under_test.service)
self.assertIsNotNone(under_test.acknowledge_btn)
self.assertIsNotNone(under_test.downtime_btn)
|
<commit_before><commit_msg>Add Unit Tests for Service
Ref #127<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import sys
import unittest2
from alignak_app.synthesis.service import Service
try:
__import__('PyQt5')
from PyQt5.QtWidgets import QApplication, QPushButton
from PyQt5.QtWidgets import QWidget, QLabel
except ImportError:
from PyQt4.Qt import QApplication, QPushButton
from PyQt4.Qt import QWidget, QLabel
class TestServicesView(unittest2.TestCase):
"""
    This file tests the Service class.
"""
service = {
'name': 'My Service',
'ls_state': 'OK',
'ls_last_check': 0.0,
'ls_output': 'Output of the service'
}
@classmethod
def setUpClass(cls):
"""Create QApplication"""
try:
cls.app = QApplication(sys.argv)
cls.widget = QWidget()
except:
pass
def test_init_view(self):
"""Initialize Service"""
under_test = Service()
self.assertIsNotNone(under_test.acknowledged)
self.assertIsNotNone(under_test.downtimed)
self.assertIsNone(under_test.service)
self.assertIsNone(under_test.acknowledge_btn)
self.assertIsNone(under_test.downtime_btn)
under_test.initialize(self.service)
self.assertIsNotNone(under_test.service)
self.assertIsNotNone(under_test.acknowledge_btn)
self.assertIsNotNone(under_test.downtime_btn)
|
|
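The bare try/except in setUpClass above guards against creating a second QApplication; a more explicit sketch of the same idea (assumes PyQt5 is installed):
import sys
from PyQt5.QtWidgets import QApplication

# Reuse the existing application object if one was already created.
app = QApplication.instance() or QApplication(sys.argv)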
26c78aacc8e632290dce532de2540f94d85da062
|
migration/versions/551819450a3c_display_name.py
|
migration/versions/551819450a3c_display_name.py
|
"""display_name
Add display_name field to User (again)
Revision ID: 551819450a3c
Revises: 187dd4ba924a
Create Date: 2013-04-08 21:43:51.436466
"""
#
#
# revision identifiers, used by Alembic.
revision = '551819450a3c'
down_revision = '187dd4ba924a'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('users', sa.Column('display_name', sa.Unicode(255), nullable=True, default=None))
def downgrade():
op.drop_column('users', 'display_name')
|
Add migration script for added display_name column in a4919a7
|
Add migration script for added display_name column in a4919a7
|
Python
|
agpl-3.0
|
moschlar/SAUCE,moschlar/SAUCE,moschlar/SAUCE,moschlar/SAUCE
|
Add migration script for added display_name column in a4919a7
|
"""display_name
Add display_name field to User (again)
Revision ID: 551819450a3c
Revises: 187dd4ba924a
Create Date: 2013-04-08 21:43:51.436466
"""
#
#
# revision identifiers, used by Alembic.
revision = '551819450a3c'
down_revision = '187dd4ba924a'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('users', sa.Column('display_name', sa.Unicode(255), nullable=True, default=None))
def downgrade():
op.drop_column('users', 'display_name')
|
<commit_before><commit_msg>Add migration script for added display_name column in a4919a7<commit_after>
|
"""display_name
Add display_name field to User (again)
Revision ID: 551819450a3c
Revises: 187dd4ba924a
Create Date: 2013-04-08 21:43:51.436466
"""
#
#
# revision identifiers, used by Alembic.
revision = '551819450a3c'
down_revision = '187dd4ba924a'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('users', sa.Column('display_name', sa.Unicode(255), nullable=True, default=None))
def downgrade():
op.drop_column('users', 'display_name')
|
Add migration script for added display_name column in a4919a7"""display_name
Add display_name field to User (again)
Revision ID: 551819450a3c
Revises: 187dd4ba924a
Create Date: 2013-04-08 21:43:51.436466
"""
#
#
# revision identifiers, used by Alembic.
revision = '551819450a3c'
down_revision = '187dd4ba924a'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('users', sa.Column('display_name', sa.Unicode(255), nullable=True, default=None))
def downgrade():
op.drop_column('users', 'display_name')
|
<commit_before><commit_msg>Add migration script for added display_name column in a4919a7<commit_after>"""display_name
Add display_name field to User (again)
Revision ID: 551819450a3c
Revises: 187dd4ba924a
Create Date: 2013-04-08 21:43:51.436466
"""
#
#
# revision identifiers, used by Alembic.
revision = '551819450a3c'
down_revision = '187dd4ba924a'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('users', sa.Column('display_name', sa.Unicode(255), nullable=True, default=None))
def downgrade():
op.drop_column('users', 'display_name')
|
|
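The revision above can also be applied or rolled back through Alembic's command API; a sketch assuming an alembic.ini configured for the project (the path is an assumption):
from alembic.config import Config
from alembic import command

cfg = Config('alembic.ini')             # assumed config file location
command.upgrade(cfg, '551819450a3c')    # runs upgrade() above
command.downgrade(cfg, '187dd4ba924a')  # runs downgrade() above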
e954624c56348f484d8e99c595770582281f4a02
|
upgrade_db.py
|
upgrade_db.py
|
#!/usr/bin/env python3
import json
import sys
from pathlib import Path
from alembic.config import main as alembic
if __name__ == '__main__':
path = sys.argv[1] if len(sys.argv) >= 2 else 'config.json'
with open(path) as f:
config = json.load(f)
url = config['db']['connect_string']
alembic_opts = [
'-c',
str(
Path(__file__).resolve().parent
/ 'src' / 'cardinal' / 'db' / 'migrations' / 'alembic.ini'
),
'-x',
'url=' + url,
'upgrade',
'head'
]
alembic(alembic_opts)
|
Add basic script to apply migrations
|
Add basic script to apply migrations
|
Python
|
mit
|
FallenWarrior2k/cardinal.py,FallenWarrior2k/cardinal.py
|
Add basic script to apply migrations
|
#!/usr/bin/env python3
import json
import sys
from pathlib import Path
from alembic.config import main as alembic
if __name__ == '__main__':
path = sys.argv[1] if len(sys.argv) >= 2 else 'config.json'
with open(path) as f:
config = json.load(f)
url = config['db']['connect_string']
alembic_opts = [
'-c',
str(
Path(__file__).resolve().parent
/ 'src' / 'cardinal' / 'db' / 'migrations' / 'alembic.ini'
),
'-x',
'url=' + url,
'upgrade',
'head'
]
alembic(alembic_opts)
|
<commit_before><commit_msg>Add basic script to apply migrations<commit_after>
|
#!/usr/bin/env python3
import json
import sys
from pathlib import Path
from alembic.config import main as alembic
if __name__ == '__main__':
path = sys.argv[1] if len(sys.argv) >= 2 else 'config.json'
with open(path) as f:
config = json.load(f)
url = config['db']['connect_string']
alembic_opts = [
'-c',
str(
Path(__file__).resolve().parent
/ 'src' / 'cardinal' / 'db' / 'migrations' / 'alembic.ini'
),
'-x',
'url=' + url,
'upgrade',
'head'
]
alembic(alembic_opts)
|
Add basic script to apply migrations#!/usr/bin/env python3
import json
import sys
from pathlib import Path
from alembic.config import main as alembic
if __name__ == '__main__':
path = sys.argv[1] if len(sys.argv) >= 2 else 'config.json'
with open(path) as f:
config = json.load(f)
url = config['db']['connect_string']
alembic_opts = [
'-c',
str(
Path(__file__).resolve().parent
/ 'src' / 'cardinal' / 'db' / 'migrations' / 'alembic.ini'
),
'-x',
'url=' + url,
'upgrade',
'head'
]
alembic(alembic_opts)
|
<commit_before><commit_msg>Add basic script to apply migrations<commit_after>#!/usr/bin/env python3
import json
import sys
from pathlib import Path
from alembic.config import main as alembic
if __name__ == '__main__':
path = sys.argv[1] if len(sys.argv) >= 2 else 'config.json'
with open(path) as f:
config = json.load(f)
url = config['db']['connect_string']
alembic_opts = [
'-c',
str(
Path(__file__).resolve().parent
/ 'src' / 'cardinal' / 'db' / 'migrations' / 'alembic.ini'
),
'-x',
'url=' + url,
'upgrade',
'head'
]
alembic(alembic_opts)
|
|
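The script above reads only config['db']['connect_string']; a sketch that writes a minimal config.json it would accept (the connection URL is a hypothetical example):
import json

config = {'db': {'connect_string': 'sqlite:///cardinal.db'}}  # hypothetical URL
with open('config.json', 'w') as f:
    json.dump(config, f, indent=2)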
1a398980d05af4daf5410c7c40f22c2e8f52d5f2
|
lms/djangoapps/django_comment_client/tests/test_middleware.py
|
lms/djangoapps/django_comment_client/tests/test_middleware.py
|
import string
import random
import collections
from django.test import TestCase
import comment_client
import django.http
import django_comment_client.middleware as middleware
class AjaxExceptionTestCase(TestCase):
# TODO: check whether the correct error message is produced.
# The error message should be the same as the argument to CommentClientError
def setUp(self):
self.a = middleware.AjaxExceptionMiddleware()
self.request1 = django.http.HttpRequest()
self.request0 = django.http.HttpRequest()
self.exception1 = comment_client.CommentClientError('{}')
self.exception0 = ValueError()
self.request1.META['HTTP_X_REQUESTED_WITH'] = "XMLHttpRequest"
self.request0.META['HTTP_X_REQUESTED_WITH'] = "SHADOWFAX"
def test_process_exception(self):
self.assertIsInstance(self.a.process_exception(self.request1, self.exception1), middleware.JsonError)
self.assertIsNone(self.a.process_exception(self.request1, self.exception0))
self.assertIsNone(self.a.process_exception(self.request0, self.exception1))
self.assertIsNone(self.a.process_exception(self.request0, self.exception0))
|
Add tests for django-comment-client middleware
|
Add tests for django-comment-client middleware
|
Python
|
agpl-3.0
|
vasyarv/edx-platform,TsinghuaX/edx-platform,ovnicraft/edx-platform,eduNEXT/edunext-platform,pku9104038/edx-platform,proversity-org/edx-platform,vikas1885/test1,xinjiguaike/edx-platform,miptliot/edx-platform,stvstnfrd/edx-platform,ahmadiga/min_edx,rue89-tech/edx-platform,SivilTaram/edx-platform,apigee/edx-platform,LICEF/edx-platform,alexthered/kienhoc-platform,deepsrijit1105/edx-platform,dkarakats/edx-platform,devs1991/test_edx_docmode,antonve/s4-project-mooc,mitocw/edx-platform,ak2703/edx-platform,analyseuc3m/ANALYSE-v1,shubhdev/openedx,UXE/local-edx,ZLLab-Mooc/edx-platform,JioEducation/edx-platform,andyzsf/edx,abdoosh00/edx-rtl-final,mbareta/edx-platform-ft,xingyepei/edx-platform,jazkarta/edx-platform-for-isc,nttks/jenkins-test,devs1991/test_edx_docmode,shashank971/edx-platform,dsajkl/reqiop,nttks/edx-platform,kmoocdev/edx-platform,analyseuc3m/ANALYSE-v1,atsolakid/edx-platform,martynovp/edx-platform,cecep-edu/edx-platform,kamalx/edx-platform,tiagochiavericosta/edx-platform,eestay/edx-platform,hastexo/edx-platform,iivic/BoiseStateX,ampax/edx-platform-backup,devs1991/test_edx_docmode,cyanna/edx-platform,B-MOOC/edx-platform,mbareta/edx-platform-ft,vismartltd/edx-platform,ak2703/edx-platform,lduarte1991/edx-platform,chauhanhardik/populo_2,nanolearningllc/edx-platform-cypress,yokose-ks/edx-platform,pomegranited/edx-platform,wwj718/ANALYSE,mtlchun/edx,iivic/BoiseStateX,fly19890211/edx-platform,LICEF/edx-platform,polimediaupv/edx-platform,zofuthan/edx-platform,ahmadiga/min_edx,LearnEra/LearnEraPlaftform,hkawasaki/kawasaki-aio8-2,jazkarta/edx-platform-for-isc,jbassen/edx-platform,DefyVentures/edx-platform,appsembler/edx-platform,shubhdev/edxOnBaadal,benpatterson/edx-platform,ahmadiga/min_edx,unicri/edx-platform,EduPepperPD/pepper2013,rhndg/openedx,ZLLab-Mooc/edx-platform,chauhanhardik/populo,devs1991/test_edx_docmode,cognitiveclass/edx-platform,ferabra/edx-platform,defance/edx-platform,cpennington/edx-platform,halvertoluke/edx-platform,cognitiveclass/edx-platform,ubc/edx-platform,polimediaupv/edx-platform,solashirai/edx-platform,don-github/edx-platform,etzhou/edx-platform,jolyonb/edx-platform,pku9104038/edx-platform,jazztpt/edx-platform,jbassen/edx-platform,iivic/BoiseStateX,valtech-mooc/edx-platform,lduarte1991/edx-platform,ahmadio/edx-platform,zofuthan/edx-platform,chand3040/cloud_that,kalebhartje/schoolboost,msegado/edx-platform,tanmaykm/edx-platform,abdoosh00/edraak,xinjiguaike/edx-platform,xuxiao19910803/edx,fintech-circle/edx-platform,hastexo/edx-platform,PepperPD/edx-pepper-platform,10clouds/edx-platform,etzhou/edx-platform,chauhanhardik/populo,carsongee/edx-platform,philanthropy-u/edx-platform,DefyVentures/edx-platform,motion2015/edx-platform,IndonesiaX/edx-platform,peterm-itr/edx-platform,yokose-ks/edx-platform,morpheby/levelup-by,cselis86/edx-platform,dsajkl/reqiop,angelapper/edx-platform,chauhanhardik/populo,hamzehd/edx-platform,don-github/edx-platform,halvertoluke/edx-platform,LearnEra/LearnEraPlaftform,shubhdev/edxOnBaadal,longmen21/edx-platform,unicri/edx-platform,ubc/edx-platform,shashank971/edx-platform,10clouds/edx-platform,shurihell/testasia,jbzdak/edx-platform,doismellburning/edx-platform,CredoReference/edx-platform,alu042/edx-platform,dcosentino/edx-platform,chudaol/edx-platform,mahendra-r/edx-platform,B-MOOC/edx-platform,Edraak/edx-platform,sudheerchintala/LearnEraPlatForm,tiagochiavericosta/edx-platform,jamesblunt/edx-platform,itsjeyd/edx-platform,RPI-OPENEDX/edx-platform,OmarIthawi/edx-platform,bigdatauniversity/edx-platform,louyihua/edx-platform,CredoReference/edx-platform,
jamiefolsom/edx-platform,AkA84/edx-platform,simbs/edx-platform,ahmadio/edx-platform,morenopc/edx-platform,jswope00/GAI,torchingloom/edx-platform,cselis86/edx-platform,jamiefolsom/edx-platform,zofuthan/edx-platform,edx/edx-platform,franosincic/edx-platform,10clouds/edx-platform,zadgroup/edx-platform,stvstnfrd/edx-platform,shurihell/testasia,IITBinterns13/edx-platform-dev,peterm-itr/edx-platform,synergeticsedx/deployment-wipro,rationalAgent/edx-platform-custom,naresh21/synergetics-edx-platform,arbrandes/edx-platform,SravanthiSinha/edx-platform,rationalAgent/edx-platform-custom,LearnEra/LearnEraPlaftform,bitifirefly/edx-platform,y12uc231/edx-platform,miptliot/edx-platform,AkA84/edx-platform,a-parhom/edx-platform,marcore/edx-platform,chrisndodge/edx-platform,wwj718/ANALYSE,bitifirefly/edx-platform,kalebhartje/schoolboost,simbs/edx-platform,appliedx/edx-platform,jruiperezv/ANALYSE,rhndg/openedx,adoosii/edx-platform,antoviaque/edx-platform,MakeHer/edx-platform,cognitiveclass/edx-platform,rationalAgent/edx-platform-custom,arbrandes/edx-platform,ak2703/edx-platform,cpennington/edx-platform,arifsetiawan/edx-platform,doganov/edx-platform,shubhdev/edxOnBaadal,cpennington/edx-platform,DNFcode/edx-platform,sudheerchintala/LearnEraPlatForm,alexthered/kienhoc-platform,zerobatu/edx-platform,eemirtekin/edx-platform,morenopc/edx-platform,zubair-arbi/edx-platform,dsajkl/123,sudheerchintala/LearnEraPlatForm,utecuy/edx-platform,zadgroup/edx-platform,olexiim/edx-platform,carsongee/edx-platform,pelikanchik/edx-platform,xuxiao19910803/edx-platform,jazkarta/edx-platform-for-isc,beni55/edx-platform,Edraak/edraak-platform,ahmedaljazzar/edx-platform,utecuy/edx-platform,mbareta/edx-platform-ft,hkawasaki/kawasaki-aio8-2,hkawasaki/kawasaki-aio8-1,ovnicraft/edx-platform,ampax/edx-platform-backup,sameetb-cuelogic/edx-platform-test,pelikanchik/edx-platform,IITBinterns13/edx-platform-dev,ovnicraft/edx-platform,ampax/edx-platform,B-MOOC/edx-platform,rationalAgent/edx-platform-custom,shubhdev/openedx,motion2015/edx-platform,rhndg/openedx,procangroup/edx-platform,nikolas/edx-platform,kmoocdev/edx-platform,nanolearningllc/edx-platform-cypress,PepperPD/edx-pepper-platform,Ayub-Khan/edx-platform,antonve/s4-project-mooc,doganov/edx-platform,adoosii/edx-platform,halvertoluke/edx-platform,mahendra-r/edx-platform,msegado/edx-platform,philanthropy-u/edx-platform,wwj718/edx-platform,ahmedaljazzar/edx-platform,rismalrv/edx-platform,waheedahmed/edx-platform,EduPepperPDTesting/pepper2013-testing,mahendra-r/edx-platform,pdehaye/theming-edx-platform,Edraak/edx-platform,nagyistoce/edx-platform,jruiperezv/ANALYSE,Semi-global/edx-platform,nanolearningllc/edx-platform-cypress-2,SivilTaram/edx-platform,cselis86/edx-platform,jonathan-beard/edx-platform,analyseuc3m/ANALYSE-v1,itsjeyd/edx-platform,nanolearning/edx-platform,LICEF/edx-platform,Kalyzee/edx-platform,arifsetiawan/edx-platform,JCBarahona/edX,openfun/edx-platform,jamiefolsom/edx-platform,a-parhom/edx-platform,raccoongang/edx-platform,hastexo/edx-platform,cecep-edu/edx-platform,vasyarv/edx-platform,raccoongang/edx-platform,TeachAtTUM/edx-platform,Edraak/circleci-edx-platform,chauhanhardik/populo_2,rue89-tech/edx-platform,Endika/edx-platform,pabloborrego93/edx-platform,Lektorium-LLC/edx-platform,shubhdev/edx-platform,Kalyzee/edx-platform,abdoosh00/edraak,kursitet/edx-platform,sudheerchintala/LearnEraPlatForm,jamesblunt/edx-platform,mjirayu/sit_academy,procangroup/edx-platform,benpatterson/edx-platform,shubhdev/edx-platform,nttks/jenkins-test,dsajkl/reqiop,EduPepperPD/pepper2013,EduPepperPD/p
epper2013,Edraak/edx-platform,DNFcode/edx-platform,RPI-OPENEDX/edx-platform,vismartltd/edx-platform,franosincic/edx-platform,naresh21/synergetics-edx-platform,naresh21/synergetics-edx-platform,PepperPD/edx-pepper-platform,Livit/Livit.Learn.EdX,kursitet/edx-platform,ESOedX/edx-platform,chand3040/cloud_that,shashank971/edx-platform,waheedahmed/edx-platform,TeachAtTUM/edx-platform,shabab12/edx-platform,dcosentino/edx-platform,dsajkl/123,RPI-OPENEDX/edx-platform,eemirtekin/edx-platform,eestay/edx-platform,cyanna/edx-platform,hmcmooc/muddx-platform,beacloudgenius/edx-platform,praveen-pal/edx-platform,etzhou/edx-platform,Semi-global/edx-platform,EduPepperPD/pepper2013,y12uc231/edx-platform,kursitet/edx-platform,fly19890211/edx-platform,bigdatauniversity/edx-platform,playm2mboy/edx-platform,franosincic/edx-platform,kmoocdev/edx-platform,utecuy/edx-platform,utecuy/edx-platform,miptliot/edx-platform,Endika/edx-platform,jjmiranda/edx-platform,bdero/edx-platform,sameetb-cuelogic/edx-platform-test,Edraak/edx-platform,CourseTalk/edx-platform,UXE/local-edx,dsajkl/123,polimediaupv/edx-platform,mcgachey/edx-platform,mahendra-r/edx-platform,torchingloom/edx-platform,rismalrv/edx-platform,OmarIthawi/edx-platform,chand3040/cloud_that,IONISx/edx-platform,philanthropy-u/edx-platform,cecep-edu/edx-platform,praveen-pal/edx-platform,stvstnfrd/edx-platform,chauhanhardik/populo_2,EduPepperPD/pepper2013,mcgachey/edx-platform,ahmadio/edx-platform,benpatterson/edx-platform,eemirtekin/edx-platform,PepperPD/edx-pepper-platform,10clouds/edx-platform,polimediaupv/edx-platform,pku9104038/edx-platform,nikolas/edx-platform,jjmiranda/edx-platform,cecep-edu/edx-platform,EDUlib/edx-platform,syjeon/new_edx,openfun/edx-platform,ovnicraft/edx-platform,mbareta/edx-platform-ft,cselis86/edx-platform,shubhdev/edx-platform,gsehub/edx-platform,marcore/edx-platform,motion2015/edx-platform,chudaol/edx-platform,IONISx/edx-platform,pelikanchik/edx-platform,hkawasaki/kawasaki-aio8-2,morenopc/edx-platform,jbzdak/edx-platform,motion2015/a3,alu042/edx-platform,shabab12/edx-platform,nagyistoce/edx-platform,ESOedX/edx-platform,inares/edx-platform,kursitet/edx-platform,edx-solutions/edx-platform,chudaol/edx-platform,xingyepei/edx-platform,jelugbo/tundex,deepsrijit1105/edx-platform,Softmotions/edx-platform,romain-li/edx-platform,longmen21/edx-platform,adoosii/edx-platform,mjg2203/edx-platform-seas,alexthered/kienhoc-platform,xingyepei/edx-platform,jzoldak/edx-platform,DefyVentures/edx-platform,romain-li/edx-platform,dkarakats/edx-platform,fly19890211/edx-platform,pomegranited/edx-platform,romain-li/edx-platform,pku9104038/edx-platform,edx/edx-platform,JioEducation/edx-platform,xinjiguaike/edx-platform,zubair-arbi/edx-platform,edx-solutions/edx-platform,pabloborrego93/edx-platform,Softmotions/edx-platform,etzhou/edx-platform,mcgachey/edx-platform,jelugbo/tundex,nttks/edx-platform,nttks/edx-platform,carsongee/edx-platform,jzoldak/edx-platform,angelapper/edx-platform,shashank971/edx-platform,kmoocdev/edx-platform,andyzsf/edx,pepeportela/edx-platform,mjirayu/sit_academy,EduPepperPDTesting/pepper2013-testing,Edraak/circleci-edx-platform,4eek/edx-platform,mjg2203/edx-platform-seas,chrisndodge/edx-platform,abdoosh00/edx-rtl-final,morpheby/levelup-by,IndonesiaX/edx-platform,SivilTaram/edx-platform,amir-qayyum-khan/edx-platform,pabloborrego93/edx-platform,SivilTaram/edx-platform,amir-qayyum-khan/edx-platform,rhndg/openedx,valtech-mooc/edx-platform,JCBarahona/edX,fintech-circle/edx-platform,dsajkl/123,pomegranited/edx-platform,edx-solutions/edx-platform,motio
n2015/a3,jbassen/edx-platform,jazkarta/edx-platform,nagyistoce/edx-platform,jamiefolsom/edx-platform,openfun/edx-platform,shashank971/edx-platform,chudaol/edx-platform,abdoosh00/edraak,hkawasaki/kawasaki-aio8-1,adoosii/edx-platform,beacloudgenius/edx-platform,jazztpt/edx-platform,chauhanhardik/populo_2,jolyonb/edx-platform,mtlchun/edx,MSOpenTech/edx-platform,prarthitm/edxplatform,doganov/edx-platform,zhenzhai/edx-platform,jazztpt/edx-platform,prarthitm/edxplatform,dkarakats/edx-platform,mitocw/edx-platform,mushtaqak/edx-platform,nikolas/edx-platform,JCBarahona/edX,gsehub/edx-platform,shubhdev/edxOnBaadal,J861449197/edx-platform,openfun/edx-platform,yokose-ks/edx-platform,jbzdak/edx-platform,zofuthan/edx-platform,eduNEXT/edx-platform,dkarakats/edx-platform,caesar2164/edx-platform,playm2mboy/edx-platform,mtlchun/edx,jswope00/griffinx,shubhdev/openedx,knehez/edx-platform,knehez/edx-platform,chudaol/edx-platform,leansoft/edx-platform,proversity-org/edx-platform,edry/edx-platform,UXE/local-edx,franosincic/edx-platform,Unow/edx-platform,IndonesiaX/edx-platform,leansoft/edx-platform,J861449197/edx-platform,jolyonb/edx-platform,EDUlib/edx-platform,bitifirefly/edx-platform,mushtaqak/edx-platform,angelapper/edx-platform,hmcmooc/muddx-platform,devs1991/test_edx_docmode,morpheby/levelup-by,don-github/edx-platform,ahmadio/edx-platform,BehavioralInsightsTeam/edx-platform,EDUlib/edx-platform,BehavioralInsightsTeam/edx-platform,Semi-global/edx-platform,prarthitm/edxplatform,playm2mboy/edx-platform,appliedx/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,hkawasaki/kawasaki-aio8-0,teltek/edx-platform,eduNEXT/edunext-platform,rue89-tech/edx-platform,antonve/s4-project-mooc,SravanthiSinha/edx-platform,kxliugang/edx-platform,hmcmooc/muddx-platform,edry/edx-platform,J861449197/edx-platform,a-parhom/edx-platform,xuxiao19910803/edx-platform,Endika/edx-platform,ESOedX/edx-platform,jonathan-beard/edx-platform,Semi-global/edx-platform,mtlchun/edx,shubhdev/openedx,hamzehd/edx-platform,MakeHer/edx-platform,caesar2164/edx-platform,rismalrv/edx-platform,UOMx/edx-platform,lduarte1991/edx-platform,mtlchun/edx,nagyistoce/edx-platform,jswope00/GAI,ampax/edx-platform,EduPepperPDTesting/pepper2013-testing,zhenzhai/edx-platform,edx/edx-platform,hamzehd/edx-platform,ovnicraft/edx-platform,mjg2203/edx-platform-seas,solashirai/edx-platform,torchingloom/edx-platform,ampax/edx-platform-backup,Lektorium-LLC/edx-platform,playm2mboy/edx-platform,auferack08/edx-platform,appsembler/edx-platform,msegado/edx-platform,sameetb-cuelogic/edx-platform-test,Softmotions/edx-platform,LICEF/edx-platform,antoviaque/edx-platform,praveen-pal/edx-platform,sameetb-cuelogic/edx-platform-test,mcgachey/edx-platform,kmoocdev2/edx-platform,alexthered/kienhoc-platform,nikolas/edx-platform,tanmaykm/edx-platform,UOMx/edx-platform,appliedx/edx-platform,doismellburning/edx-platform,iivic/BoiseStateX,y12uc231/edx-platform,nagyistoce/edx-platform,shubhdev/edxOnBaadal,jjmiranda/edx-platform,Ayub-Khan/edx-platform,gsehub/edx-platform,vismartltd/edx-platform,vasyarv/edx-platform,jbzdak/edx-platform,xuxiao19910803/edx,jelugbo/tundex,deepsrijit1105/edx-platform,synergeticsedx/deployment-wipro,marcore/edx-platform,IndonesiaX/edx-platform,tiagochiavericosta/edx-platform,SivilTaram/edx-platform,waheedahmed/edx-platform,wwj718/ANALYSE,jonathan-beard/edx-platform,abdoosh00/edraak,proversity-org/edx-platform,ampax/edx-platform-backup,don-github/edx-platform,syjeon/new_edx,TeachAtTUM/edx-platform,pepeportela/edx-platform,AkA84/edx-platform,chauhanhardik/populo_2,motion20
15/a3,wwj718/edx-platform,ZLLab-Mooc/edx-platform,edx/edx-platform,Endika/edx-platform,atsolakid/edx-platform,dcosentino/edx-platform,DefyVentures/edx-platform,chauhanhardik/populo,OmarIthawi/edx-platform,stvstnfrd/edx-platform,jruiperezv/ANALYSE,eemirtekin/edx-platform,ferabra/edx-platform,fintech-circle/edx-platform,hmcmooc/muddx-platform,vasyarv/edx-platform,unicri/edx-platform,devs1991/test_edx_docmode,DNFcode/edx-platform,wwj718/ANALYSE,jazkarta/edx-platform,pepeportela/edx-platform,pomegranited/edx-platform,jbassen/edx-platform,eduNEXT/edunext-platform,Edraak/edraak-platform,ak2703/edx-platform,EduPepperPDTesting/pepper2013-testing,J861449197/edx-platform,vikas1885/test1,beni55/edx-platform,pdehaye/theming-edx-platform,arbrandes/edx-platform,caesar2164/edx-platform,solashirai/edx-platform,nanolearning/edx-platform,utecuy/edx-platform,bitifirefly/edx-platform,gymnasium/edx-platform,knehez/edx-platform,AkA84/edx-platform,solashirai/edx-platform,jelugbo/tundex,jswope00/griffinx,apigee/edx-platform,synergeticsedx/deployment-wipro,iivic/BoiseStateX,Stanford-Online/edx-platform,zubair-arbi/edx-platform,martynovp/edx-platform,kxliugang/edx-platform,MSOpenTech/edx-platform,MSOpenTech/edx-platform,TeachAtTUM/edx-platform,Edraak/circleci-edx-platform,jswope00/griffinx,naresh21/synergetics-edx-platform,BehavioralInsightsTeam/edx-platform,Edraak/edraak-platform,Edraak/circleci-edx-platform,morenopc/edx-platform,shabab12/edx-platform,playm2mboy/edx-platform,bdero/edx-platform,mushtaqak/edx-platform,jamesblunt/edx-platform,WatanabeYasumasa/edx-platform,dsajkl/123,leansoft/edx-platform,MSOpenTech/edx-platform,Softmotions/edx-platform,xuxiao19910803/edx-platform,LICEF/edx-platform,edry/edx-platform,tanmaykm/edx-platform,xuxiao19910803/edx-platform,WatanabeYasumasa/edx-platform,edry/edx-platform,sameetb-cuelogic/edx-platform-test,zadgroup/edx-platform,Kalyzee/edx-platform,nanolearningllc/edx-platform-cypress,jruiperezv/ANALYSE,eestay/edx-platform,TsinghuaX/edx-platform,fintech-circle/edx-platform,kxliugang/edx-platform,longmen21/edx-platform,abdoosh00/edx-rtl-final,jswope00/griffinx,tiagochiavericosta/edx-platform,eemirtekin/edx-platform,xuxiao19910803/edx-platform,jzoldak/edx-platform,DNFcode/edx-platform,atsolakid/edx-platform,dcosentino/edx-platform,Stanford-Online/edx-platform,vasyarv/edx-platform,benpatterson/edx-platform,zofuthan/edx-platform,hamzehd/edx-platform,waheedahmed/edx-platform,cpennington/edx-platform,beni55/edx-platform,xuxiao19910803/edx,ZLLab-Mooc/edx-platform,xingyepei/edx-platform,MakeHer/edx-platform,mcgachey/edx-platform,ahmedaljazzar/edx-platform,openfun/edx-platform,atsolakid/edx-platform,marcore/edx-platform,waheedahmed/edx-platform,defance/edx-platform,kamalx/edx-platform,vikas1885/test1,ubc/edx-platform,benpatterson/edx-platform,devs1991/test_edx_docmode,LearnEra/LearnEraPlaftform,ferabra/edx-platform,amir-qayyum-khan/edx-platform,antoviaque/edx-platform,syjeon/new_edx,Shrhawk/edx-platform,kamalx/edx-platform,eduNEXT/edunext-platform,CredoReference/edx-platform,ampax/edx-platform-backup,olexiim/edx-platform,IITBinterns13/edx-platform-dev,EduPepperPDTesting/pepper2013-testing,antonve/s4-project-mooc,auferack08/edx-platform,eestay/edx-platform,deepsrijit1105/edx-platform,nttks/edx-platform,IONISx/edx-platform,chrisndodge/edx-platform,devs1991/test_edx_docmode,nikolas/edx-platform,Ayub-Khan/edx-platform,motion2015/edx-platform,CredoReference/edx-platform,vismartltd/edx-platform,ampax/edx-platform,pdehaye/theming-edx-platform,Unow/edx-platform,simbs/edx-platform,knehez/edx-pla
tform,nanolearning/edx-platform,4eek/edx-platform,miptliot/edx-platform,nanolearningllc/edx-platform-cypress-2,jamesblunt/edx-platform,raccoongang/edx-platform,chrisndodge/edx-platform,TsinghuaX/edx-platform,ahmedaljazzar/edx-platform,kalebhartje/schoolboost,RPI-OPENEDX/edx-platform,4eek/edx-platform,y12uc231/edx-platform,ampax/edx-platform,pdehaye/theming-edx-platform,zhenzhai/edx-platform,motion2015/a3,amir-qayyum-khan/edx-platform,jazztpt/edx-platform,hkawasaki/kawasaki-aio8-2,martynovp/edx-platform,zhenzhai/edx-platform,IITBinterns13/edx-platform-dev,nanolearningllc/edx-platform-cypress,Softmotions/edx-platform,kamalx/edx-platform,nttks/jenkins-test,B-MOOC/edx-platform,bigdatauniversity/edx-platform,ubc/edx-platform,auferack08/edx-platform,nanolearningllc/edx-platform-cypress,doismellburning/edx-platform,beacloudgenius/edx-platform,vismartltd/edx-platform,gsehub/edx-platform,Ayub-Khan/edx-platform,yokose-ks/edx-platform,jswope00/GAI,hkawasaki/kawasaki-aio8-1,ubc/edx-platform,apigee/edx-platform,wwj718/edx-platform,simbs/edx-platform,hastexo/edx-platform,arbrandes/edx-platform,defance/edx-platform,dcosentino/edx-platform,Livit/Livit.Learn.EdX,jbzdak/edx-platform,simbs/edx-platform,IONISx/edx-platform,torchingloom/edx-platform,kmoocdev2/edx-platform,xingyepei/edx-platform,inares/edx-platform,B-MOOC/edx-platform,jzoldak/edx-platform,nanolearningllc/edx-platform-cypress-2,y12uc231/edx-platform,4eek/edx-platform,cselis86/edx-platform,zubair-arbi/edx-platform,mushtaqak/edx-platform,eestay/edx-platform,JCBarahona/edX,shurihell/testasia,valtech-mooc/edx-platform,antonve/s4-project-mooc,MakeHer/edx-platform,romain-li/edx-platform,appliedx/edx-platform,longmen21/edx-platform,teltek/edx-platform,DNFcode/edx-platform,alu042/edx-platform,zadgroup/edx-platform,SravanthiSinha/edx-platform,ESOedX/edx-platform,Unow/edx-platform,jazkarta/edx-platform-for-isc,bigdatauniversity/edx-platform,shabab12/edx-platform,kalebhartje/schoolboost,ahmadio/edx-platform,beni55/edx-platform,nttks/jenkins-test,xuxiao19910803/edx,don-github/edx-platform,jamiefolsom/edx-platform,abdoosh00/edx-rtl-final,vikas1885/test1,shubhdev/edx-platform,rue89-tech/edx-platform,nanolearningllc/edx-platform-cypress-2,tanmaykm/edx-platform,lduarte1991/edx-platform,franosincic/edx-platform,longmen21/edx-platform,morpheby/levelup-by,rue89-tech/edx-platform,kmoocdev2/edx-platform,appliedx/edx-platform,arifsetiawan/edx-platform,motion2015/edx-platform,peterm-itr/edx-platform,teltek/edx-platform,chand3040/cloud_that,cyanna/edx-platform,beacloudgenius/edx-platform,PepperPD/edx-pepper-platform,bdero/edx-platform,zerobatu/edx-platform,louyihua/edx-platform,mjirayu/sit_academy,AkA84/edx-platform,doismellburning/edx-platform,IndonesiaX/edx-platform,synergeticsedx/deployment-wipro,JCBarahona/edX,Livit/Livit.Learn.EdX,alexthered/kienhoc-platform,shurihell/testasia,atsolakid/edx-platform,kxliugang/edx-platform,inares/edx-platform,MSOpenTech/edx-platform,Ayub-Khan/edx-platform,fly19890211/edx-platform,rismalrv/edx-platform,mushtaqak/edx-platform,nttks/jenkins-test,gymnasium/edx-platform,unicri/edx-platform,doganov/edx-platform,procangroup/edx-platform,andyzsf/edx,Stanford-Online/edx-platform,polimediaupv/edx-platform,hkawasaki/kawasaki-aio8-0,raccoongang/edx-platform,tiagochiavericosta/edx-platform,jamesblunt/edx-platform,Unow/edx-platform,4eek/edx-platform,itsjeyd/edx-platform,shubhdev/openedx,teltek/edx-platform,xuxiao19910803/edx,CourseTalk/edx-platform,dkarakats/edx-platform,fly19890211/edx-platform,JioEducation/edx-platform,gymnasium/edx-platform,Ts
inghuaX/edx-platform,unicri/edx-platform,cyanna/edx-platform,halvertoluke/edx-platform,knehez/edx-platform,hkawasaki/kawasaki-aio8-0,mitocw/edx-platform,caesar2164/edx-platform,appsembler/edx-platform,mjirayu/sit_academy,kmoocdev/edx-platform,jonathan-beard/edx-platform,philanthropy-u/edx-platform,doismellburning/edx-platform,jazkarta/edx-platform,praveen-pal/edx-platform,jazztpt/edx-platform,CourseTalk/edx-platform,hkawasaki/kawasaki-aio8-1,CourseTalk/edx-platform,vikas1885/test1,J861449197/edx-platform,ahmadiga/min_edx,kursitet/edx-platform,kalebhartje/schoolboost,doganov/edx-platform,zubair-arbi/edx-platform,antoviaque/edx-platform,romain-li/edx-platform,edry/edx-platform,mahendra-r/edx-platform,zerobatu/edx-platform,jazkarta/edx-platform-for-isc,olexiim/edx-platform,kamalx/edx-platform,jswope00/GAI,zhenzhai/edx-platform,cyanna/edx-platform,alu042/edx-platform,louyihua/edx-platform,xinjiguaike/edx-platform,kmoocdev2/edx-platform,dsajkl/reqiop,UOMx/edx-platform,jjmiranda/edx-platform,bdero/edx-platform,ferabra/edx-platform,Edraak/edx-platform,SravanthiSinha/edx-platform,zerobatu/edx-platform,kmoocdev2/edx-platform,Livit/Livit.Learn.EdX,msegado/edx-platform,Kalyzee/edx-platform,Kalyzee/edx-platform,jazkarta/edx-platform,zerobatu/edx-platform,angelapper/edx-platform,jonathan-beard/edx-platform,EduPepperPDTesting/pepper2013-testing,JioEducation/edx-platform,Semi-global/edx-platform,olexiim/edx-platform,auferack08/edx-platform,rationalAgent/edx-platform-custom,nanolearning/edx-platform,louyihua/edx-platform,adoosii/edx-platform,bitifirefly/edx-platform,WatanabeYasumasa/edx-platform,rismalrv/edx-platform,chand3040/cloud_that,ahmadiga/min_edx,pelikanchik/edx-platform,Stanford-Online/edx-platform,cognitiveclass/edx-platform,zadgroup/edx-platform,prarthitm/edxplatform,valtech-mooc/edx-platform,apigee/edx-platform,syjeon/new_edx,wwj718/edx-platform,mjg2203/edx-platform-seas,shurihell/testasia,edx-solutions/edx-platform,nanolearning/edx-platform,leansoft/edx-platform,defance/edx-platform,Shrhawk/edx-platform,inares/edx-platform,bigdatauniversity/edx-platform,pepeportela/edx-platform,Lektorium-LLC/edx-platform,procangroup/edx-platform,hkawasaki/kawasaki-aio8-0,eduNEXT/edx-platform,beacloudgenius/edx-platform,jolyonb/edx-platform,valtech-mooc/edx-platform,jazkarta/edx-platform,solashirai/edx-platform,nttks/edx-platform,eduNEXT/edx-platform,carsongee/edx-platform,halvertoluke/edx-platform,msegado/edx-platform,BehavioralInsightsTeam/edx-platform,a-parhom/edx-platform,xinjiguaike/edx-platform,OmarIthawi/edx-platform,MakeHer/edx-platform,morenopc/edx-platform,ak2703/edx-platform,Shrhawk/edx-platform,pomegranited/edx-platform,RPI-OPENEDX/edx-platform,gymnasium/edx-platform,wwj718/ANALYSE,ZLLab-Mooc/edx-platform,peterm-itr/edx-platform,proversity-org/edx-platform,kxliugang/edx-platform,arifsetiawan/edx-platform,jswope00/griffinx,olexiim/edx-platform,cognitiveclass/edx-platform,andyzsf/edx,torchingloom/edx-platform,ferabra/edx-platform,wwj718/edx-platform,nanolearningllc/edx-platform-cypress-2,mjirayu/sit_academy,beni55/edx-platform,pabloborrego93/edx-platform,cecep-edu/edx-platform,Shrhawk/edx-platform,itsjeyd/edx-platform,UXE/local-edx,Lektorium-LLC/edx-platform,hamzehd/edx-platform,motion2015/a3,leansoft/edx-platform,martynovp/edx-platform,jelugbo/tundex,IONISx/edx-platform,shubhdev/edx-platform,UOMx/edx-platform,WatanabeYasumasa/edx-platform,chauhanhardik/populo,Edraak/edraak-platform,jruiperezv/ANALYSE,jbassen/edx-platform,mitocw/edx-platform,yokose-ks/edx-platform,analyseuc3m/ANALYSE-v1,DefyVentures/e
dx-platform,appsembler/edx-platform,etzhou/edx-platform,SravanthiSinha/edx-platform,rhndg/openedx,arifsetiawan/edx-platform,martynovp/edx-platform,inares/edx-platform,Shrhawk/edx-platform,Edraak/circleci-edx-platform
|
Add tests for django-comment-client middleware
|
import string
import random
import collections
from django.test import TestCase
import comment_client
import django.http
import django_comment_client.middleware as middleware
class AjaxExceptionTestCase(TestCase):
# TODO: check whether the correct error message is produced.
# The error message should be the same as the argument to CommentClientError
def setUp(self):
self.a = middleware.AjaxExceptionMiddleware()
self.request1 = django.http.HttpRequest()
self.request0 = django.http.HttpRequest()
self.exception1 = comment_client.CommentClientError('{}')
self.exception0 = ValueError()
self.request1.META['HTTP_X_REQUESTED_WITH'] = "XMLHttpRequest"
self.request0.META['HTTP_X_REQUESTED_WITH'] = "SHADOWFAX"
def test_process_exception(self):
self.assertIsInstance(self.a.process_exception(self.request1, self.exception1), middleware.JsonError)
self.assertIsNone(self.a.process_exception(self.request1, self.exception0))
self.assertIsNone(self.a.process_exception(self.request0, self.exception1))
self.assertIsNone(self.a.process_exception(self.request0, self.exception0))
|
<commit_before><commit_msg>Add tests for django-comment-client middleware<commit_after>
|
import string
import random
import collections
from django.test import TestCase
import comment_client
import django.http
import django_comment_client.middleware as middleware
class AjaxExceptionTestCase(TestCase):
# TODO: check whether the correct error message is produced.
# The error message should be the same as the argument to CommentClientError
def setUp(self):
self.a = middleware.AjaxExceptionMiddleware()
self.request1 = django.http.HttpRequest()
self.request0 = django.http.HttpRequest()
self.exception1 = comment_client.CommentClientError('{}')
self.exception0 = ValueError()
self.request1.META['HTTP_X_REQUESTED_WITH'] = "XMLHttpRequest"
self.request0.META['HTTP_X_REQUESTED_WITH'] = "SHADOWFAX"
def test_process_exception(self):
self.assertIsInstance(self.a.process_exception(self.request1, self.exception1), middleware.JsonError)
self.assertIsNone(self.a.process_exception(self.request1, self.exception0))
self.assertIsNone(self.a.process_exception(self.request0, self.exception1))
self.assertIsNone(self.a.process_exception(self.request0, self.exception0))
|
Add tests for django-comment-client middlewareimport string
import random
import collections
from django.test import TestCase
import comment_client
import django.http
import django_comment_client.middleware as middleware
class AjaxExceptionTestCase(TestCase):
# TODO: check whether the correct error message is produced.
# The error message should be the same as the argument to CommentClientError
def setUp(self):
self.a = middleware.AjaxExceptionMiddleware()
self.request1 = django.http.HttpRequest()
self.request0 = django.http.HttpRequest()
self.exception1 = comment_client.CommentClientError('{}')
self.exception0 = ValueError()
self.request1.META['HTTP_X_REQUESTED_WITH'] = "XMLHttpRequest"
self.request0.META['HTTP_X_REQUESTED_WITH'] = "SHADOWFAX"
def test_process_exception(self):
self.assertIsInstance(self.a.process_exception(self.request1, self.exception1), middleware.JsonError)
self.assertIsNone(self.a.process_exception(self.request1, self.exception0))
self.assertIsNone(self.a.process_exception(self.request0, self.exception1))
self.assertIsNone(self.a.process_exception(self.request0, self.exception0))
|
<commit_before><commit_msg>Add tests for django-comment-client middleware<commit_after>import string
import random
import collections
from django.test import TestCase
import comment_client
import django.http
import django_comment_client.middleware as middleware
class AjaxExceptionTestCase(TestCase):
# TODO: check whether the correct error message is produced.
# The error message should be the same as the argument to CommentClientError
def setUp(self):
self.a = middleware.AjaxExceptionMiddleware()
self.request1 = django.http.HttpRequest()
self.request0 = django.http.HttpRequest()
self.exception1 = comment_client.CommentClientError('{}')
self.exception0 = ValueError()
self.request1.META['HTTP_X_REQUESTED_WITH'] = "XMLHttpRequest"
self.request0.META['HTTP_X_REQUESTED_WITH'] = "SHADOWFAX"
def test_process_exception(self):
self.assertIsInstance(self.a.process_exception(self.request1, self.exception1), middleware.JsonError)
self.assertIsNone(self.a.process_exception(self.request1, self.exception0))
self.assertIsNone(self.a.process_exception(self.request0, self.exception1))
self.assertIsNone(self.a.process_exception(self.request0, self.exception0))
|
|
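For context, a minimal sketch of the middleware these tests exercise is shown below. It is a reconstruction, not the actual django_comment_client implementation: the JsonError payload shape and constructor are assumptions made only so the snippet is self-contained and matches what the assertions check.

import json

import comment_client
from django.http import HttpResponse


class JsonError(HttpResponse):
    # Hypothetical: serialize error messages as a JSON body with a 400 status.
    def __init__(self, error_messages=(), status=400):
        content = json.dumps({'errors': list(error_messages)})
        super(JsonError, self).__init__(content, content_type='application/json', status=status)


class AjaxExceptionMiddleware(object):
    def process_exception(self, request, exception):
        # Convert comment-service failures into JSON responses, but only for
        # AJAX requests; any other combination returns None so Django's normal
        # exception handling applies, which is exactly what the tests assert.
        if (isinstance(exception, comment_client.CommentClientError) and
                request.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest'):
            return JsonError(json.loads(str(exception)))
        return None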
03c5da23653d99d7d01c29b6c66204bb21d9467d
|
test_infra/test_cdn.py
|
test_infra/test_cdn.py
|
import pytest
import requests
MLS_URL = 'https://location.services.mozilla.com/v1/country' \
'?key=ec4d0c4b-b9ac-4d72-9197-289160930e14'
@pytest.mark.parametrize('url', (
'/',
'/firefox/',
'/firefox/new/',
'/about/',
))
@pytest.mark.nondestructive
def test_locale_redirect(url, base_url):
resp = requests.get(f'{base_url}{url}',
allow_redirects=False,
headers={'accept-language': 'de'})
assert resp.status_code == 301
assert 'accept-language' in resp.headers['vary'].lower()
assert resp.headers['location'].startswith('/de/')
@pytest.mark.parametrize('url', (
# only in s3
'/media/contentcards/img/home-en/card_2/card_2.73be009fe44e.jpg',
# comes from bedrock
'/media/protocol/img/logos/mozilla/black.40d1af88c248.svg',
))
@pytest.mark.nondestructive
def test_media(url, base_url):
"""Verify that media is well cached and loaded from s3"""
url = f'{base_url}{url}'
resp = requests.head(url)
assert resp.status_code == 200
assert resp.headers['cache-control'] == 'max-age=315360000, public, immutable'
# this means it came from s3
assert 'x-amz-version-id' in resp.headers
# cloudfront
assert 'x-cache' in resp.headers
assert 'x-amz-cf-id' in resp.headers
assert 'cloudfront' in resp.headers['x-cache']
@pytest.mark.nondestructive
def test_geo(base_url):
"""Make sure our geo results match MLS no matter where they're run"""
cdn_url = f'{base_url}/country-code.json'
mls_country = requests.get(MLS_URL).json()['country_code']
cdn_country = requests.get(cdn_url).json()['country_code']
assert cdn_country == mls_country
|
Add tests for CDN infra
|
Add tests for CDN infra
Add tests for:
* /media/* urls
* locale redirects and vary headers
* geolocation
|
Python
|
mpl-2.0
|
mozilla/bedrock,mozilla/bedrock,mozilla/bedrock,craigcook/bedrock,alexgibson/bedrock,MichaelKohler/bedrock,alexgibson/bedrock,pascalchevrel/bedrock,sylvestre/bedrock,mozilla/bedrock,pascalchevrel/bedrock,MichaelKohler/bedrock,flodolo/bedrock,sylvestre/bedrock,craigcook/bedrock,craigcook/bedrock,alexgibson/bedrock,flodolo/bedrock,alexgibson/bedrock,MichaelKohler/bedrock,pascalchevrel/bedrock,sylvestre/bedrock,pascalchevrel/bedrock,craigcook/bedrock,flodolo/bedrock,flodolo/bedrock,sylvestre/bedrock,MichaelKohler/bedrock
|
Add tests for CDN infra
Add tests for:
* /media/* urls
* locale redirects and vary headers
* geolocation
|
import pytest
import requests
MLS_URL = 'https://location.services.mozilla.com/v1/country' \
'?key=ec4d0c4b-b9ac-4d72-9197-289160930e14'
@pytest.mark.parametrize('url', (
'/',
'/firefox/',
'/firefox/new/',
'/about/',
))
@pytest.mark.nondestructive
def test_locale_redirect(url, base_url):
resp = requests.get(f'{base_url}{url}',
allow_redirects=False,
headers={'accept-language': 'de'})
assert resp.status_code == 301
assert 'accept-language' in resp.headers['vary'].lower()
assert resp.headers['location'].startswith('/de/')
@pytest.mark.parametrize('url', (
# only in s3
'/media/contentcards/img/home-en/card_2/card_2.73be009fe44e.jpg',
# comes from bedrock
'/media/protocol/img/logos/mozilla/black.40d1af88c248.svg',
))
@pytest.mark.nondestructive
def test_media(url, base_url):
"""Verify that media is well cached and loaded from s3"""
url = f'{base_url}{url}'
resp = requests.head(url)
assert resp.status_code == 200
assert resp.headers['cache-control'] == 'max-age=315360000, public, immutable'
# this means it came from s3
assert 'x-amz-version-id' in resp.headers
# cloudfront
assert 'x-cache' in resp.headers
assert 'x-amz-cf-id' in resp.headers
assert 'cloudfront' in resp.headers['x-cache']
@pytest.mark.nondestructive
def test_geo(base_url):
"""Make sure our geo results match MLS no matter where they're run"""
cdn_url = f'{base_url}/country-code.json'
mls_country = requests.get(MLS_URL).json()['country_code']
cdn_country = requests.get(cdn_url).json()['country_code']
assert cdn_country == mls_country
|
<commit_before><commit_msg>Add tests for CDN infra
Add tests for:
* /media/* urls
* locale redirects and vary headers
* geolocation<commit_after>
|
import pytest
import requests
MLS_URL = 'https://location.services.mozilla.com/v1/country' \
'?key=ec4d0c4b-b9ac-4d72-9197-289160930e14'
@pytest.mark.parametrize('url', (
'/',
'/firefox/',
'/firefox/new/',
'/about/',
))
@pytest.mark.nondestructive
def test_locale_redirect(url, base_url):
resp = requests.get(f'{base_url}{url}',
allow_redirects=False,
headers={'accept-language': 'de'})
assert resp.status_code == 301
assert 'accept-language' in resp.headers['vary'].lower()
assert resp.headers['location'].startswith('/de/')
@pytest.mark.parametrize('url', (
# only in s3
'/media/contentcards/img/home-en/card_2/card_2.73be009fe44e.jpg',
# comes from bedrock
'/media/protocol/img/logos/mozilla/black.40d1af88c248.svg',
))
@pytest.mark.nondestructive
def test_media(url, base_url):
"""Verify that media is well cached and loaded from s3"""
url = f'{base_url}{url}'
resp = requests.head(url)
assert resp.status_code == 200
assert resp.headers['cache-control'] == 'max-age=315360000, public, immutable'
# this means it came from s3
assert 'x-amz-version-id' in resp.headers
# cloudfront
assert 'x-cache' in resp.headers
assert 'x-amz-cf-id' in resp.headers
assert 'cloudfront' in resp.headers['x-cache']
@pytest.mark.nondestructive
def test_geo(base_url):
"""Make sure our geo results match MLS no matter where they're run"""
cdn_url = f'{base_url}/country-code.json'
mls_country = requests.get(MLS_URL).json()['country_code']
cdn_country = requests.get(cdn_url).json()['country_code']
assert cdn_country == mls_country
|
Add tests for CDN infra
Add tests for:
* /media/* urls
* locale redirects and vary headers
* geolocationimport pytest
import requests
MLS_URL = 'https://location.services.mozilla.com/v1/country' \
'?key=ec4d0c4b-b9ac-4d72-9197-289160930e14'
@pytest.mark.parametrize('url', (
'/',
'/firefox/',
'/firefox/new/',
'/about/',
))
@pytest.mark.nondestructive
def test_locale_redirect(url, base_url):
resp = requests.get(f'{base_url}{url}',
allow_redirects=False,
headers={'accept-language': 'de'})
assert resp.status_code == 301
assert 'accept-language' in resp.headers['vary'].lower()
assert resp.headers['location'].startswith('/de/')
@pytest.mark.parametrize('url', (
# only in s3
'/media/contentcards/img/home-en/card_2/card_2.73be009fe44e.jpg',
# comes from bedrock
'/media/protocol/img/logos/mozilla/black.40d1af88c248.svg',
))
@pytest.mark.nondestructive
def test_media(url, base_url):
"""Verify that media is well cached and loaded from s3"""
url = f'{base_url}{url}'
resp = requests.head(url)
assert resp.status_code == 200
assert resp.headers['cache-control'] == 'max-age=315360000, public, immutable'
# this means it came from s3
assert 'x-amz-version-id' in resp.headers
# cloudfront
assert 'x-cache' in resp.headers
assert 'x-amz-cf-id' in resp.headers
assert 'cloudfront' in resp.headers['x-cache']
@pytest.mark.nondestructive
def test_geo(base_url):
"""Make sure our geo results match MLS no matter where they're run"""
cdn_url = f'{base_url}/country-code.json'
mls_country = requests.get(MLS_URL).json()['country_code']
cdn_country = requests.get(cdn_url).json()['country_code']
assert cdn_country == mls_country
|
<commit_before><commit_msg>Add tests for CDN infra
Add tests for:
* /media/* urls
* locale redirects and vary headers
* geolocation<commit_after>import pytest
import requests
MLS_URL = 'https://location.services.mozilla.com/v1/country' \
'?key=ec4d0c4b-b9ac-4d72-9197-289160930e14'
@pytest.mark.parametrize('url', (
'/',
'/firefox/',
'/firefox/new/',
'/about/',
))
@pytest.mark.nondestructive
def test_locale_redirect(url, base_url):
resp = requests.get(f'{base_url}{url}',
allow_redirects=False,
headers={'accept-language': 'de'})
assert resp.status_code == 301
assert 'accept-language' in resp.headers['vary'].lower()
assert resp.headers['location'].startswith('/de/')
@pytest.mark.parametrize('url', (
# only in s3
'/media/contentcards/img/home-en/card_2/card_2.73be009fe44e.jpg',
# comes from bedrock
'/media/protocol/img/logos/mozilla/black.40d1af88c248.svg',
))
@pytest.mark.nondestructive
def test_media(url, base_url):
"""Verify that media is well cached and loaded from s3"""
url = f'{base_url}{url}'
resp = requests.head(url)
assert resp.status_code == 200
assert resp.headers['cache-control'] == 'max-age=315360000, public, immutable'
# this means it came from s3
assert 'x-amz-version-id' in resp.headers
# cloudfront
assert 'x-cache' in resp.headers
assert 'x-amz-cf-id' in resp.headers
assert 'cloudfront' in resp.headers['x-cache']
@pytest.mark.nondestructive
def test_geo(base_url):
"""Make sure our geo results match MLS no matter where they're run"""
cdn_url = f'{base_url}/country-code.json'
mls_country = requests.get(MLS_URL).json()['country_code']
cdn_country = requests.get(cdn_url).json()['country_code']
assert cdn_country == mls_country
|
|
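The tests above depend on a base_url fixture supplied by the test harness. A minimal sketch of how such a fixture could be wired up in a conftest.py follows; the option name and default host are assumptions for illustration, since the real project typically gets base_url from the pytest-base-url plugin rather than hand-rolled code like this.

# conftest.py -- hypothetical wiring for the base_url fixture used above.
import pytest


def pytest_addoption(parser):
    parser.addoption('--base-url', action='store',
                     default='https://www.mozilla.org',
                     help='root URL of the deployment under test')


@pytest.fixture
def base_url(request):
    # Strip any trailing slash so f'{base_url}{url}' composes cleanly.
    return request.config.getoption('--base-url').rstrip('/')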
a2353846c97f94a23c01a51f8589cd3b8ff17376
|
test/test_receiver.py
|
test/test_receiver.py
|
import os
import tempfile
import unittest
import bin.receiver
from ssm.ssm2 import Ssm2Exception
class getDNsTest(unittest.TestCase):
def setUp(self):
self.tf, self.tf_path = tempfile.mkstemp()
os.close(self.tf)
def test_empty_dns_file(self):
self.assertRaises(Ssm2Exception, bin.receiver.get_dns, self.tf_path)
def tearDown(self):
os.remove(self.tf_path)
if __name__ == '__main__':
unittest.main()
|
Add skeleton test for bin/receiver
|
Add skeleton test for bin/receiver
- Add skeleton test for bin/receiver so that the coverage stats become
more accurate.
|
Python
|
apache-2.0
|
tofu-rocketry/ssm,apel/ssm,tofu-rocketry/ssm,stfc/ssm,stfc/ssm,apel/ssm
|
Add skeleton test for bin/receiver
- Add skeleton test for bin/receiver so that the coverage stats become
more accurate.
|
import os
import tempfile
import unittest
import bin.receiver
from ssm.ssm2 import Ssm2Exception
class getDNsTest(unittest.TestCase):
def setUp(self):
self.tf, self.tf_path = tempfile.mkstemp()
os.close(self.tf)
def test_empty_dns_file(self):
self.assertRaises(Ssm2Exception, bin.receiver.get_dns, self.tf_path)
def tearDown(self):
os.remove(self.tf_path)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add skeleton test for bin/receiver
- Add skeleton test for bin/receiver so that the coverage stats become
more accurate.<commit_after>
|
import os
import tempfile
import unittest
import bin.receiver
from ssm.ssm2 import Ssm2Exception
class getDNsTest(unittest.TestCase):
def setUp(self):
self.tf, self.tf_path = tempfile.mkstemp()
os.close(self.tf)
def test_empty_dns_file(self):
self.assertRaises(Ssm2Exception, bin.receiver.get_dns, self.tf_path)
def tearDown(self):
os.remove(self.tf_path)
if __name__ == '__main__':
unittest.main()
|
Add skeleton test for bin/receiver
- Add skeleton test for bin/receiver so that the coverage stats become
more accurate.import os
import tempfile
import unittest
import bin.receiver
from ssm.ssm2 import Ssm2Exception
class getDNsTest(unittest.TestCase):
def setUp(self):
self.tf, self.tf_path = tempfile.mkstemp()
os.close(self.tf)
def test_empty_dns_file(self):
self.assertRaises(Ssm2Exception, bin.receiver.get_dns, self.tf_path)
def tearDown(self):
os.remove(self.tf_path)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add skeleton test for bin/receiver
- Add skeleton test for bin/receiver so that the coverage stats become
more accurate.<commit_after>import os
import tempfile
import unittest
import bin.receiver
from ssm.ssm2 import Ssm2Exception
class getDNsTest(unittest.TestCase):
def setUp(self):
self.tf, self.tf_path = tempfile.mkstemp()
os.close(self.tf)
def test_empty_dns_file(self):
self.assertRaises(Ssm2Exception, bin.receiver.get_dns, self.tf_path)
def tearDown(self):
os.remove(self.tf_path)
if __name__ == '__main__':
unittest.main()
|
|
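For orientation, the skeleton test only pins down one behaviour: get_dns must raise Ssm2Exception when the DNs file yields no entries. A hypothetical implementation satisfying it could look like the sketch below; the parsing rules and message text are illustrative, not the actual ssm code.

from ssm.ssm2 import Ssm2Exception


def get_dns(dn_file):
    # Read one DN per line, skipping blank lines; an empty result is fatal.
    with open(dn_file) as f:
        dns = [line.strip() for line in f if line.strip()]
    if not dns:
        raise Ssm2Exception('No DNs found in %s' % dn_file)
    return dns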
b9a9a684d94efe94523a4172ce0074ede906b5bc
|
tests/test_profiles.py
|
tests/test_profiles.py
|
import numpy as np
import unittest
from desc.io import InputReader
from desc.equilibrium import Equilibrium
from desc.profiles import PowerSeriesProfile
class TestProfiles(unittest.TestCase):
def test_same_result(self):
input_path = "examples/DESC/SOLOVEV"
ir = InputReader(input_path)
eq1 = Equilibrium(ir.inputs[-1])
eq2 = eq1.copy()
eq2.pressure = eq1.pressure.to_spline()
eq2.iota = eq1.iota.to_spline()
eq1.solve()
eq2.solve()
np.testing.assert_allclose(
eq1.x,
eq2.x,
rtol=1e-05,
atol=1e-08,
)
def test_close_values(self):
pp = PowerSeriesProfile(np.array([0, 2, 4]), np.array([1, -2, 1]))
sp = pp.to_spline()
x = np.linspace(0, 1, 100)
np.testing.assert_allclose(pp.compute(x), sp.compute(x), rtol=1e-5, atol=1e-3)
pp1 = sp.to_powerseries(order=4)
np.testing.assert_allclose(pp.coeffs, pp1.coeffs, rtol=1e-5, atol=1e-2)
|
Add tests for profile classes
|
Add tests for profile classes
|
Python
|
mit
|
PlasmaControl/DESC,PlasmaControl/DESC
|
Add tests for profile classes
|
import numpy as np
import unittest
from desc.io import InputReader
from desc.equilibrium import Equilibrium
from desc.profiles import PowerSeriesProfile
class TestProfiles(unittest.TestCase):
def test_same_result(self):
input_path = "examples/DESC/SOLOVEV"
ir = InputReader(input_path)
eq1 = Equilibrium(ir.inputs[-1])
eq2 = eq1.copy()
eq2.pressure = eq1.pressure.to_spline()
eq2.iota = eq1.iota.to_spline()
eq1.solve()
eq2.solve()
np.testing.assert_allclose(
eq1.x,
eq2.x,
rtol=1e-05,
atol=1e-08,
)
def test_close_values(self):
pp = PowerSeriesProfile(np.array([0, 2, 4]), np.array([1, -2, 1]))
sp = pp.to_spline()
x = np.linspace(0, 1, 100)
np.testing.assert_allclose(pp.compute(x), sp.compute(x), rtol=1e-5, atol=1e-3)
pp1 = sp.to_powerseries(order=4)
np.testing.assert_allclose(pp.coeffs, pp1.coeffs, rtol=1e-5, atol=1e-2)
|
<commit_before><commit_msg>Add tests for profile classes<commit_after>
|
import numpy as np
import unittest
from desc.io import InputReader
from desc.equilibrium import Equilibrium
from desc.profiles import PowerSeriesProfile
class TestProfiles(unittest.TestCase):
def test_same_result(self):
input_path = "examples/DESC/SOLOVEV"
ir = InputReader(input_path)
eq1 = Equilibrium(ir.inputs[-1])
eq2 = eq1.copy()
eq2.pressure = eq1.pressure.to_spline()
eq2.iota = eq1.iota.to_spline()
eq1.solve()
eq2.solve()
np.testing.assert_allclose(
eq1.x,
eq2.x,
rtol=1e-05,
atol=1e-08,
)
def test_close_values(self):
pp = PowerSeriesProfile(np.array([0, 2, 4]), np.array([1, -2, 1]))
sp = pp.to_spline()
x = np.linspace(0, 1, 100)
np.testing.assert_allclose(pp.compute(x), sp.compute(x), rtol=1e-5, atol=1e-3)
pp1 = sp.to_powerseries(order=4)
np.testing.assert_allclose(pp.coeffs, pp1.coeffs, rtol=1e-5, atol=1e-2)
|
Add tests for profile classesimport numpy as np
import unittest
from desc.io import InputReader
from desc.equilibrium import Equilibrium
from desc.profiles import PowerSeriesProfile
class TestProfiles(unittest.TestCase):
def test_same_result(self):
input_path = "examples/DESC/SOLOVEV"
ir = InputReader(input_path)
eq1 = Equilibrium(ir.inputs[-1])
eq2 = eq1.copy()
eq2.pressure = eq1.pressure.to_spline()
eq2.iota = eq1.iota.to_spline()
eq1.solve()
eq2.solve()
np.testing.assert_allclose(
eq1.x,
eq2.x,
rtol=1e-05,
atol=1e-08,
)
def test_close_values(self):
pp = PowerSeriesProfile(np.array([0, 2, 4]), np.array([1, -2, 1]))
sp = pp.to_spline()
x = np.linspace(0, 1, 100)
np.testing.assert_allclose(pp.compute(x), sp.compute(x), rtol=1e-5, atol=1e-3)
pp1 = sp.to_powerseries(order=4)
np.testing.assert_allclose(pp.coeffs, pp1.coeffs, rtol=1e-5, atol=1e-2)
|
<commit_before><commit_msg>Add tests for profile classes<commit_after>import numpy as np
import unittest
from desc.io import InputReader
from desc.equilibrium import Equilibrium
from desc.profiles import PowerSeriesProfile
class TestProfiles(unittest.TestCase):
def test_same_result(self):
input_path = "examples/DESC/SOLOVEV"
ir = InputReader(input_path)
eq1 = Equilibrium(ir.inputs[-1])
eq2 = eq1.copy()
eq2.pressure = eq1.pressure.to_spline()
eq2.iota = eq1.iota.to_spline()
eq1.solve()
eq2.solve()
np.testing.assert_allclose(
eq1.x,
eq2.x,
rtol=1e-05,
atol=1e-08,
)
def test_close_values(self):
pp = PowerSeriesProfile(np.array([0, 2, 4]), np.array([1, -2, 1]))
sp = pp.to_spline()
x = np.linspace(0, 1, 100)
np.testing.assert_allclose(pp.compute(x), sp.compute(x), rtol=1e-5, atol=1e-3)
pp1 = sp.to_powerseries(order=4)
np.testing.assert_allclose(pp.coeffs, pp1.coeffs, rtol=1e-5, atol=1e-2)
|
|
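As a usage note, the round-trip exercised by test_close_values can be reproduced interactively. The constructor call below simply mirrors the one in the test; consult the desc.profiles docstrings for the exact meaning of the two argument arrays before relying on this sketch.

import numpy as np
from desc.profiles import PowerSeriesProfile

pp = PowerSeriesProfile(np.array([0, 2, 4]), np.array([1, -2, 1]))  # same call as the test
sp = pp.to_spline()
x = np.linspace(0, 1, 5)
print(np.max(np.abs(pp.compute(x) - sp.compute(x))))  # small: the spline tracks the series
pp2 = sp.to_powerseries(order=4)
print(pp2.coeffs)  # close to the original power-series coefficients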
72c085ac3fde2a294a09b2065e1fe38a33f8841c
|
functest/tests/unit/features/test_odl_sfc.py
|
functest/tests/unit/features/test_odl_sfc.py
|
#!/usr/bin/env python
# Copyright (c) 2017 Orange and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
# pylint: disable=missing-docstring
import logging
import unittest
from functest.opnfv_tests.features import odl_sfc
from functest.utils import constants
class OpenDaylightSFCTesting(unittest.TestCase):
logging.disable(logging.CRITICAL)
def setUp(self):
self.odl_sfc = odl_sfc.OpenDaylightSFC()
def test_init(self):
self.assertEqual(self.odl_sfc.project_name, "sfc")
self.assertEqual(self.odl_sfc.case_name, "functest-odl-sfc")
self.assertEqual(
self.odl_sfc.repo,
constants.CONST.__getattribute__("dir_repo_sfc"))
dir_sfc_functest = '{}/sfc/tests/functest'.format(self.odl_sfc.repo)
self.assertEqual(
self.odl_sfc.cmd,
'cd {} && python ./run_tests.py'.format(dir_sfc_functest))
if __name__ == "__main__":
unittest.main(verbosity=2)
|
Add unit tests for odl_sfc
|
Add unit tests for odl_sfc
Change-Id: I8eb037a8c2427695d42207897064b79cb2b03a5d
Signed-off-by: Cédric Ollivier <d48310251a4a484d041bc5d09a9ac4d86d20f793@orange.com>
|
Python
|
apache-2.0
|
opnfv/functest,opnfv/functest,mywulin/functest,mywulin/functest
|
Add unit tests for odl_sfc
Change-Id: I8eb037a8c2427695d42207897064b79cb2b03a5d
Signed-off-by: Cédric Ollivier <d48310251a4a484d041bc5d09a9ac4d86d20f793@orange.com>
|
#!/usr/bin/env python
# Copyright (c) 2017 Orange and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
# pylint: disable=missing-docstring
import logging
import unittest
from functest.opnfv_tests.features import odl_sfc
from functest.utils import constants
class OpenDaylightSFCTesting(unittest.TestCase):
logging.disable(logging.CRITICAL)
def setUp(self):
self.odl_sfc = odl_sfc.OpenDaylightSFC()
def test_init(self):
self.assertEqual(self.odl_sfc.project_name, "sfc")
self.assertEqual(self.odl_sfc.case_name, "functest-odl-sfc")
self.assertEqual(
self.odl_sfc.repo,
constants.CONST.__getattribute__("dir_repo_sfc"))
dir_sfc_functest = '{}/sfc/tests/functest'.format(self.odl_sfc.repo)
self.assertEqual(
self.odl_sfc.cmd,
'cd {} && python ./run_tests.py'.format(dir_sfc_functest))
if __name__ == "__main__":
unittest.main(verbosity=2)
|
<commit_before><commit_msg>Add unit tests for odl_sfc
Change-Id: I8eb037a8c2427695d42207897064b79cb2b03a5d
Signed-off-by: Cédric Ollivier <d48310251a4a484d041bc5d09a9ac4d86d20f793@orange.com><commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2017 Orange and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
# pylint: disable=missing-docstring
import logging
import unittest
from functest.opnfv_tests.features import odl_sfc
from functest.utils import constants
class OpenDaylightSFCTesting(unittest.TestCase):
logging.disable(logging.CRITICAL)
def setUp(self):
self.odl_sfc = odl_sfc.OpenDaylightSFC()
def test_init(self):
self.assertEqual(self.odl_sfc.project_name, "sfc")
self.assertEqual(self.odl_sfc.case_name, "functest-odl-sfc")
self.assertEqual(
self.odl_sfc.repo,
constants.CONST.__getattribute__("dir_repo_sfc"))
dir_sfc_functest = '{}/sfc/tests/functest'.format(self.odl_sfc.repo)
self.assertEqual(
self.odl_sfc.cmd,
'cd {} && python ./run_tests.py'.format(dir_sfc_functest))
if __name__ == "__main__":
unittest.main(verbosity=2)
|
Add unit tests for odl_sfc
Change-Id: I8eb037a8c2427695d42207897064b79cb2b03a5d
Signed-off-by: Cédric Ollivier <d48310251a4a484d041bc5d09a9ac4d86d20f793@orange.com>#!/usr/bin/env python
# Copyright (c) 2017 Orange and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
# pylint: disable=missing-docstring
import logging
import unittest
from functest.opnfv_tests.features import odl_sfc
from functest.utils import constants
class OpenDaylightSFCTesting(unittest.TestCase):
logging.disable(logging.CRITICAL)
def setUp(self):
self.odl_sfc = odl_sfc.OpenDaylightSFC()
def test_init(self):
self.assertEqual(self.odl_sfc.project_name, "sfc")
self.assertEqual(self.odl_sfc.case_name, "functest-odl-sfc")
self.assertEqual(
self.odl_sfc.repo,
constants.CONST.__getattribute__("dir_repo_sfc"))
dir_sfc_functest = '{}/sfc/tests/functest'.format(self.odl_sfc.repo)
self.assertEqual(
self.odl_sfc.cmd,
'cd {} && python ./run_tests.py'.format(dir_sfc_functest))
if __name__ == "__main__":
unittest.main(verbosity=2)
|
<commit_before><commit_msg>Add unit tests for odl_sfc
Change-Id: I8eb037a8c2427695d42207897064b79cb2b03a5d
Signed-off-by: Cédric Ollivier <d48310251a4a484d041bc5d09a9ac4d86d20f793@orange.com><commit_after>#!/usr/bin/env python
# Copyright (c) 2017 Orange and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
# pylint: disable=missing-docstring
import logging
import unittest
from functest.opnfv_tests.features import odl_sfc
from functest.utils import constants
class OpenDaylightSFCTesting(unittest.TestCase):
logging.disable(logging.CRITICAL)
def setUp(self):
self.odl_sfc = odl_sfc.OpenDaylightSFC()
def test_init(self):
self.assertEqual(self.odl_sfc.project_name, "sfc")
self.assertEqual(self.odl_sfc.case_name, "functest-odl-sfc")
self.assertEqual(
self.odl_sfc.repo,
constants.CONST.__getattribute__("dir_repo_sfc"))
dir_sfc_functest = '{}/sfc/tests/functest'.format(self.odl_sfc.repo)
self.assertEqual(
self.odl_sfc.cmd,
'cd {} && python ./run_tests.py'.format(dir_sfc_functest))
if __name__ == "__main__":
unittest.main(verbosity=2)
|
|
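One small readability note on the test above: constants.CONST.__getattribute__("dir_repo_sfc") is just the explicit spelling of ordinary attribute lookup, so getattr is the idiomatic equivalent. The self-contained sketch below demonstrates the equivalence; the Const class and path are invented for the example.

class Const(object):
    dir_repo_sfc = '/home/opnfv/repos/sfc'  # illustrative value only


c = Const()
assert c.__getattribute__('dir_repo_sfc') == getattr(c, 'dir_repo_sfc')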
e069020558c53b24eb164aaaf6124130fdd2daab
|
shop/models/fields.py
|
shop/models/fields.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
POSTGRES_FALG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FALG = True
try:
if POSTGRES_FALG:
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
|
Add the new JSONField Wrapper
|
Add the new JSONField Wrapper
|
Python
|
bsd-3-clause
|
divio/django-shop,nimbis/django-shop,nimbis/django-shop,jrief/django-shop,khchine5/django-shop,divio/django-shop,khchine5/django-shop,khchine5/django-shop,nimbis/django-shop,khchine5/django-shop,jrief/django-shop,awesto/django-shop,jrief/django-shop,awesto/django-shop,awesto/django-shop,nimbis/django-shop,divio/django-shop,jrief/django-shop
|
Add the new JSONField Wrapper
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
POSTGRES_FALG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FALG = True
try:
if POSTGRES_FALG:
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
|
<commit_before><commit_msg>Add the new JSONField Wrapper<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
POSTGRES_FALG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FALG = True
try:
if POSTGRES_FALG:
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
|
Add the new JSONField Wrapper# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
POSTGRES_FALG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FALG = True
try:
if POSTGRES_FALG:
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
|
<commit_before><commit_msg>Add the new JSONField Wrapper<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
POSTGRES_FALG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FALG = True
try:
if POSTGRES_FALG:
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
|
|
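A hedged usage sketch: wiring the wrapper into a model looks the same regardless of which backend supplied JSONField. The Order model and extra field below are invented for illustration.

from django.db import models

from shop.models.fields import JSONFieldWrapper


class Order(models.Model):
    # Always defaults to an empty dict, whether the backing field is
    # django.contrib.postgres's JSONField or the jsonfield fallback.
    extra = JSONFieldWrapper(blank=True)

One design caveat worth noting: the wrapper forces a literal mutable {} as the default, and Django's usual advice is to pass the dict callable instead so instances never share default state.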
dc4ce85a621e0ffccfa9dbf400f558333e813df4
|
aids/strings/remove_duplicates.py
|
aids/strings/remove_duplicates.py
|
'''
In this module we remove duplicates in a string
'''
def remove_duplicates(string):
'''
Remove duplicated characters in string
'''
result = []
seen = set()
for char in string:
if char not in seen:
seen.add(char)
result.append(char)
return ''.join(result)
|
Add function to remove duplicates from a string
|
Add function to remove duplicates from a string
|
Python
|
mit
|
ueg1990/aids
|
Add function to remove duplicates from a string
|
'''
In this module we remove duplicates in a string
'''
def remove_duplicates(string):
'''
Remove duplicated characters in string
'''
result = []
seen = set()
for char in string:
if char not in seen:
seen.add(char)
result.append(char)
return ''.join(result)
|
<commit_before><commit_msg>Add function to remove duplicates from a string<commit_after>
|
'''
In this module we remove duplicates in a string
'''
def remove_duplicates(string):
'''
Remove duplicated characters in string
'''
result = []
seen = set()
for char in string:
if char not in seen:
seen.add(char)
result.append(char)
return ''.join(result)
|
Add function to remove duplicates from a string'''
In this module we remove duplicates in a string
'''
def remove_duplicates(string):
'''
Remove duplicated characters in string
'''
result = []
seen = set()
for char in string:
if char not in seen:
seen.add(char)
result.append(char)
return ''.join(result)
|
<commit_before><commit_msg>Add function to remove duplicates from a string<commit_after>'''
In this module we remove duplicates in a string
'''
def remove_duplicates(string):
'''
Remove duplicated characters in string
'''
result = []
seen = set()
for char in string:
if char not in seen:
seen.add(char)
result.append(char)
return ''.join(result)
|
|
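Example behaviour, preserving first-seen order:

>>> remove_duplicates('mississippi')
'misp'
>>> remove_duplicates('banana')
'ban'

The seen set gives O(1) membership checks, so the whole pass is linear in the length of the string.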
7f84c6914182cd83192661dfe165fad9c59014ef
|
indra/tests/test_grounding_resources.py
|
indra/tests/test_grounding_resources.py
|
import os
import csv
from indra.statements.validate import validate_db_refs, validate_ns
from indra.preassembler.grounding_mapper import default_grounding_map
from indra.preassembler.grounding_mapper import default_misgrounding_map
# Namespaces that are not currently handled but still appear in statements
exceptions = ['CLO']
def test_misgrounding_map_entries():
bad_entries = []
for text, db_refs in default_misgrounding_map.items():
if not validate_db_refs(db_refs):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_grounding_map_entries():
bad_entries = []
for text, db_refs in default_grounding_map.items():
if (not validate_db_refs(db_refs) and
not (set(exceptions) & db_refs.keys())):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_exceptional_unhandled():
"""Test that exceptional namespaces actually aren't handled.
This will catch if we make an update that makes an exceptional namespace
become a handled namespace. That way we can update the tests.
"""
actually_handled = []
for ns in exceptions:
if validate_ns(ns):
actually_handled.append(ns)
assert not actually_handled, actually_handled
|
Add tests to check validity of gmap and misg_map
|
Add tests to check validity of gmap and misg_map
|
Python
|
bsd-2-clause
|
johnbachman/indra,sorgerlab/belpy,johnbachman/indra,bgyori/indra,sorgerlab/indra,sorgerlab/belpy,sorgerlab/indra,bgyori/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/indra,johnbachman/indra
|
Add tests to check validity of gmap and misg_map
|
import os
import csv
from indra.statements.validate import validate_db_refs, validate_ns
from indra.preassembler.grounding_mapper import default_grounding_map
from indra.preassembler.grounding_mapper import default_misgrounding_map
# Namespaces that are not currently handled but still appear in statements
exceptions = ['CLO']
def test_misgrounding_map_entries():
bad_entries = []
for text, db_refs in default_misgrounding_map.items():
if not validate_db_refs(db_refs):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_grounding_map_entries():
bad_entries = []
for text, db_refs in default_grounding_map.items():
if (not validate_db_refs(db_refs) and
not (set(exceptions) & db_refs.keys())):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_exceptional_unhandled():
"""Test that exceptional namespaces actually aren't handled.
This will catch if we make an update that makes an exceptional namespace
become a handled namespace. That way we can update the tests.
"""
actually_handled = []
for ns in exceptions:
if validate_ns(ns):
actually_handled.append(ns)
assert not actually_handled, actually_handled
|
<commit_before><commit_msg>Add tests to check validity of gmap and misg_map<commit_after>
|
import os
import csv
from indra.statements.validate import validate_db_refs, validate_ns
from indra.preassembler.grounding_mapper import default_grounding_map
from indra.preassembler.grounding_mapper import default_misgrounding_map
# Namespaces that are not currently handled but still appear in statements
exceptions = ['CLO']
def test_misgrounding_map_entries():
bad_entries = []
for text, db_refs in default_misgrounding_map.items():
if not validate_db_refs(db_refs):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_grounding_map_entries():
bad_entries = []
for text, db_refs in default_grounding_map.items():
if (not validate_db_refs(db_refs) and
not (set(exceptions) & db_refs.keys())):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_exceptional_unhandled():
"""Test that exceptional namespaces actually aren't handled.
This will catch if we make an update that makes an exceptional namespace
become a handled namespace. That way we can update the tests.
"""
actually_handled = []
for ns in exceptions:
if validate_ns(ns):
actually_handled.append(ns)
assert not actually_handled, actually_handled
|
Add tests to check validity of gmap and misg_mapimport os
import csv
from indra.statements.validate import validate_db_refs, validate_ns
from indra.preassembler.grounding_mapper import default_grounding_map
from indra.preassembler.grounding_mapper import default_misgrounding_map
# Namespaces that are not currently handled but still appear in statements
exceptions = ['CLO']
def test_misgrounding_map_entries():
bad_entries = []
for text, db_refs in default_misgrounding_map.items():
if not validate_db_refs(db_refs):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_grounding_map_entries():
bad_entries = []
for text, db_refs in default_grounding_map.items():
if (not validate_db_refs(db_refs) and
not (set(exceptions) & db_refs.keys())):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_exceptional_unhandled():
"""Test that exceptional namespaces actually aren't handled.
This will catch if we make an update that makes an exceptional namespace
become a handled namespace. That way we can update the tests.
"""
actually_handled = []
for ns in exceptions:
if validate_ns(ns):
actually_handled.append(ns)
assert not actually_handled, actually_handled
|
<commit_before><commit_msg>Add tests to check validity of gmap and misg_map<commit_after>import os
import csv
from indra.statements.validate import validate_db_refs, validate_ns
from indra.preassembler.grounding_mapper import default_grounding_map
from indra.preassembler.grounding_mapper import default_misgrounding_map
# Namespaces that are not currently handled but still appear in statements
exceptions = ['CLO']
def test_misgrounding_map_entries():
bad_entries = []
for text, db_refs in default_misgrounding_map.items():
if not validate_db_refs(db_refs):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_grounding_map_entries():
bad_entries = []
for text, db_refs in default_grounding_map.items():
if (not validate_db_refs(db_refs) and
not (set(exceptions) & db_refs.keys())):
bad_entries.append([text, db_refs])
assert not bad_entries, bad_entries
def test_exceptional_unhandled():
"""Test that exceptional namespaces actually aren't handled.
This will catch if we make an update that makes an exceptional namespace
become a handled namespace. That way we can update the tests.
"""
actually_handled = []
for ns in exceptions:
if validate_ns(ns):
actually_handled.append(ns)
assert not actually_handled, actually_handled
|
|
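For orientation, validate_db_refs takes a dict mapping namespace to identifier and reports whether every entry is well-formed. The identifiers below are illustrative examples, so treat the exact values as assumptions:

from indra.statements.validate import validate_db_refs

print(validate_db_refs({'HGNC': '6840'}))       # well-formed grounding -> True
print(validate_db_refs({'HGNC': 'not-an-id'}))  # malformed identifier -> False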
8b8208a5a4790b199d0f35ca834f0982788c99c1
|
ConnectTor.py
|
ConnectTor.py
|
#!/usr/bin/python
import urllib, urllib2
import socks
import socket
import sys
import commands
# Connect Tor Network
def GET_MY_IP_Address():
return commands.getoutput("/sbin/ifconfig").split("\n")[1].split(':')[1].split(" ")[0]
def Connect_Tor():
while True:
try:
token = 1
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
socket.socket = socks.socksocket
url = 'http://ifconfig.me/ip'
request = urllib2.Request(url)
request.add_header('Cache-Control','max-age=0')
response = urllib2.urlopen(request)
nowip = response.read()
userip = GET_MY_IP_Address()
print "Connect Tor Network Complete. \n"
print "Ethernet IP Address : " + userip + "\n" + "\033[0;31mSet Tor IP Address : " + nowip + "\033[0m <c0derab1e>"
except:
toto = "false"
return toto
if token == 1:
break
if __name__ == '__main__':
url = sys.argv[1]
#print "Connecting Tor Network... Please Wait..."
Connect_Tor()
|
Connect Tor Network with Python
|
Connect Tor Network with Python
Connect Tor Network with Python
1. apt-get install tor
2. apt-get install privoxy
(we need to edit our privoxy config (/etc/privoxy/config) and add the
following line -> forward-socks4a / localhost:9050 .)
3. Start tor & privoxy service
( service tor privoxy start)
|
Python
|
mit
|
namegpark/Tor-Network-Connection
|
Connect Tor Network with Python
Connect Tor Network with Python
1. apt-get install tor
2. apt-get install privoxy
(we need to edit our privoxy config (/etc/privoxy/config) and add the
following line -> forward-socks4a / localhost:9050 .)
3. Start tor & privoxy service
( service tor privoxy start)
|
#!/usr/bin/python
import urllib, urllib2
import socks
import socket
import sys
import commands
# Connect Tor Network
def GET_MY_IP_Address():
return commands.getoutput("/sbin/ifconfig").split("\n")[1].split(':')[1].split(" ")[0]
def Connect_Tor():
while True:
try:
token = 1
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
socket.socket = socks.socksocket
url = 'http://ifconfig.me/ip'
request = urllib2.Request(url)
request.add_header('Cache-Control','max-age=0')
response = urllib2.urlopen(request)
nowip = response.read()
userip = GET_MY_IP_Address()
print "Connect Tor Network Complete. \n"
print "Ethernet IP Address : " + userip + "\n" + "\033[0;31mSet Tor IP Address : " + nowip + "\033[0m <c0derab1e>"
except:
toto = "false"
return toto
if token == 1:
break
if __name__ == '__main__':
url = sys.argv[1]
#print "Connecting Tor Network... Please Wait..."
Connect_Tor()
|
<commit_before><commit_msg>Connect Tor Network with Python
Connect Tor Network with Python
1. apt-get install tor
2. apt-get install privoxy
(we need to edit our privoxy config (/etc/privoxy/config) and add the
following line -> forward-socks4a / localhost:9050 .)
3. Start tor & privoxy service
( service tor privoxy start)<commit_after>
|
#!/usr/bin/python
import urllib, urllib2
import socks
import socket
import sys
import commands
# Connect Tor Network
def GET_MY_IP_Address():
return commands.getoutput("/sbin/ifconfig").split("\n")[1].split(':')[1].split(" ")[0]
def Connect_Tor():
while True:
try:
token = 1
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
socket.socket = socks.socksocket
url = 'http://ifconfig.me/ip'
request = urllib2.Request(url)
request.add_header('Cache-Control','max-age=0')
response = urllib2.urlopen(request)
nowip = response.read()
userip = GET_MY_IP_Address()
print "Connect Tor Network Complete. \n"
print "Ethernet IP Address : " + userip + "\n" + "\033[0;31mSet Tor IP Address : " + nowip + "\033[0m <c0derab1e>"
except:
toto = "false"
return toto
if token == 1:
break
if __name__ == '__main__':
url = sys.argv[1]
#print "Connecting Tor Network... Please Wait..."
Connect_Tor()
|
Connect Tor Network with Python
Connect Tor Network with Python
1. apt-get install tor
2. apt-get install privoxy
(we need to edit our privoxy config (/etc/privoxy/config) and add the
following line -> forward-socks4a / localhost:9050 .)
3. Start tor & privoxy service
( service tor privoxy start)#!/usr/bin/python
import urllib, urllib2
import socks
import socket
import sys
import commands
# Connect Tor Network
def GET_MY_IP_Address():
return commands.getoutput("/sbin/ifconfig").split("\n")[1].split(':')[1].split(" ")[0]
def Connect_Tor():
while True:
try:
token = 1
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
socket.socket = socks.socksocket
url = 'http://ifconfig.me/ip'
request = urllib2.Request(url)
request.add_header('Cache-Control','max-age=0')
response = urllib2.urlopen(request)
nowip = response.read()
userip = GET_MY_IP_Address()
print "Connect Tor Network Complete. \n"
print "Ethernet IP Address : " + userip + "\n" + "\033[0;31mSet Tor IP Address : " + nowip + "\033[0m <c0derab1e>"
except:
toto = "false"
return toto
if token == 1:
break
if __name__ == '__main__':
url = sys.argv[1]
#print "Connecting Tor Network... Please Wait..."
Connect_Tor()
|
<commit_before><commit_msg>Connect Tor Network with Python
Connect Tor Network with Python
1. apt-get install tor
2. apt-get install privoxy
(we need to edit our privoxy config (/etc/privoxy/config) and add the
following line -> forward-socks4a / localhost:9050 .)
3. Start tor & privoxy service
( service tor privoxy start)<commit_after>#!/usr/bin/python
import urllib, urllib2
import socks
import socket
import sys
import commands
# Connect Tor Network
def GET_MY_IP_Address():
return commands.getoutput("/sbin/ifconfig").split("\n")[1].split(':')[1].split(" ")[0]
def Connect_Tor():
while True:
try:
token = 1
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
socket.socket = socks.socksocket
url = 'http://ifconfig.me/ip'
request = urllib2.Request(url)
request.add_header('Cache-Control','max-age=0')
response = urllib2.urlopen(request)
nowip = response.read()
userip = GET_MY_IP_Address()
print "Connect Tor Network Complete. \n"
print "Ethernet IP Address : " + userip + "\n" + "\033[0;31mSet Tor IP Address : " + nowip + "\033[0m <c0derab1e>"
except:
toto = "false"
return toto
if token == 1:
break
if __name__ == '__main__':
url = sys.argv[1]
#print "Connecting Tor Network... Please Wait..."
Connect_Tor()
|
|
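As a side note, on Python 3 the same end-to-end check can be written without monkey-patching the socket module, by pointing requests at Tor's SOCKS port. This sketch assumes requests is installed with SOCKS support (pip install requests[socks]) and that Tor is listening on the default port 9050:

import requests

proxies = {
    'http': 'socks5h://127.0.0.1:9050',   # socks5h: resolve DNS through Tor too
    'https': 'socks5h://127.0.0.1:9050',
}
resp = requests.get('https://check.torproject.org/api/ip',
                    proxies=proxies, timeout=30)
print(resp.json())  # e.g. {'IsTor': True, 'IP': '...'}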
4cf8ae2ab95e9c7ed1a091532f12a4211f7580b7
|
textingtree.py
|
textingtree.py
|
import os
import requests
import tinycss2
from tinycss2 import color3
from flask import Flask, Response, request
app = Flask(__name__)
@app.route('/', methods=['GET'])
def merry_christmas():
return 'Merry Christmas!'
@app.route('/sms', methods=['POST'])
def sms():
body = request.values.get('Body', None)
if body is None:
return Response(mimetype='text/plain')
sms = body.lower()
print sms
rgba = tinycss2.color3.parse_color(sms)
if rgba is None:
return Response("Sorry, I don't recognize that color.", mimetype='text/plain')
if len(rgba) == 4:
red = int(round(255*rgba[0]))
green = int(round(255*rgba[1]))
blue = int(round(255*rgba[2]))
rgb_string = '[{0:03d},{1:03d},{2:03d}]'.format(red, green, blue)
payload = {'access_token': os.environ['SPARK_ACCESS_TOKEN'], 'command': rgb_string}
r = requests.post("https://api.spark.io/v1/devices/{0}/color".format(os.environ['SPARK_CORE_ID']), data=payload)
return Response(mimetype='text/plain')
if __name__ == '__main__':
app.run()
|
Add application code with SMS route to accept SMS and route to Spark Core via their API
|
Add application code with SMS route to accept SMS and route to Spark Core via their API
|
Python
|
mit
|
willdages/The-Texting-Tree
|
Add application code with SMS route to accept SMS and route to Spark Core via their API
|
import os
import requests
import tinycss2
from tinycss2 import color3
from flask import Flask, Response, request
app = Flask(__name__)
@app.route('/', methods=['GET'])
def merry_christmas():
return 'Merry Christmas!'
@app.route('/sms', methods=['POST'])
def sms():
body = request.values.get('Body', None)
if body is None:
return Response(mimetype='text/plain')
sms = body.lower()
print sms
rgba = tinycss2.color3.parse_color(sms)
if rgba is None:
return Response("Sorry, I don't recognize that color.", mimetype='text/plain')
if len(rgba) == 4:
red = int(round(255*rgba[0]))
green = int(round(255*rgba[1]))
blue = int(round(255*rgba[2]))
rgb_string = '[{0:03d},{1:03d},{2:03d}]'.format(red, green, blue)
payload = {'access_token': os.environ['SPARK_ACCESS_TOKEN'], 'command': rgb_string}
r = requests.post("https://api.spark.io/v1/devices/{0}/color".format(os.environ['SPARK_CORE_ID']), data=payload)
return Response(mimetype='text/plain')
if __name__ == '__main__':
app.run()
|
<commit_before><commit_msg>Add application code with SMS route to accept SMS and route to Spark Core via their API<commit_after>
|
import os
import requests
import tinycss2
from tinycss2 import color3
from flask import Flask, Response, request
app = Flask(__name__)
@app.route('/', methods=['GET'])
def merry_christmas():
return 'Merry Christmas!'
@app.route('/sms', methods=['POST'])
def sms():
body = request.values.get('Body', None)
if body is None:
return Response(mimetype='text/plain')
sms = body.lower()
print sms
rgba = tinycss2.color3.parse_color(sms)
if rgba is None:
return Response("Sorry, I don't recognize that color.", mimetype='text/plain')
if len(rgba) == 4:
red = int(round(255*rgba[0]))
green = int(round(255*rgba[1]))
blue = int(round(255*rgba[2]))
rgb_string = '[{0:03d},{1:03d},{2:03d}]'.format(red, green, blue)
payload = {'access_token': os.environ['SPARK_ACCESS_TOKEN'], 'command': rgb_string}
r = requests.post("https://api.spark.io/v1/devices/{0}/color".format(os.environ['SPARK_CORE_ID']), data=payload)
return Response(mimetype='text/plain')
if __name__ == '__main__':
app.run()
|
Add application code with SMS route to accept SMS and route to Spark Core via their APIimport os
import requests
import tinycss2
from tinycss2 import color3
from flask import Flask, Response, request
app = Flask(__name__)
@app.route('/', methods=['GET'])
def merry_christmas():
return 'Merry Christmas!'
@app.route('/sms', methods=['POST'])
def sms():
body = request.values.get('Body', None)
if body is None:
return Response(mimetype='text/plain')
sms = body.lower()
print sms
rgba = tinycss2.color3.parse_color(sms)
if rgba is None:
return Response("Sorry, I don't recognize that color.", mimetype='text/plain')
if len(rgba) == 4:
red = int(round(255*rgba[0]))
green = int(round(255*rgba[1]))
blue = int(round(255*rgba[2]))
rgb_string = '[{0:03d},{1:03d},{2:03d}]'.format(red, green, blue)
payload = {'access_token': os.environ['SPARK_ACCESS_TOKEN'], 'command': rgb_string}
r = requests.post("https://api.spark.io/v1/devices/{0}/color".format(os.environ['SPARK_CORE_ID']), data=payload)
return Response(mimetype='text/plain')
if __name__ == '__main__':
app.run()
|
<commit_before><commit_msg>Add application code with SMS route to accept SMS and route to Spark Core via their API<commit_after>import os
import requests
import tinycss2
from tinycss2 import color3
from flask import Flask, Response, request
app = Flask(__name__)
@app.route('/', methods=['GET'])
def merry_christmas():
return 'Merry Christmas!'
@app.route('/sms', methods=['POST'])
def sms():
body = request.values.get('Body', None)
if body is None:
return Response(mimetype='text/plain')
sms = body.lower()
print sms
rgba = tinycss2.color3.parse_color(sms)
if rgba is None:
return Response("Sorry, I don't recognize that color.", mimetype='text/plain')
if len(rgba) == 4:
red = int(round(255*rgba[0]))
green = int(round(255*rgba[1]))
blue = int(round(255*rgba[2]))
rgb_string = '[{0:03d},{1:03d},{2:03d}]'.format(red, green, blue)
payload = {'access_token': os.environ['SPARK_ACCESS_TOKEN'], 'command': rgb_string}
r = requests.post("https://api.spark.io/v1/devices/{0}/color".format(os.environ['SPARK_CORE_ID']), data=payload)
return Response(mimetype='text/plain')
if __name__ == '__main__':
app.run()
|
|
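The color parsing at the heart of the /sms route can be tried standalone. tinycss2's color3 module accepts CSS Color Level 3 keywords, hex strings, and rgb()/hsl() syntax, returning channels in the 0..1 range, with None for unrecognized input, which is exactly the case the route guards against:

from tinycss2.color3 import parse_color

rgba = parse_color('teal')                      # also accepts '#008080', 'rgb(0,128,128)'
print([int(round(255 * c)) for c in rgba[:3]])  # -> [0, 128, 128]
print(parse_color('not a color'))               # -> None, handled by the route above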
bbaddeed4cc57af287b61ec79e74e5061c1b2e84
|
app/segment.py
|
app/segment.py
|
class Segment:
'''A representation of a phonetic segment, stored in terms of features.'''
def __init__(self, positive, negative):
self._positive = positive
self._negative = negative
@classmethod
def from_dictionary(cls, feature_dictionary):
'''Initialise the segment from a dictionary of features. The feature name
is the key, and the value is one of '+', '-', or '0'. The only ignored
key is "IPA".'''
positive = [key for key, value in feature_dictionary.items()
if value == '+']
negative = [key for key, value in feature_dictionary.items()
if value == '-']
return cls(positive, negative)
@property
def positive(self):
return self._positive
@positive.setter
def positive(self, feature):
'''Add the feature to the positive list. If it already exists in the
negative list, remove it from negative.'''
if feature not in self._positive:
if feature in self._negative:
self._negative.remove(feature)
self._positive.append(feature)
@property
def negative(self):
return self._negative
@negative.setter
def negative(self, feature):
'''Add the feature to the negative list. If it already exists in the
positive list, remove it from positive.'''
if feature not in self._negative:
if feature in self._positive:
self._positive.remove(feature)
self._negative.append(feature)
|
Create Segment class with positive and negative properties
|
Create Segment class with positive and negative properties
|
Python
|
mit
|
kdelwat/LangEvolve,kdelwat/LangEvolve,kdelwat/LangEvolve
|
Create Segment class with positive and negative properties
|
class Segment:
'''A representation of a phonetic segment, stored in terms of features.'''
def __init__(self, positive, negative):
self._positive = positive
self._negative = negative
@classmethod
def from_dictionary(cls, feature_dictionary):
'''Initialise the segment from a dictionary of features. The feature name
is the key, and the value is one of '+', '-', or '0'. The only ignored
key is "IPA".'''
positive = [key for key, value in feature_dictionary.items()
if value == '+']
negative = [key for key, value in feature_dictionary.items()
if value == '-']
return cls(positive, negative)
@property
def positive(self):
return self._positive
@positive.setter
def positive(self, feature):
'''Add the feature to the positive list. If it already exists in the
negative list, remove it from negative.'''
if feature not in self._positive:
if feature in self._negative:
self._negative.remove(feature)
self._positive.append(feature)
@property
def negative(self):
return self._negative
@negative.setter
def negative(self, feature):
'''Add the feature to the negative list. If it already exists in the
positive list, remove it from positive.'''
if feature not in self._negative:
if feature in self._positive:
self._positive.remove(feature)
self._negative.append(feature)
|
<commit_before><commit_msg>Create Segment class with positive and negative properties<commit_after>
|
class Segment:
'''A representation of a phonetic segment, stored in terms of features.'''
def __init__(self, positive, negative):
self._positive = positive
self._negative = negative
@classmethod
def from_dictionary(cls, feature_dictionary):
'''Initialise the segment from a dictionary of features. The feature name
is the key, and the value is one of '+', '-', or '0'. The only ignored
key is "IPA".'''
positive = [key for key, value in feature_dictionary.items()
if value == '+']
negative = [key for key, value in feature_dictionary.items()
if value == '-']
return cls(positive, negative)
@property
def positive(self):
return self._positive
@positive.setter
def positive(self, feature):
'''Add the feature to the positive list. If it already exists in the
negative list, remove it from negative.'''
if feature not in self._positive:
if feature in self._negative:
self._negative.remove(feature)
self._positive.append(feature)
@property
def negative(self):
return self._negative
@negative.setter
def negative(self, feature):
'''Add the feature to the negative list. If it already exists in the
positive list, remove it from positive.'''
if feature not in self._negative:
if feature in self._positive:
self._positive.remove(feature)
self._negative.append(feature)
|
Create Segment class with positive and negative propertiesclass Segment:
'''A representation of a phonetic segment, stored in terms of features.'''
def __init__(self, positive, negative):
self._positive = positive
self._negative = negative
@classmethod
def from_dictionary(cls, feature_dictionary):
'''Initialise the segment from a dictionary of features. The feature name
is the key, and the value is one of '+', '-', or '0'. The only ignored
key is "IPA".'''
positive = [key for key, value in feature_dictionary.items()
if value == '+']
negative = [key for key, value in feature_dictionary.items()
if value == '-']
return cls(positive, negative)
@property
def positive(self):
return self._positive
@positive.setter
def positive(self, feature):
'''Add the feature to the positive list. If it already exists in the
negative list, remove it from negative.'''
if feature not in self._positive:
if feature in self._negative:
self._negative.remove(feature)
self._positive.append(feature)
@property
def negative(self):
return self._negative
@negative.setter
def negative(self, feature):
'''Add the feature to the negative list. If it already exists in the
positive list, remove it from positive.'''
if feature not in self._negative:
if feature in self._positive:
self._positive.remove(feature)
self._negative.append(feature)
|
<commit_before><commit_msg>Create Segment class with positive and negative properties<commit_after>class Segment:
'''A representation of a phonetic segment, stored in terms of features.'''
def __init__(self, positive, negative):
self._positive = positive
self._negative = negative
@classmethod
def from_dictionary(cls, feature_dictionary):
'''Initialise the segment from a dictionary of features. The feature name
is the key, and the value is one of '+', '-', or '0'. The only ignored
key is "IPA".'''
positive = [key for key, value in feature_dictionary.items()
if value == '+']
negative = [key for key, value in feature_dictionary.items()
if value == '-']
return cls(positive, negative)
@property
def positive(self):
return self._positive
@positive.setter
def positive(self, feature):
'''Add the feature to the positive list. If it already exists in the
negative list, remove it from negative.'''
if feature not in self._positive:
if feature in self._negative:
self._negative.remove(feature)
self._positive.append(feature)
@property
def negative(self):
return self._negative
@negative.setter
def negative(self, feature):
'''Add the feature to the negative list. If it already exists in the
positive list, remove it from positive.'''
if feature not in self._negative:
if feature in self._positive:
self._positive.remove(feature)
self._negative.append(feature)
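A short usage sketch of the class above (feature names are hypothetical); note that assigning to a property adds a feature rather than replacing the list:
seg = Segment.from_dictionary({'voiced': '+', 'nasal': '-', 'IPA': 'b'})
print(seg.positive, seg.negative)  # ['voiced'] ['nasal']
seg.positive = 'nasal'  # moves 'nasal' from the negative list to the positive one
print(seg.positive, seg.negative)  # ['voiced', 'nasal'] []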
|
|
1637f26fc9ee852e0eda33056bca7dd442e0df05
|
data.py
|
data.py
|
from twitter import *
#Put in token, token_key, con_secret, con_secret_key
t = Twitter(
auth=OAuth('', '',
'', ''))
#get user status
print(t.statuses.user_timeline(screen_name="AndrewKLeech"))
|
Access twitter, get user info
|
Access twitter, get user info
|
Python
|
mit
|
AndrewKLeech/Pip-Boy
|
Access twitter, get user info
|
from twitter import *
#Put in token, token_key, con_secret, con_secret_key
t = Twitter(
auth=OAuth('', '',
'', ''))
#get user status
print(t.statuses.user_timeline(screen_name="AndrewKLeech"))
|
<commit_before><commit_msg>Access twitter, get user info<commit_after>
|
from twitter import *
#Put in token, token_key, con_secret, con_secret_key
t = Twitter(
auth=OAuth('', '',
'', ''))
#get user status
print(t.statuses.user_timeline(screen_name="AndrewKLeech"))
|
Access twitter, get user infofrom twitter import *
#Put in token, token_key, con_secret, con_secret_key
t = Twitter(
auth=OAuth('', '',
'', ''))
#get user status
print(t.statuses.user_timeline(screen_name="AndrewKLeech"))
|
<commit_before><commit_msg>Access twitter, get user info<commit_after>from twitter import *
#Put in token, token_key, con_secret, con_secret_key
t = Twitter(
auth=OAuth('', '',
'', ''))
#get user status
print(t.statuses.user_timeline(screen_name="AndrewKLeech"))
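A sketch of the same call with credentials read from the environment instead of hard-coded empty strings (the variable names are illustrative; the argument order follows the comment above):
import os
from twitter import Twitter, OAuth
t = Twitter(auth=OAuth(os.environ['TWITTER_TOKEN'],
                       os.environ['TWITTER_TOKEN_KEY'],
                       os.environ['TWITTER_CON_SECRET'],
                       os.environ['TWITTER_CON_SECRET_KEY']))
print(t.statuses.user_timeline(screen_name='AndrewKLeech'))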
|
|
4f7b2096da44b17abc3f8cac77e40bee527b48aa
|
algorithms/140_word_break_ii.py
|
algorithms/140_word_break_ii.py
|
#!/usr/bin/env python
#
# Given a string s and a dictionary of words dict, add spaces in s to construct
# a sentence where each word is a valid dictionary word. Return all such possible
# sentences.
#
# LeetCode Runtime: 44 ms (beats 100% of Python coders)
# Author: oleg@osv.im
import sys
class Solution(object):
def wordBreak(self, s, wordDict):
"""
:type s: str
:type wordDict: Set[str]
:rtype: bool
"""
if not s or not wordDict:
return []
minWord = len(min(wordDict, key=len))
maxWord = len(max(wordDict, key=len))
wordDict = set(filter(lambda x: len(x) <= len(s), wordDict))
        # mem[i] holds the dictionary words that can end a breakable prefix s[:i] (non-empty iff s[:i] is breakable)
mem = [set() for i in range(len(s) + 1)]
for i in xrange(len(s)):
            if not mem[i] and i != 0:
continue
maxWordI = min(maxWord + i + 1, len(s))
for j in xrange(minWord + i, maxWordI + 1):
word = s[i:j]
if word in wordDict:
mem[j].add(word)
if not mem[len(s)]:
return []
words = [(x, 0) for x in mem[len(s)]]
while True:
done = True
num = len(words)
for i in range(num):
phrase, spaces = words.pop(0)
                if len(phrase) - spaces != len(s):
done = False
prevPhrases = mem[len(s) - len(phrase) + spaces]
for prev in prevPhrases:
words.append((prev + " " + phrase, spaces + 1))
if not prevPhrases:
raise Exception('Illegal state!')
else:
words.append((phrase, spaces))
if done:
return [x[0] for x in words]
if __name__ == '__main__':
if len(sys.argv) == 3:
print Solution().wordBreak(sys.argv[1],
[x[1:-1] for x in sys.argv[2][1:-1].split(',')])
|
Add 140: Word Break II
|
Add 140: Word Break II
|
Python
|
apache-2.0
|
ovaskevich/leetcode,ovaskevich/leetcode
|
Add 140: Word Break II
|
#!/usr/bin/env python
#
# Given a string s and a dictionary of words dict, add spaces in s to construct
# a sentence where each word is a valid dictionary word. Return all such possible
# sentences.
#
# LeetCode Runtime: 44 ms (beats 100% of Python coders)
# Author: oleg@osv.im
import sys
class Solution(object):
def wordBreak(self, s, wordDict):
"""
:type s: str
:type wordDict: Set[str]
:rtype: bool
"""
if not s or not wordDict:
return []
minWord = len(min(wordDict, key=len))
maxWord = len(max(wordDict, key=len))
wordDict = set(filter(lambda x: len(x) <= len(s), wordDict))
        # mem[i] holds the dictionary words that can end a breakable prefix s[:i] (non-empty iff s[:i] is breakable)
mem = [set() for i in range(len(s) + 1)]
for i in xrange(len(s)):
            if not mem[i] and i != 0:
continue
maxWordI = min(maxWord + i + 1, len(s))
for j in xrange(minWord + i, maxWordI + 1):
word = s[i:j]
if word in wordDict:
mem[j].add(word)
if not mem[len(s)]:
return []
words = [(x, 0) for x in mem[len(s)]]
while True:
done = True
num = len(words)
for i in range(num):
phrase, spaces = words.pop(0)
                if len(phrase) - spaces != len(s):
done = False
prevPhrases = mem[len(s) - len(phrase) + spaces]
for prev in prevPhrases:
words.append((prev + " " + phrase, spaces + 1))
if not prevPhrases:
raise Exception('Illegal state!')
else:
words.append((phrase, spaces))
if done:
return [x[0] for x in words]
if __name__ == '__main__':
if len(sys.argv) == 3:
print Solution().wordBreak(sys.argv[1],
[x[1:-1] for x in sys.argv[2][1:-1].split(',')])
|
<commit_before><commit_msg>Add 140: Word Break II<commit_after>
|
#!/usr/bin/env python
#
# Given a string s and a dictionary of words dict, add spaces in s to construct
# a sentence where each word is a valid dictionary word. Return all such possible
# sentences.
#
# LeetCode Runtime: 44 ms (beats 100% of Python coders)
# Author: oleg@osv.im
import sys
class Solution(object):
def wordBreak(self, s, wordDict):
"""
:type s: str
:type wordDict: Set[str]
:rtype: bool
"""
if not s or not wordDict:
return []
minWord = len(min(wordDict, key=len))
maxWord = len(max(wordDict, key=len))
wordDict = set(filter(lambda x: len(x) <= len(s), wordDict))
        # mem[i] holds the dictionary words that can end a breakable prefix s[:i] (non-empty iff s[:i] is breakable)
mem = [set() for i in range(len(s) + 1)]
for i in xrange(len(s)):
            if not mem[i] and i != 0:
continue
maxWordI = min(maxWord + i + 1, len(s))
for j in xrange(minWord + i, maxWordI + 1):
word = s[i:j]
if word in wordDict:
mem[j].add(word)
if not mem[len(s)]:
return []
words = [(x, 0) for x in mem[len(s)]]
while True:
done = True
num = len(words)
for i in range(num):
phrase, spaces = words.pop(0)
                if len(phrase) - spaces != len(s):
done = False
prevPhrases = mem[len(s) - len(phrase) + spaces]
for prev in prevPhrases:
words.append((prev + " " + phrase, spaces + 1))
if not prevPhrases:
raise Exception('Illegal state!')
else:
words.append((phrase, spaces))
if done:
return [x[0] for x in words]
if __name__ == '__main__':
if len(sys.argv) == 3:
print Solution().wordBreak(sys.argv[1],
[x[1:-1] for x in sys.argv[2][1:-1].split(',')])
|
Add 140: Word Break II#!/usr/bin/env python
#
# Given a string s and a dictionary of words dict, add spaces in s to construct
# a sentence where each word is a valid dictionary word. Return all such possible
# sentences.
#
# LeetCode Runtime: 44 ms (beats 100% of Python coders)
# Author: oleg@osv.im
import sys
class Solution(object):
def wordBreak(self, s, wordDict):
"""
:type s: str
:type wordDict: Set[str]
:rtype: bool
"""
if not s or not wordDict:
return []
minWord = len(min(wordDict, key=len))
maxWord = len(max(wordDict, key=len))
wordDict = set(filter(lambda x: len(x) <= len(s), wordDict))
        # mem[i] holds the dictionary words that can end a breakable prefix s[:i] (non-empty iff s[:i] is breakable)
mem = [set() for i in range(len(s) + 1)]
for i in xrange(len(s)):
            if not mem[i] and i != 0:
continue
maxWordI = min(maxWord + i + 1, len(s))
for j in xrange(minWord + i, maxWordI + 1):
word = s[i:j]
if word in wordDict:
mem[j].add(word)
if not mem[len(s)]:
return []
words = [(x, 0) for x in mem[len(s)]]
while True:
done = True
num = len(words)
for i in range(num):
phrase, spaces = words.pop(0)
                if len(phrase) - spaces != len(s):
done = False
prevPhrases = mem[len(s) - len(phrase) + spaces]
for prev in prevPhrases:
words.append((prev + " " + phrase, spaces + 1))
if not prevPhrases:
raise Exception('Illegal state!')
else:
words.append((phrase, spaces))
if done:
return [x[0] for x in words]
if __name__ == '__main__':
if len(sys.argv) == 3:
print Solution().wordBreak(sys.argv[1],
[x[1:-1] for x in sys.argv[2][1:-1].split(',')])
|
<commit_before><commit_msg>Add 140: Word Break II<commit_after>#!/usr/bin/env python
#
# Given a string s and a dictionary of words dict, add spaces in s to construct
# a sentence where each word is a valid dictionary word. Return all such possible
# sentences.
#
# LeetCode Runtime: 44 ms (beats 100% of Python coders)
# Author: oleg@osv.im
import sys
class Solution(object):
def wordBreak(self, s, wordDict):
"""
:type s: str
:type wordDict: Set[str]
:rtype: bool
"""
if not s or not wordDict:
return []
minWord = len(min(wordDict, key=len))
maxWord = len(max(wordDict, key=len))
wordDict = set(filter(lambda x: len(x) <= len(s), wordDict))
        # mem[i] holds the dictionary words that can end a breakable prefix s[:i] (non-empty iff s[:i] is breakable)
mem = [set() for i in range(len(s) + 1)]
for i in xrange(len(s)):
            if not mem[i] and i != 0:
continue
maxWordI = min(maxWord + i + 1, len(s))
for j in xrange(minWord + i, maxWordI + 1):
word = s[i:j]
if word in wordDict:
mem[j].add(word)
if not mem[len(s)]:
return []
words = [(x, 0) for x in mem[len(s)]]
while True:
done = True
num = len(words)
for i in range(num):
phrase, spaces = words.pop(0)
                if len(phrase) - spaces != len(s):
done = False
prevPhrases = mem[len(s) - len(phrase) + spaces]
for prev in prevPhrases:
words.append((prev + " " + phrase, spaces + 1))
if not prevPhrases:
raise Exception('Illegal state!')
else:
words.append((phrase, spaces))
if done:
return [x[0] for x in words]
if __name__ == '__main__':
if len(sys.argv) == 3:
print Solution().wordBreak(sys.argv[1],
[x[1:-1] for x in sys.argv[2][1:-1].split(',')])
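For comparison, a compact memoised Python 3 sketch of the same problem (independent of the solution above; shown only to make the recurrence explicit):
from functools import lru_cache
def word_break(s, word_dict):
    words = set(word_dict)
    @lru_cache(maxsize=None)
    def sentences(start):
        # All segmentations of s[start:] into dictionary words.
        if start == len(s):
            return ['']
        out = []
        for end in range(start + 1, len(s) + 1):
            if s[start:end] in words:
                out.extend((s[start:end] + ' ' + rest).strip()
                           for rest in sentences(end))
        return out
    return sentences(0)
# word_break('catsanddog', ['cat', 'cats', 'and', 'sand', 'dog'])
# -> ['cat sand dog', 'cats and dog']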
|
|
ffe2b546218bce0f03ac09736b89c5b098a2c0f1
|
tests/test_eccodes.py
|
tests/test_eccodes.py
|
import os.path
import numpy as np
import pytest
from eccodes import *
SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data')
TEST_DATA = os.path.join(SAMPLE_DATA_FOLDER, 'era5-levels-members.grib')
# GRIB
def test_grib_read():
gid = codes_grib_new_from_samples('regular_ll_sfc_grib1')
assert codes_get(gid, 'Ni') == 16
assert codes_get(gid, 'Nj') == 31
assert codes_get(gid, 'centre', str) == 'ecmf'
codes_release(gid)
def test_grib_write(tmpdir):
gid = codes_grib_new_from_samples('GRIB2')
codes_set(gid, 'backgroundProcess', 44)
output = tmpdir.join('test_grib_write.grib')
with open(str(output), 'wb') as fout:
codes_write(gid, fout)
codes_release(gid)
# BUFR
def test_bufr_read_write(tmpdir):
bid = codes_bufr_new_from_samples('BUFR4')
codes_set(bid, 'unpack', 1)
assert codes_get(bid, 'typicalYear') == 2012
assert codes_get(bid, 'centre', str) == 'ecmf'
codes_set(bid, 'totalSunshine', 13)
codes_set(bid, 'pack', 1)
output = tmpdir.join('test_bufr_write.bufr')
with open(str(output), 'wb') as fout:
codes_write(bid, fout)
assert codes_get(bid, 'totalSunshine') == 13
codes_release(bid)
|
Add tests for basic functionality
|
Add tests for basic functionality
|
Python
|
apache-2.0
|
ecmwf/eccodes-python,ecmwf/eccodes-python
|
Add tests for basic functionality
|
import os.path
import numpy as np
import pytest
from eccodes import *
SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data')
TEST_DATA = os.path.join(SAMPLE_DATA_FOLDER, 'era5-levels-members.grib')
# GRIB
def test_grib_read():
gid = codes_grib_new_from_samples('regular_ll_sfc_grib1')
assert codes_get(gid, 'Ni') == 16
assert codes_get(gid, 'Nj') == 31
assert codes_get(gid, 'centre', str) == 'ecmf'
codes_release(gid)
def test_grib_write(tmpdir):
gid = codes_grib_new_from_samples('GRIB2')
codes_set(gid, 'backgroundProcess', 44)
output = tmpdir.join('test_grib_write.grib')
with open(str(output), 'wb') as fout:
codes_write(gid, fout)
codes_release(gid)
# BUFR
def test_bufr_read_write(tmpdir):
bid = codes_bufr_new_from_samples('BUFR4')
codes_set(bid, 'unpack', 1)
assert codes_get(bid, 'typicalYear') == 2012
assert codes_get(bid, 'centre', str) == 'ecmf'
codes_set(bid, 'totalSunshine', 13)
codes_set(bid, 'pack', 1)
output = tmpdir.join('test_bufr_write.bufr')
with open(str(output), 'wb') as fout:
codes_write(bid, fout)
assert codes_get(bid, 'totalSunshine') == 13
codes_release(bid)
|
<commit_before><commit_msg>Add tests for basic functionality<commit_after>
|
import os.path
import numpy as np
import pytest
from eccodes import *
SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data')
TEST_DATA = os.path.join(SAMPLE_DATA_FOLDER, 'era5-levels-members.grib')
# GRIB
def test_grib_read():
gid = codes_grib_new_from_samples('regular_ll_sfc_grib1')
assert codes_get(gid, 'Ni') == 16
assert codes_get(gid, 'Nj') == 31
assert codes_get(gid, 'centre', str) == 'ecmf'
codes_release(gid)
def test_grib_write(tmpdir):
gid = codes_grib_new_from_samples('GRIB2')
codes_set(gid, 'backgroundProcess', 44)
output = tmpdir.join('test_grib_write.grib')
with open(str(output), 'wb') as fout:
codes_write(gid, fout)
codes_release(gid)
# BUFR
def test_bufr_read_write(tmpdir):
bid = codes_bufr_new_from_samples('BUFR4')
codes_set(bid, 'unpack', 1)
assert codes_get(bid, 'typicalYear') == 2012
assert codes_get(bid, 'centre', str) == 'ecmf'
codes_set(bid, 'totalSunshine', 13)
codes_set(bid, 'pack', 1)
output = tmpdir.join('test_bufr_write.bufr')
with open(str(output), 'wb') as fout:
codes_write(bid, fout)
assert codes_get(bid, 'totalSunshine') == 13
codes_release(bid)
|
Add tests for basic functionalityimport os.path
import numpy as np
import pytest
from eccodes import *
SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data')
TEST_DATA = os.path.join(SAMPLE_DATA_FOLDER, 'era5-levels-members.grib')
# GRIB
def test_grib_read():
gid = codes_grib_new_from_samples('regular_ll_sfc_grib1')
assert codes_get(gid, 'Ni') == 16
assert codes_get(gid, 'Nj') == 31
assert codes_get(gid, 'centre', str) == 'ecmf'
codes_release(gid)
def test_grib_write(tmpdir):
gid = codes_grib_new_from_samples('GRIB2')
codes_set(gid, 'backgroundProcess', 44)
output = tmpdir.join('test_grib_write.grib')
with open(str(output), 'wb') as fout:
codes_write(gid, fout)
codes_release(gid)
# BUFR
def test_bufr_read_write(tmpdir):
bid = codes_bufr_new_from_samples('BUFR4')
codes_set(bid, 'unpack', 1)
assert codes_get(bid, 'typicalYear') == 2012
assert codes_get(bid, 'centre', str) == 'ecmf'
codes_set(bid, 'totalSunshine', 13)
codes_set(bid, 'pack', 1)
output = tmpdir.join('test_bufr_write.bufr')
with open(str(output), 'wb') as fout:
codes_write(bid, fout)
assert codes_get(bid, 'totalSunshine') == 13
codes_release(bid)
|
<commit_before><commit_msg>Add tests for basic functionality<commit_after>import os.path
import numpy as np
import pytest
from eccodes import *
SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data')
TEST_DATA = os.path.join(SAMPLE_DATA_FOLDER, 'era5-levels-members.grib')
# GRIB
def test_grib_read():
gid = codes_grib_new_from_samples('regular_ll_sfc_grib1')
assert codes_get(gid, 'Ni') == 16
assert codes_get(gid, 'Nj') == 31
assert codes_get(gid, 'centre', str) == 'ecmf'
codes_release(gid)
def test_grib_write(tmpdir):
gid = codes_grib_new_from_samples('GRIB2')
codes_set(gid, 'backgroundProcess', 44)
output = tmpdir.join('test_grib_write.grib')
with open(str(output), 'wb') as fout:
codes_write(gid, fout)
codes_release(gid)
# BUFR
def test_bufr_read_write(tmpdir):
bid = codes_bufr_new_from_samples('BUFR4')
codes_set(bid, 'unpack', 1)
assert codes_get(bid, 'typicalYear') == 2012
assert codes_get(bid, 'centre', str) == 'ecmf'
codes_set(bid, 'totalSunshine', 13)
codes_set(bid, 'pack', 1)
output = tmpdir.join('test_bufr_write.bufr')
with open(str(output), 'wb') as fout:
codes_write(bid, fout)
assert codes_get(bid, 'totalSunshine') == 13
codes_release(bid)
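The tests above build messages from samples; a sketch of the complementary read path, assuming the same eccodes bindings (codes_grib_new_from_file returns None once the file is exhausted):
from eccodes import codes_grib_new_from_file, codes_get, codes_release
def list_centres(path):
    # Print the originating centre of every GRIB message in a file.
    with open(path, 'rb') as fin:
        while True:
            gid = codes_grib_new_from_file(fin)
            if gid is None:
                break
            print(codes_get(gid, 'centre', str))
            codes_release(gid)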
|
|
59917abe1c07dd92636e3f00f74a9e9a95947f0f
|
lowfat/migrations/0150_update_default_year.py
|
lowfat/migrations/0150_update_default_year.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-23 09:39
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lowfat', '0149_add_fund_approval_chain'),
]
operations = [
migrations.AlterModelOptions(
name='termsandconditions',
options={'ordering': ['-year'], 'verbose_name_plural': 'terms and conditions'},
),
migrations.AlterField(
model_name='claimant',
name='application_year',
field=models.IntegerField(default=2020),
),
migrations.AlterField(
model_name='claimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2022, 3, 31)),
),
migrations.AlterField(
model_name='historicalclaimant',
name='application_year',
field=models.IntegerField(default=2020),
),
migrations.AlterField(
model_name='historicalclaimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2022, 3, 31)),
),
]
|
Update default year in migrations
|
Update default year in migrations
|
Python
|
bsd-3-clause
|
softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
|
Update default year in migrations
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-23 09:39
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lowfat', '0149_add_fund_approval_chain'),
]
operations = [
migrations.AlterModelOptions(
name='termsandconditions',
options={'ordering': ['-year'], 'verbose_name_plural': 'terms and conditions'},
),
migrations.AlterField(
model_name='claimant',
name='application_year',
field=models.IntegerField(default=2020),
),
migrations.AlterField(
model_name='claimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2022, 3, 31)),
),
migrations.AlterField(
model_name='historicalclaimant',
name='application_year',
field=models.IntegerField(default=2020),
),
migrations.AlterField(
model_name='historicalclaimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2022, 3, 31)),
),
]
|
<commit_before><commit_msg>Update default year in migrations<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-23 09:39
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lowfat', '0149_add_fund_approval_chain'),
]
operations = [
migrations.AlterModelOptions(
name='termsandconditions',
options={'ordering': ['-year'], 'verbose_name_plural': 'terms and conditions'},
),
migrations.AlterField(
model_name='claimant',
name='application_year',
field=models.IntegerField(default=2020),
),
migrations.AlterField(
model_name='claimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2022, 3, 31)),
),
migrations.AlterField(
model_name='historicalclaimant',
name='application_year',
field=models.IntegerField(default=2020),
),
migrations.AlterField(
model_name='historicalclaimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2022, 3, 31)),
),
]
|
Update default year in migrations# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-23 09:39
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lowfat', '0149_add_fund_approval_chain'),
]
operations = [
migrations.AlterModelOptions(
name='termsandconditions',
options={'ordering': ['-year'], 'verbose_name_plural': 'terms and conditions'},
),
migrations.AlterField(
model_name='claimant',
name='application_year',
field=models.IntegerField(default=2020),
),
migrations.AlterField(
model_name='claimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2022, 3, 31)),
),
migrations.AlterField(
model_name='historicalclaimant',
name='application_year',
field=models.IntegerField(default=2020),
),
migrations.AlterField(
model_name='historicalclaimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2022, 3, 31)),
),
]
|
<commit_before><commit_msg>Update default year in migrations<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-01-23 09:39
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lowfat', '0149_add_fund_approval_chain'),
]
operations = [
migrations.AlterModelOptions(
name='termsandconditions',
options={'ordering': ['-year'], 'verbose_name_plural': 'terms and conditions'},
),
migrations.AlterField(
model_name='claimant',
name='application_year',
field=models.IntegerField(default=2020),
),
migrations.AlterField(
model_name='claimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2022, 3, 31)),
),
migrations.AlterField(
model_name='historicalclaimant',
name='application_year',
field=models.IntegerField(default=2020),
),
migrations.AlterField(
model_name='historicalclaimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2022, 3, 31)),
),
]
|
|
9732f5e1bb667b6683c9e97db03d293373909da6
|
tests/test_process.py
|
tests/test_process.py
|
import unittest
import logging
import time
from util import get_hostname
from tests.common import load_check
from nose.plugins.attrib import attr
logging.basicConfig()
@attr('process')
class ProcessTestCase(unittest.TestCase):
def build_config(self, config, n):
critical_low = [2, 2, 2, -1, 2, -2, 2]
critical_high = [2, 2, 2, 3, -1, 4, -2]
warning_low = [1, -1, 2, -1, 2, -1, 2]
warning_high = [1, 3, -1, 2, -1, 3, -1]
for i in range(7):
name = 'ssh' + str(i)
config['instances'].append({
'name': name,
'search_string': ['ssh', 'sshd'],
'thresholds': {
'critical': [n - critical_low[i], n + critical_high[i]],
'warning': [n - warning_low[i], n + warning_high[i]]
}
})
return config
def testCheck(self):
config = {
'init_config': {},
'instances': []
}
self.agentConfig = {
'version': '0.1',
'api_key': 'toto'
}
search_string = ['ssh', 'sshd']
self.check = load_check('process', config, self.agentConfig)
pids = self.check.find_pids(search_string)
config = self.build_config(config, len(pids))
for i in range(7):
self.check.check(config['instances'][i])
time.sleep(1)
service_checks = self.check.get_service_checks()
assert service_checks
        self.assertTrue(isinstance(service_checks, list))
self.assertTrue(len(service_checks) > 0)
self.assertEquals(len([t for t in service_checks
if t['status']== 0]), 1, service_checks)
self.assertEquals(len([t for t in service_checks
if t['status']== 1]), 2, service_checks)
self.assertEquals(len([t for t in service_checks
if t['status']== 2]), 4, service_checks)
if __name__ == "__main__":
unittest.main()
|
Add tests for process check
|
Add tests for process check
This test calls the check method in process 7 times and checks the process_check
output. The result should be:
1 OK
2 WARNING
4 CRITICAL
|
Python
|
bsd-3-clause
|
jraede/dd-agent,benmccann/dd-agent,packetloop/dd-agent,PagerDuty/dd-agent,benmccann/dd-agent,truthbk/dd-agent,jshum/dd-agent,citrusleaf/dd-agent,gphat/dd-agent,AniruddhaSAtre/dd-agent,AniruddhaSAtre/dd-agent,jvassev/dd-agent,urosgruber/dd-agent,jraede/dd-agent,Mashape/dd-agent,huhongbo/dd-agent,oneandoneis2/dd-agent,brettlangdon/dd-agent,darron/dd-agent,oneandoneis2/dd-agent,pfmooney/dd-agent,tebriel/dd-agent,gphat/dd-agent,remh/dd-agent,zendesk/dd-agent,brettlangdon/dd-agent,pmav99/praktoras,pmav99/praktoras,jshum/dd-agent,jyogi/purvar-agent,urosgruber/dd-agent,eeroniemi/dd-agent,Shopify/dd-agent,amalakar/dd-agent,packetloop/dd-agent,jamesandariese/dd-agent,benmccann/dd-agent,GabrielNicolasAvellaneda/dd-agent,polynomial/dd-agent,urosgruber/dd-agent,yuecong/dd-agent,gphat/dd-agent,PagerDuty/dd-agent,huhongbo/dd-agent,darron/dd-agent,a20012251/dd-agent,AniruddhaSAtre/dd-agent,oneandoneis2/dd-agent,lookout/dd-agent,darron/dd-agent,AntoCard/powerdns-recursor_check,pfmooney/dd-agent,polynomial/dd-agent,joelvanvelden/dd-agent,eeroniemi/dd-agent,jvassev/dd-agent,tebriel/dd-agent,brettlangdon/dd-agent,c960657/dd-agent,oneandoneis2/dd-agent,amalakar/dd-agent,JohnLZeller/dd-agent,jshum/dd-agent,cberry777/dd-agent,yuecong/dd-agent,pmav99/praktoras,a20012251/dd-agent,pmav99/praktoras,urosgruber/dd-agent,truthbk/dd-agent,cberry777/dd-agent,citrusleaf/dd-agent,jraede/dd-agent,yuecong/dd-agent,a20012251/dd-agent,GabrielNicolasAvellaneda/dd-agent,ess/dd-agent,a20012251/dd-agent,joelvanvelden/dd-agent,GabrielNicolasAvellaneda/dd-agent,lookout/dd-agent,ess/dd-agent,JohnLZeller/dd-agent,darron/dd-agent,relateiq/dd-agent,Mashape/dd-agent,lookout/dd-agent,yuecong/dd-agent,c960657/dd-agent,citrusleaf/dd-agent,remh/dd-agent,pfmooney/dd-agent,tebriel/dd-agent,mderomph-coolblue/dd-agent,JohnLZeller/dd-agent,remh/dd-agent,jshum/dd-agent,Wattpad/dd-agent,zendesk/dd-agent,packetloop/dd-agent,benmccann/dd-agent,eeroniemi/dd-agent,Shopify/dd-agent,ess/dd-agent,jvassev/dd-agent,darron/dd-agent,huhongbo/dd-agent,Shopify/dd-agent,guruxu/dd-agent,guruxu/dd-agent,Shopify/dd-agent,jamesandariese/dd-agent,Wattpad/dd-agent,joelvanvelden/dd-agent,pfmooney/dd-agent,Mashape/dd-agent,manolama/dd-agent,mderomph-coolblue/dd-agent,remh/dd-agent,manolama/dd-agent,polynomial/dd-agent,Wattpad/dd-agent,takus/dd-agent,yuecong/dd-agent,manolama/dd-agent,takus/dd-agent,Mashape/dd-agent,manolama/dd-agent,tebriel/dd-agent,polynomial/dd-agent,gphat/dd-agent,takus/dd-agent,takus/dd-agent,c960657/dd-agent,truthbk/dd-agent,AntoCard/powerdns-recursor_check,cberry777/dd-agent,Wattpad/dd-agent,jyogi/purvar-agent,citrusleaf/dd-agent,Mashape/dd-agent,guruxu/dd-agent,packetloop/dd-agent,indeedops/dd-agent,truthbk/dd-agent,mderomph-coolblue/dd-agent,eeroniemi/dd-agent,jyogi/purvar-agent,tebriel/dd-agent,AniruddhaSAtre/dd-agent,AntoCard/powerdns-recursor_check,relateiq/dd-agent,cberry777/dd-agent,jamesandariese/dd-agent,jvassev/dd-agent,guruxu/dd-agent,urosgruber/dd-agent,joelvanvelden/dd-agent,takus/dd-agent,c960657/dd-agent,huhongbo/dd-agent,oneandoneis2/dd-agent,amalakar/dd-agent,ess/dd-agent,brettlangdon/dd-agent,amalakar/dd-agent,indeedops/dd-agent,lookout/dd-agent,PagerDuty/dd-agent,indeedops/dd-agent,manolama/dd-agent,citrusleaf/dd-agent,zendesk/dd-agent,PagerDuty/dd-agent,jamesandariese/dd-agent,jyogi/purvar-agent,Wattpad/dd-agent,polynomial/dd-agent,indeedops/dd-agent,indeedops/dd-agent,eeroniemi/dd-agent,c960657/dd-agent,mderomph-coolblue/dd-agent,JohnLZeller/dd-agent,pfmooney/dd-agent,jshum/dd-agent,lookout/dd-agent,huhongbo/dd-agent,relateiq/
dd-agent,jraede/dd-agent,jraede/dd-agent,zendesk/dd-agent,PagerDuty/dd-agent,gphat/dd-agent,zendesk/dd-agent,ess/dd-agent,Shopify/dd-agent,benmccann/dd-agent,cberry777/dd-agent,AniruddhaSAtre/dd-agent,truthbk/dd-agent,jamesandariese/dd-agent,JohnLZeller/dd-agent,AntoCard/powerdns-recursor_check,mderomph-coolblue/dd-agent,guruxu/dd-agent,amalakar/dd-agent,jyogi/purvar-agent,joelvanvelden/dd-agent,a20012251/dd-agent,packetloop/dd-agent,pmav99/praktoras,relateiq/dd-agent,relateiq/dd-agent,jvassev/dd-agent,AntoCard/powerdns-recursor_check,GabrielNicolasAvellaneda/dd-agent,GabrielNicolasAvellaneda/dd-agent,remh/dd-agent,brettlangdon/dd-agent
|
Add tests for process check
This test calls the check method in process 7 times and checks the process_check
output. The result should be:
1 OK
2 WARNING
4 CRITICAL
|
import unittest
import logging
import time
from util import get_hostname
from tests.common import load_check
from nose.plugins.attrib import attr
logging.basicConfig()
@attr('process')
class ProcessTestCase(unittest.TestCase):
def build_config(self, config, n):
critical_low = [2, 2, 2, -1, 2, -2, 2]
critical_high = [2, 2, 2, 3, -1, 4, -2]
warning_low = [1, -1, 2, -1, 2, -1, 2]
warning_high = [1, 3, -1, 2, -1, 3, -1]
for i in range(7):
name = 'ssh' + str(i)
config['instances'].append({
'name': name,
'search_string': ['ssh', 'sshd'],
'thresholds': {
'critical': [n - critical_low[i], n + critical_high[i]],
'warning': [n - warning_low[i], n + warning_high[i]]
}
})
return config
def testCheck(self):
config = {
'init_config': {},
'instances': []
}
self.agentConfig = {
'version': '0.1',
'api_key': 'toto'
}
search_string = ['ssh', 'sshd']
self.check = load_check('process', config, self.agentConfig)
pids = self.check.find_pids(search_string)
config = self.build_config(config, len(pids))
for i in range(7):
self.check.check(config['instances'][i])
time.sleep(1)
service_checks = self.check.get_service_checks()
assert service_checks
        self.assertTrue(isinstance(service_checks, list))
self.assertTrue(len(service_checks) > 0)
self.assertEquals(len([t for t in service_checks
if t['status']== 0]), 1, service_checks)
self.assertEquals(len([t for t in service_checks
if t['status']== 1]), 2, service_checks)
self.assertEquals(len([t for t in service_checks
if t['status']== 2]), 4, service_checks)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add tests for process check
This test calls the check method in process 7 times and checks the process_check
output. The result should be:
1 OK
2 WARNING
4 CRITICAL<commit_after>
|
import unittest
import logging
import time
from util import get_hostname
from tests.common import load_check
from nose.plugins.attrib import attr
logging.basicConfig()
@attr('process')
class ProcessTestCase(unittest.TestCase):
def build_config(self, config, n):
critical_low = [2, 2, 2, -1, 2, -2, 2]
critical_high = [2, 2, 2, 3, -1, 4, -2]
warning_low = [1, -1, 2, -1, 2, -1, 2]
warning_high = [1, 3, -1, 2, -1, 3, -1]
for i in range(7):
name = 'ssh' + str(i)
config['instances'].append({
'name': name,
'search_string': ['ssh', 'sshd'],
'thresholds': {
'critical': [n - critical_low[i], n + critical_high[i]],
'warning': [n - warning_low[i], n + warning_high[i]]
}
})
return config
def testCheck(self):
config = {
'init_config': {},
'instances': []
}
self.agentConfig = {
'version': '0.1',
'api_key': 'toto'
}
search_string = ['ssh', 'sshd']
self.check = load_check('process', config, self.agentConfig)
pids = self.check.find_pids(search_string)
config = self.build_config(config, len(pids))
for i in range(7):
self.check.check(config['instances'][i])
time.sleep(1)
service_checks = self.check.get_service_checks()
assert service_checks
        self.assertTrue(isinstance(service_checks, list))
self.assertTrue(len(service_checks) > 0)
self.assertEquals(len([t for t in service_checks
if t['status']== 0]), 1, service_checks)
self.assertEquals(len([t for t in service_checks
if t['status']== 1]), 2, service_checks)
self.assertEquals(len([t for t in service_checks
if t['status']== 2]), 4, service_checks)
if __name__ == "__main__":
unittest.main()
|
Add tests for process check
This test calls the check method in process 7 times and checks the process_check
output. The result should be:
1 OK
2 WARNING
4 CRITICALimport unittest
import logging
import time
from util import get_hostname
from tests.common import load_check
from nose.plugins.attrib import attr
logging.basicConfig()
@attr('process')
class ProcessTestCase(unittest.TestCase):
def build_config(self, config, n):
critical_low = [2, 2, 2, -1, 2, -2, 2]
critical_high = [2, 2, 2, 3, -1, 4, -2]
warning_low = [1, -1, 2, -1, 2, -1, 2]
warning_high = [1, 3, -1, 2, -1, 3, -1]
for i in range(7):
name = 'ssh' + str(i)
config['instances'].append({
'name': name,
'search_string': ['ssh', 'sshd'],
'thresholds': {
'critical': [n - critical_low[i], n + critical_high[i]],
'warning': [n - warning_low[i], n + warning_high[i]]
}
})
return config
def testCheck(self):
config = {
'init_config': {},
'instances': []
}
self.agentConfig = {
'version': '0.1',
'api_key': 'toto'
}
search_string = ['ssh', 'sshd']
self.check = load_check('process', config, self.agentConfig)
pids = self.check.find_pids(search_string)
config = self.build_config(config, len(pids))
for i in range(7):
self.check.check(config['instances'][i])
time.sleep(1)
service_checks = self.check.get_service_checks()
assert service_checks
        self.assertTrue(isinstance(service_checks, list))
self.assertTrue(len(service_checks) > 0)
self.assertEquals(len([t for t in service_checks
if t['status']== 0]), 1, service_checks)
self.assertEquals(len([t for t in service_checks
if t['status']== 1]), 2, service_checks)
self.assertEquals(len([t for t in service_checks
if t['status']== 2]), 4, service_checks)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add tests for process check
This test calls the check method in process 7 times and checks the process_check
output. The result should be:
1 OK
2 WARNING
4 CRITICAL<commit_after>import unittest
import logging
import time
from util import get_hostname
from tests.common import load_check
from nose.plugins.attrib import attr
logging.basicConfig()
@attr('process')
class ProcessTestCase(unittest.TestCase):
def build_config(self, config, n):
critical_low = [2, 2, 2, -1, 2, -2, 2]
critical_high = [2, 2, 2, 3, -1, 4, -2]
warning_low = [1, -1, 2, -1, 2, -1, 2]
warning_high = [1, 3, -1, 2, -1, 3, -1]
for i in range(7):
name = 'ssh' + str(i)
config['instances'].append({
'name': name,
'search_string': ['ssh', 'sshd'],
'thresholds': {
'critical': [n - critical_low[i], n + critical_high[i]],
'warning': [n - warning_low[i], n + warning_high[i]]
}
})
return config
def testCheck(self):
config = {
'init_config': {},
'instances': []
}
self.agentConfig = {
'version': '0.1',
'api_key': 'toto'
}
search_string = ['ssh', 'sshd']
self.check = load_check('process', config, self.agentConfig)
pids = self.check.find_pids(search_string)
config = self.build_config(config, len(pids))
for i in range(7):
self.check.check(config['instances'][i])
time.sleep(1)
service_checks = self.check.get_service_checks()
assert service_checks
        self.assertTrue(isinstance(service_checks, list))
self.assertTrue(len(service_checks) > 0)
self.assertEquals(len([t for t in service_checks
if t['status']== 0]), 1, service_checks)
self.assertEquals(len([t for t in service_checks
if t['status']== 1]), 2, service_checks)
self.assertEquals(len([t for t in service_checks
if t['status']== 2]), 4, service_checks)
if __name__ == "__main__":
unittest.main()
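A hypothetical helper mirroring the banding the assertions rely on (not part of the agent's code): with the instance-0 thresholds a count of n sits inside both bands, which is the single OK the test expects:
def classify(count, warning, critical):
    # 0 = OK, 1 = WARNING, 2 = CRITICAL, given [low, high] bands.
    if count < critical[0] or count > critical[1]:
        return 2
    if count < warning[0] or count > warning[1]:
        return 1
    return 0
n = 10  # pretend find_pids() returned 10 matching processes
print(classify(n, warning=[n - 1, n + 1], critical=[n - 2, n + 2]))  # 0
print(classify(n, warning=[n + 1, n + 3], critical=[n - 2, n + 2]))  # 1
print(classify(n, warning=[n + 1, n + 2], critical=[n + 1, n + 3]))  # 2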
|
|
9247d831dfa8ea5b11f870bf3ad75951b8b16891
|
bvspca/animals/wagtail_hooks.py
|
bvspca/animals/wagtail_hooks.py
|
from wagtail.contrib.modeladmin.options import ModelAdmin, modeladmin_register
from bvspca.animals.models import Animal
class AnimalModelAdmin(ModelAdmin):
model = Animal
menu_label = 'Animals'
menu_icon = 'fa-paw'
menu_order = 100
add_to_settings_menu = False
list_display = ('title', 'petpoint_id', 'live',)
search_fields = ('title', 'description', 'petpoint_id')
list_filter = ('species', 'sex',)
ordering = ('live', '-petpoint_id',)
modeladmin_register(AnimalModelAdmin)
|
Add modeladmin entry for animals
|
Add modeladmin entry for animals
|
Python
|
mit
|
nfletton/bvspca,nfletton/bvspca,nfletton/bvspca,nfletton/bvspca
|
Add modeladmin entry for animals
|
from wagtail.contrib.modeladmin.options import ModelAdmin, modeladmin_register
from bvspca.animals.models import Animal
class AnimalModelAdmin(ModelAdmin):
model = Animal
menu_label = 'Animals'
menu_icon = 'fa-paw'
menu_order = 100
add_to_settings_menu = False
list_display = ('title', 'petpoint_id', 'live',)
search_fields = ('title', 'description', 'petpoint_id')
list_filter = ('species', 'sex',)
ordering = ('live', '-petpoint_id',)
modeladmin_register(AnimalModelAdmin)
|
<commit_before><commit_msg>Add modeladmin entry for animals<commit_after>
|
from wagtail.contrib.modeladmin.options import ModelAdmin, modeladmin_register
from bvspca.animals.models import Animal
class AnimalModelAdmin(ModelAdmin):
model = Animal
menu_label = 'Animals'
menu_icon = 'fa-paw'
menu_order = 100
add_to_settings_menu = False
list_display = ('title', 'petpoint_id', 'live',)
search_fields = ('title', 'description', 'petpoint_id')
list_filter = ('species', 'sex',)
ordering = ('live', '-petpoint_id',)
modeladmin_register(AnimalModelAdmin)
|
Add modeladmin entry for animalsfrom wagtail.contrib.modeladmin.options import ModelAdmin, modeladmin_register
from bvspca.animals.models import Animal
class AnimalModelAdmin(ModelAdmin):
model = Animal
menu_label = 'Animals'
menu_icon = 'fa-paw'
menu_order = 100
add_to_settings_menu = False
list_display = ('title', 'petpoint_id', 'live',)
search_fields = ('title', 'description', 'petpoint_id')
list_filter = ('species', 'sex',)
ordering = ('live', '-petpoint_id',)
modeladmin_register(AnimalModelAdmin)
|
<commit_before><commit_msg>Add modeladmin entry for animals<commit_after>from wagtail.contrib.modeladmin.options import ModelAdmin, modeladmin_register
from bvspca.animals.models import Animal
class AnimalModelAdmin(ModelAdmin):
model = Animal
menu_label = 'Animals'
menu_icon = 'fa-paw'
menu_order = 100
add_to_settings_menu = False
list_display = ('title', 'petpoint_id', 'live',)
search_fields = ('title', 'description', 'petpoint_id')
list_filter = ('species', 'sex',)
ordering = ('live', '-petpoint_id',)
modeladmin_register(AnimalModelAdmin)
|
|
1f5b624e7fac4883f8a86305364f55af0703bb32
|
images/minimal/ipython_notebook_config.py
|
images/minimal/ipython_notebook_config.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'template_path':['/srv/ga/', '/srv/ipython/IPython/html',
'/srv/ipython/IPython/html/templates']
}
|
Set up an IPython config for the minimal image
|
Set up an IPython config for the minimal image
|
Python
|
bsd-3-clause
|
ianabc/tmpnb,zischwartz/tmpnb,captainsafia/tmpnb,zischwartz/tmpnb,malev/tmpnb,parente/tmpnb,malev/tmpnb,iamjakob/tmpnb,rgbkrk/tmpnb,betatim/tmpnb,jupyter/tmpnb,cannin/tmpnb,captainsafia/tmpnb,willjharmer/tmpnb,captainsafia/tmpnb,marscher/tmpnb,parente/tmpnb,ianabc/tmpnb,jupyter/tmpnb,cannin/tmpnb,willjharmer/tmpnb,betatim/tmpnb,zischwartz/tmpnb,jupyter/tmpnb,rgbkrk/tmpnb,malev/tmpnb,jupyter/tmpnb,willjharmer/tmpnb,betatim/tmpnb,cannin/tmpnb,marscher/tmpnb,ianabc/tmpnb,rgbkrk/tmpnb,iamjakob/tmpnb,parente/tmpnb,iamjakob/tmpnb,parente/tmpnb,cannin/tmpnb,marscher/tmpnb,marscher/tmpnb,ianabc/tmpnb
|
Set up an IPython config for the minimal image
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'template_path':['/srv/ga/', '/srv/ipython/IPython/html',
'/srv/ipython/IPython/html/templates']
}
|
<commit_before><commit_msg>Set up an IPython config for the minimal image<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'template_path':['/srv/ga/', '/srv/ipython/IPython/html',
'/srv/ipython/IPython/html/templates']
}
|
Set up an IPython config for the minimal image#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'template_path':['/srv/ga/', '/srv/ipython/IPython/html',
'/srv/ipython/IPython/html/templates']
}
|
<commit_before><commit_msg>Set up an IPython config for the minimal image<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'template_path':['/srv/ga/', '/srv/ipython/IPython/html',
'/srv/ipython/IPython/html/templates']
}
|
|
11aac46ef2e3fc867e1879ae8e7bc77e5e0aedc2
|
test/session_lock_test.py
|
test/session_lock_test.py
|
from infiniteworld import SessionLockLost, MCInfdevOldLevel
from templevel import TempLevel
import unittest
class SessionLockTest(unittest.TestCase):
def test_session_lock(self):
temp = TempLevel("AnvilWorld")
level = temp.level
        level2 = MCInfdevOldLevel(level.filename)  # opening the world a second time takes over the session lock
def touch():
level.saveInPlace()
self.assertRaises(SessionLockLost, touch)
|
Add quick test for session locks.
|
Tests: Add quick test for session locks.
|
Python
|
isc
|
mcedit/pymclevel,arruda/pymclevel,ahh2131/mchisel,mcedit/pymclevel,arruda/pymclevel,ahh2131/mchisel
|
Tests: Add quick test for session locks.
|
from infiniteworld import SessionLockLost, MCInfdevOldLevel
from templevel import TempLevel
import unittest
class SessionLockTest(unittest.TestCase):
def test_session_lock(self):
temp = TempLevel("AnvilWorld")
level = temp.level
        level2 = MCInfdevOldLevel(level.filename)  # opening the world a second time takes over the session lock
def touch():
level.saveInPlace()
self.assertRaises(SessionLockLost, touch)
|
<commit_before><commit_msg>Tests: Add quick test for session locks.<commit_after>
|
from infiniteworld import SessionLockLost, MCInfdevOldLevel
from templevel import TempLevel
import unittest
class SessionLockTest(unittest.TestCase):
def test_session_lock(self):
temp = TempLevel("AnvilWorld")
level = temp.level
        level2 = MCInfdevOldLevel(level.filename)  # opening the world a second time takes over the session lock
def touch():
level.saveInPlace()
self.assertRaises(SessionLockLost, touch)
|
Tests: Add quick test for session locks.from infiniteworld import SessionLockLost, MCInfdevOldLevel
from templevel import TempLevel
import unittest
class SessionLockTest(unittest.TestCase):
def test_session_lock(self):
temp = TempLevel("AnvilWorld")
level = temp.level
        level2 = MCInfdevOldLevel(level.filename)  # opening the world a second time takes over the session lock
def touch():
level.saveInPlace()
self.assertRaises(SessionLockLost, touch)
|
<commit_before><commit_msg>Tests: Add quick test for session locks.<commit_after>from infiniteworld import SessionLockLost, MCInfdevOldLevel
from templevel import TempLevel
import unittest
class SessionLockTest(unittest.TestCase):
def test_session_lock(self):
temp = TempLevel("AnvilWorld")
level = temp.level
        level2 = MCInfdevOldLevel(level.filename)  # opening the world a second time takes over the session lock
def touch():
level.saveInPlace()
self.assertRaises(SessionLockLost, touch)
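For context, a sketch of the classic session.lock protocol this test exercises (an assumption about the Minecraft world format, not about pymclevel internals: the most recent writer of an 8-byte millisecond timestamp owns the world):
import os
import struct
import time
def take_session_lock(world_dir):
    # Whoever wrote the lock file last owns the world.
    stamp = int(time.time() * 1000)
    with open(os.path.join(world_dir, 'session.lock'), 'wb') as f:
        f.write(struct.pack('>q', stamp))
    return stamp  # re-read later; a different value means the lock was lost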
|
|
92f41cf20097de534d8ffe8e85cd29f9e292946e
|
utils/sort_score_files.py
|
utils/sort_score_files.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Make statistics on score files (stored in JSON files).
"""
import argparse
import json
import math
import numpy as np
import operator
def parse_json_file(json_file_path):
with open(json_file_path, "r") as fd:
json_data = json.load(fd)
return json_data
def extract_score_list(json_dict, score_index):
io_list = json_dict["io"]
json_data = [image_dict["score"][score_index] for image_dict in io_list if "score" in image_dict]
return json_data
if __name__ == '__main__':
# PARSE OPTIONS ###########################################################
parser = argparse.ArgumentParser(description="Make statistics on score files (JSON files).")
parser.add_argument("--index", "-i", type=int, default=0, metavar="INT",
help="The index of the score to plot in case of multivalued scores")
parser.add_argument("fileargs", nargs="+", metavar="FILE",
help="The JSON file to process")
args = parser.parse_args()
json_file_path_list = args.fileargs
score_index = args.index
# FETCH SCORE #############################################################
data_dict = {}
error_list = []
for json_file_path in json_file_path_list:
try:
json_dict = parse_json_file(json_file_path)
score_array = np.array(extract_score_list(json_dict, score_index))
mean_score = score_array.mean()
if math.isnan(mean_score):
error_list.append(json_file_path)
else:
data_dict[json_file_path] = mean_score
        except Exception:
error_list.append(json_file_path)
print("ERRORS")
for path in error_list:
print(path)
print("")
print("SORTED SCORES")
for path, mean in sorted(data_dict.items(), key=operator.itemgetter(1), reverse=True):
print(mean, path)
|
Add a script to sort score files according to their mean scores.
|
Add a script to sort score files according to their mean scores.
|
Python
|
mit
|
jdhp-sap/data-pipeline-standalone-scripts,jdhp-sap/sap-cta-data-pipeline,jdhp-sap/data-pipeline-standalone-scripts,jdhp-sap/sap-cta-data-pipeline
|
Add a script to sort score files according to their mean scores.
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Make statistics on score files (stored in JSON files).
"""
import argparse
import json
import math
import numpy as np
import operator
def parse_json_file(json_file_path):
with open(json_file_path, "r") as fd:
json_data = json.load(fd)
return json_data
def extract_score_list(json_dict, score_index):
io_list = json_dict["io"]
json_data = [image_dict["score"][score_index] for image_dict in io_list if "score" in image_dict]
return json_data
if __name__ == '__main__':
# PARSE OPTIONS ###########################################################
parser = argparse.ArgumentParser(description="Make statistics on score files (JSON files).")
parser.add_argument("--index", "-i", type=int, default=0, metavar="INT",
help="The index of the score to plot in case of multivalued scores")
parser.add_argument("fileargs", nargs="+", metavar="FILE",
help="The JSON file to process")
args = parser.parse_args()
json_file_path_list = args.fileargs
score_index = args.index
# FETCH SCORE #############################################################
data_dict = {}
error_list = []
for json_file_path in json_file_path_list:
try:
json_dict = parse_json_file(json_file_path)
score_array = np.array(extract_score_list(json_dict, score_index))
mean_score = score_array.mean()
if math.isnan(mean_score):
error_list.append(json_file_path)
else:
data_dict[json_file_path] = mean_score
        except Exception:
error_list.append(json_file_path)
print("ERRORS")
for path in error_list:
print(path)
print("")
print("SORTED SCORES")
for path, mean in sorted(data_dict.items(), key=operator.itemgetter(1), reverse=True):
print(mean, path)
|
<commit_before><commit_msg>Add a script to sort score files according to their mean scores.<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Make statistics on score files (stored in JSON files).
"""
import argparse
import json
import math
import numpy as np
import operator
def parse_json_file(json_file_path):
with open(json_file_path, "r") as fd:
json_data = json.load(fd)
return json_data
def extract_score_list(json_dict, score_index):
io_list = json_dict["io"]
json_data = [image_dict["score"][score_index] for image_dict in io_list if "score" in image_dict]
return json_data
if __name__ == '__main__':
# PARSE OPTIONS ###########################################################
parser = argparse.ArgumentParser(description="Make statistics on score files (JSON files).")
parser.add_argument("--index", "-i", type=int, default=0, metavar="INT",
help="The index of the score to plot in case of multivalued scores")
parser.add_argument("fileargs", nargs="+", metavar="FILE",
help="The JSON file to process")
args = parser.parse_args()
json_file_path_list = args.fileargs
score_index = args.index
# FETCH SCORE #############################################################
data_dict = {}
error_list = []
for json_file_path in json_file_path_list:
try:
json_dict = parse_json_file(json_file_path)
score_array = np.array(extract_score_list(json_dict, score_index))
mean_score = score_array.mean()
if math.isnan(mean_score):
error_list.append(json_file_path)
else:
data_dict[json_file_path] = mean_score
        except Exception:
error_list.append(json_file_path)
print("ERRORS")
for path in error_list:
print(path)
print("")
print("SORTED SCORES")
for path, mean in sorted(data_dict.items(), key=operator.itemgetter(1), reverse=True):
print(mean, path)
|
Add a script to sort score files according to their mean scores.#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Make statistics on score files (stored in JSON files).
"""
import argparse
import json
import math
import numpy as np
import operator
def parse_json_file(json_file_path):
with open(json_file_path, "r") as fd:
json_data = json.load(fd)
return json_data
def extract_score_list(json_dict, score_index):
io_list = json_dict["io"]
json_data = [image_dict["score"][score_index] for image_dict in io_list if "score" in image_dict]
return json_data
if __name__ == '__main__':
# PARSE OPTIONS ###########################################################
parser = argparse.ArgumentParser(description="Make statistics on score files (JSON files).")
parser.add_argument("--index", "-i", type=int, default=0, metavar="INT",
help="The index of the score to plot in case of multivalued scores")
parser.add_argument("fileargs", nargs="+", metavar="FILE",
help="The JSON file to process")
args = parser.parse_args()
json_file_path_list = args.fileargs
score_index = args.index
# FETCH SCORE #############################################################
data_dict = {}
error_list = []
for json_file_path in json_file_path_list:
try:
json_dict = parse_json_file(json_file_path)
score_array = np.array(extract_score_list(json_dict, score_index))
mean_score = score_array.mean()
if math.isnan(mean_score):
error_list.append(json_file_path)
else:
data_dict[json_file_path] = mean_score
        except Exception:
error_list.append(json_file_path)
print("ERRORS")
for path in error_list:
print(path)
print("")
print("SORTED SCORES")
for path, mean in sorted(data_dict.items(), key=operator.itemgetter(1), reverse=True):
print(mean, path)
|
<commit_before><commit_msg>Add a script to sort score files according to their mean scores.<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Make statistics on score files (stored in JSON files).
"""
import argparse
import json
import math
import numpy as np
import operator
def parse_json_file(json_file_path):
with open(json_file_path, "r") as fd:
json_data = json.load(fd)
return json_data
def extract_score_list(json_dict, score_index):
io_list = json_dict["io"]
json_data = [image_dict["score"][score_index] for image_dict in io_list if "score" in image_dict]
return json_data
if __name__ == '__main__':
# PARSE OPTIONS ###########################################################
parser = argparse.ArgumentParser(description="Make statistics on score files (JSON files).")
parser.add_argument("--index", "-i", type=int, default=0, metavar="INT",
help="The index of the score to plot in case of multivalued scores")
parser.add_argument("fileargs", nargs="+", metavar="FILE",
help="The JSON file to process")
args = parser.parse_args()
json_file_path_list = args.fileargs
score_index = args.index
# FETCH SCORE #############################################################
data_dict = {}
error_list = []
for json_file_path in json_file_path_list:
try:
json_dict = parse_json_file(json_file_path)
score_array = np.array(extract_score_list(json_dict, score_index))
mean_score = score_array.mean()
if math.isnan(mean_score):
error_list.append(json_file_path)
else:
data_dict[json_file_path] = mean_score
        except Exception:
error_list.append(json_file_path)
print("ERRORS")
for path in error_list:
print(path)
print("")
print("SORTED SCORES")
for path, mean in sorted(data_dict.items(), key=operator.itemgetter(1), reverse=True):
print(mean, path)
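A minimal input the script accepts, inferred from extract_score_list (all field values here are made up):
example = {
    "io": [
        {"score": [0.8, 0.1]},
        {"score": [0.6, 0.3]},
        {"input": "img42.fits"},  # entries without a "score" key are skipped
    ]
}
# With --index 0 the mean is (0.8 + 0.6) / 2 = 0.7; a NaN mean or a parse
# failure sends the file to the ERRORS list instead.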
|
|
31a9789668e9de589f8a4a181b01733c1854e97d
|
src/people/management/commands/draw_people.py
|
src/people/management/commands/draw_people.py
|
import sys
import argparse
import csv
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from people.models import Person
class Command(BaseCommand):
help = "Draw people at random amongst people who volunteered for it, while ensuring parity between women/men" \
" and fair representation of people who selected 'Other/Not defined' as gender"
requires_migrations_checks = True
def add_arguments(self, parser):
parser.add_argument(
'-o', '--output',
dest='outfile', type=argparse.FileType('w'), default=sys.stdout,
help='Csv file to which the list will be exported'
)
parser.add_argument(
'draw_count',
type=int
)
def handle(self, draw_count, outfile, **kwargs):
# setting order_by 'gender' so that the created field is not included in the groupby clause
if draw_count % 2 != 0:
raise CommandError('Number of persons to draw is not even.')
counts = {
d['gender']: d['c'] for d in
Person.objects.filter(draw_participation=True).order_by('gender').values('gender').annotate(c=Count('gender'))
}
total_count = sum(counts[g] for g in [Person.GENDER_FEMALE, Person.GENDER_MALE, Person.GENDER_OTHER])
other_draw_count = round(draw_count * counts[Person.GENDER_OTHER] / total_count / 2) * 2
gendered_draw_count = (draw_count - other_draw_count) // 2  # integer division: used as a slice bound below
if counts[Person.GENDER_MALE] < gendered_draw_count or counts[Person.GENDER_FEMALE] < gendered_draw_count:
raise CommandError("Not enough volunteers for drawing with parity")
participants = Person.objects.none()
# DRAWING HAPPENS HERE
for g, n in {Person.GENDER_FEMALE: gendered_draw_count, Person.GENDER_MALE: gendered_draw_count, Person.GENDER_OTHER: other_draw_count}.items():
participants = participants.union(
Person.objects.filter(draw_participation=True, gender=g).order_by('?')[:n]
)
writer = csv.DictWriter(outfile, fieldnames=['numero', 'id', 'email', 'gender'])
writer.writeheader()
for numero, p in enumerate(participants):
writer.writerow({'numero': numero, 'id': p.id, 'email': p.email, 'gender': p.gender})
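A worked example of the allocation arithmetic above, with invented volunteer counts (illustration only, not real data):

# invented counts: 600 women, 550 men, 50 'other' volunteers
counts = {'F': 600, 'M': 550, 'O': 50}
draw_count = 100
total_count = sum(counts.values())                                         # 1200
other_draw_count = round(draw_count * counts['O'] / total_count / 2) * 2   # 4, kept even
gendered_draw_count = (draw_count - other_draw_count) // 2                 # 48 women + 48 men
assert other_draw_count + 2 * gendered_draw_count == draw_count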
|
Add command to draw people
|
Add command to draw people
|
Python
|
agpl-3.0
|
lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django
|
Add command to draw people
|
import sys
import argparse
import csv
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from people.models import Person
class Command(BaseCommand):
help = "Draw people at random amongst people who volunteered for it, while ensuring parity between women/men" \
" and fair representation of people who selected 'Other/Not defined' as gender"
requires_migrations_checks = True
def add_arguments(self, parser):
parser.add_argument(
'-o', '--output',
dest='outfile', type=argparse.FileType('w'), default=sys.stdout,
help='Csv file to which the list will be exported'
)
parser.add_argument(
'draw_count',
type=int
)
def handle(self, draw_count, outfile, **kwargs):
# setting order_by 'gender' so that the created field is not included in the groupby clause
if draw_count % 2 != 0:
raise CommandError('Number of persons to draw is not even.')
counts = {
d['gender']: d['c'] for d in
Person.objects.filter(draw_participation=True).order_by('gender').values('gender').annotate(c=Count('gender'))
}
total_count = sum(counts[g] for g in [Person.GENDER_FEMALE, Person.GENDER_MALE, Person.GENDER_OTHER])
other_draw_count = round(draw_count * counts[Person.GENDER_OTHER] / total_count / 2) * 2
gendered_draw_count = (draw_count - other_draw_count) // 2  # integer division: used as a slice bound below
if counts[Person.GENDER_MALE] < gendered_draw_count or counts[Person.GENDER_FEMALE] < gendered_draw_count:
raise CommandError("Not enough volunteers for drawing with parity")
participants = Person.objects.none()
# DRAWING HAPPENS HERE
for g, n in {Person.GENDER_FEMALE: gendered_draw_count, Person.GENDER_MALE: gendered_draw_count, Person.GENDER_OTHER: other_draw_count}.items():
participants = participants.union(
Person.objects.filter(draw_participation=True, gender=g).order_by('?')[:n]
)
writer = csv.DictWriter(outfile, fieldnames=['numero', 'id', 'email', 'gender'])
writer.writeheader()
for numero, p in enumerate(participants):
writer.writerow({'numero': numero, 'id': p.id, 'email': p.email, 'gender': p.gender})
|
<commit_before><commit_msg>Add command to draw people<commit_after>
|
import sys
import argparse
import csv
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from people.models import Person
class Command(BaseCommand):
help = "Draw people at random amongst people who volunteered for it, while ensuring parity between women/men" \
" and fair representation of people who selected 'Other/Not defined' as gender"
requires_migrations_checks = True
def add_arguments(self, parser):
parser.add_argument(
'-o', '--output',
dest='outfile', type=argparse.FileType('w'), default=sys.stdout,
help='Csv file to which the list will be exported'
)
parser.add_argument(
'draw_count',
type=int
)
def handle(self, draw_count, outfile, **kwargs):
# setting order_by 'gender' so that the created field is not included in the groupby clause
if draw_count % 2 != 0:
raise CommandError('Number of persons to draw is not even.')
counts = {
d['gender']: d['c'] for d in
Person.objects.filter(draw_participation=True).order_by('gender').values('gender').annotate(c=Count('gender'))
}
total_count = sum(counts[g] for g in [Person.GENDER_FEMALE, Person.GENDER_MALE, Person.GENDER_OTHER])
other_draw_count = round(draw_count * counts[Person.GENDER_OTHER] / total_count / 2) * 2
gendered_draw_count = (draw_count - other_draw_count) // 2  # integer division: used as a slice bound below
if counts[Person.GENDER_MALE] < gendered_draw_count or counts[Person.GENDER_FEMALE] < gendered_draw_count:
raise CommandError("Not enough volunteers for drawing with parity")
participants = Person.objects.none()
# DRAWING HAPPENS HERE
for g, n in {Person.GENDER_FEMALE: gendered_draw_count, Person.GENDER_MALE: gendered_draw_count, Person.GENDER_OTHER: other_draw_count}.items():
participants = participants.union(
Person.objects.filter(draw_participation=True, gender=g).order_by('?')[:n]
)
writer = csv.DictWriter(outfile, fieldnames=['numero', 'id', 'email', 'gender'])
writer.writeheader()
for numero, p in enumerate(participants):
writer.writerow({'numero': numero, 'id': p.id, 'email': p.email, 'gender': p.gender})
|
Add command to draw peopleimport sys
import argparse
import csv
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from people.models import Person
class Command(BaseCommand):
help = "Draw people at random amongst people who volunteered for it, while ensuring parity between women/men" \
" and fair representation of people who selected 'Other/Not defined' as gender"
requires_migrations_checks = True
def add_arguments(self, parser):
parser.add_argument(
'-o', '--output',
dest='outfile', type=argparse.FileType('w'), default=sys.stdout,
help='Csv file to which the list will be exported'
)
parser.add_argument(
'draw_count',
type=int
)
def handle(self, draw_count, outfile, **kwargs):
# setting order_by 'gender' so that the created field is not included in the groupby clause
if draw_count % 2 != 0:
raise CommandError('Number of persons to draw is not even.')
counts = {
d['gender']: d['c'] for d in
Person.objects.filter(draw_participation=True).order_by('gender').values('gender').annotate(c=Count('gender'))
}
total_count = sum(counts[g] for g in [Person.GENDER_FEMALE, Person.GENDER_MALE, Person.GENDER_OTHER])
other_draw_count = round(draw_count * counts[Person.GENDER_OTHER] / total_count / 2) * 2
gendered_draw_count = (draw_count - other_draw_count) // 2  # integer division: used as a slice bound below
if counts[Person.GENDER_MALE] < gendered_draw_count or counts[Person.GENDER_FEMALE] < gendered_draw_count:
raise CommandError("Not enough volunteers for drawing with parity")
participants = Person.objects.none()
# DRAWING HAPPENS HERE
for g, n in {Person.GENDER_FEMALE: gendered_draw_count, Person.GENDER_MALE: gendered_draw_count, Person.GENDER_OTHER: other_draw_count}.items():
participants = participants.union(
Person.objects.filter(draw_participation=True, gender=g).order_by('?')[:n]
)
writer = csv.DictWriter(outfile, fieldnames=['numero', 'id', 'email', 'gender'])
writer.writeheader()
for numero, p in enumerate(participants):
writer.writerow({'numero': numero, 'id': p.id, 'email': p.email, 'gender': p.gender})
|
<commit_before><commit_msg>Add command to draw people<commit_after>import sys
import argparse
import csv
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from people.models import Person
class Command(BaseCommand):
help = "Draw people at random amongst people who volunteered for it, while ensuring parity between women/men" \
" and fair representation of people who selected 'Other/Not defined' as gender"
requires_migrations_checks = True
def add_arguments(self, parser):
parser.add_argument(
'-o', '--output',
dest='outfile', type=argparse.FileType('w'), default=sys.stdout,
help='Csv file to which the list will be exported'
)
parser.add_argument(
'draw_count',
type=int
)
def handle(self, draw_count, outfile, **kwargs):
# setting order_by 'gender' so that the created field is not included in the groupby clause
if draw_count % 2 != 0:
raise CommandError('Number of persons to draw is not even.')
counts = {
d['gender']: d['c'] for d in
Person.objects.filter(draw_participation=True).order_by('gender').values('gender').annotate(c=Count('gender'))
}
total_count = sum(counts[g] for g in [Person.GENDER_FEMALE, Person.GENDER_MALE, Person.GENDER_OTHER])
other_draw_count = round(draw_count * counts[Person.GENDER_OTHER] / total_count / 2) * 2
gendered_draw_count = (draw_count - other_draw_count) // 2  # integer division: used as a slice bound below
if counts[Person.GENDER_MALE] < gendered_draw_count or counts[Person.GENDER_FEMALE] < gendered_draw_count:
raise CommandError("Not enough volunteers for drawing with parity")
participants = Person.objects.none()
# DRAWING HAPPENS HERE
for g, n in {Person.GENDER_FEMALE: gendered_draw_count, Person.GENDER_MALE: gendered_draw_count, Person.GENDER_OTHER: other_draw_count}.items():
participants = participants.union(
Person.objects.filter(draw_participation=True, gender=g).order_by('?')[:n]
)
writer = csv.DictWriter(outfile, fieldnames=['numero', 'id', 'email', 'gender'])
writer.writeheader()
for numero, p in enumerate(participants):
writer.writerow({'numero': numero, 'id': p.id, 'email': p.email, 'gender': p.gender})
|
|
aef53625ebedd9e2af7676014910e8a98de46f96
|
crowdgezwitscher/twitter/migrations/0007_auto_20180121_2232.py
|
crowdgezwitscher/twitter/migrations/0007_auto_20180121_2232.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-01-21 22:32
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('twitter', '0006_auto_20170408_2226'),
]
operations = [
migrations.AlterField(
model_name='tweet',
name='content',
field=models.CharField(max_length=560),
),
migrations.AlterField(
model_name='tweet',
name='created_at',
field=models.DateTimeField(default=django.utils.timezone.now),
),
]
|
Add (forgotten) migration for Twitter model changes.
|
Add (forgotten) migration for Twitter model changes.
|
Python
|
mit
|
Strassengezwitscher/Strassengezwitscher,Strassengezwitscher/Strassengezwitscher,Strassengezwitscher/Strassengezwitscher,Strassengezwitscher/Strassengezwitscher,Strassengezwitscher/Strassengezwitscher
|
Add (forgotten) migration for Twitter model changes.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-01-21 22:32
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('twitter', '0006_auto_20170408_2226'),
]
operations = [
migrations.AlterField(
model_name='tweet',
name='content',
field=models.CharField(max_length=560),
),
migrations.AlterField(
model_name='tweet',
name='created_at',
field=models.DateTimeField(default=django.utils.timezone.now),
),
]
|
<commit_before><commit_msg>Add (forgotten) migration for Twitter model changes.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-01-21 22:32
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('twitter', '0006_auto_20170408_2226'),
]
operations = [
migrations.AlterField(
model_name='tweet',
name='content',
field=models.CharField(max_length=560),
),
migrations.AlterField(
model_name='tweet',
name='created_at',
field=models.DateTimeField(default=django.utils.timezone.now),
),
]
|
Add (forgotten) migration for Twitter model changes.# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-01-21 22:32
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('twitter', '0006_auto_20170408_2226'),
]
operations = [
migrations.AlterField(
model_name='tweet',
name='content',
field=models.CharField(max_length=560),
),
migrations.AlterField(
model_name='tweet',
name='created_at',
field=models.DateTimeField(default=django.utils.timezone.now),
),
]
|
<commit_before><commit_msg>Add (forgotten) migration for Twitter model changes.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-01-21 22:32
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('twitter', '0006_auto_20170408_2226'),
]
operations = [
migrations.AlterField(
model_name='tweet',
name='content',
field=models.CharField(max_length=560),
),
migrations.AlterField(
model_name='tweet',
name='created_at',
field=models.DateTimeField(default=django.utils.timezone.now),
),
]
|
|
495c922c221125bc86ca8a75b03e8c8738c41a7f
|
test/command_line/tst_hot_pixel_mask_to_xy.py
|
test/command_line/tst_hot_pixel_mask_to_xy.py
|
from __future__ import division
import os
import libtbx.load_env
from libtbx import easy_run
have_dials_regression = libtbx.env.has_module("dials_regression")
if have_dials_regression:
dials_regression = libtbx.env.find_in_repositories(
relative_path="dials_regression",
test=os.path.isdir)
def exercise_hot_pixel_mask_to_xy():
if not have_dials_regression:
print "Skipping exercise_hot_pixel_mask_to_xy(): dials_regression not available."
return
data_file = os.path.join(dials_regression, "i23", "hot_pixel_mask",
"hot_mask_0.pickle")
commands = ["dev.dials.hot_pixel_mask_to_xy", data_file]
command = " ".join(commands)
result = easy_run.fully_buffered(command=command).raise_if_errors()
return
def run():
exercise_hot_pixel_mask_to_xy()
return
if __name__ == '__main__':
from dials.test import cd_auto
with cd_auto(__file__):
run()
print "OK"
|
Test for jiffy to get hot pixel coordinates as x, y
|
Test for jiffy to get hot pixel coordinates as x, y
|
Python
|
bsd-3-clause
|
dials/dials,dials/dials,dials/dials,dials/dials,dials/dials
|
Test for jiffy to get hot pixel coordinates as x, y
|
from __future__ import division
import os
import libtbx.load_env
from libtbx import easy_run
have_dials_regression = libtbx.env.has_module("dials_regression")
if have_dials_regression:
dials_regression = libtbx.env.find_in_repositories(
relative_path="dials_regression",
test=os.path.isdir)
def exercise_hot_pixel_mask_to_xy():
if not have_dials_regression:
print "Skipping exercise_hot_pixel_mask_to_xy(): dials_regression not available."
return
data_file = os.path.join(dials_regression, "i23", "hot_pixel_mask",
"hot_mask_0.pickle")
commands = ["dev.dials.hot_pixel_mask_to_xy", data_file]
command = " ".join(commands)
result = easy_run.fully_buffered(command=command).raise_if_errors()
return
def run():
exercise_hot_pixel_mask_to_xy()
return
if __name__ == '__main__':
from dials.test import cd_auto
with cd_auto(__file__):
run()
print "OK"
|
<commit_before><commit_msg>Test for jiffy to get hot pixel coordinates as x, y<commit_after>
|
from __future__ import division
import os
import libtbx.load_env
from libtbx import easy_run
have_dials_regression = libtbx.env.has_module("dials_regression")
if have_dials_regression:
dials_regression = libtbx.env.find_in_repositories(
relative_path="dials_regression",
test=os.path.isdir)
def exercise_hot_pixel_mask_to_xy():
if not have_dials_regression:
print "Skipping exercise_hot_pixel_mask_to_xy(): dials_regression not available."
return
data_file = os.path.join(dials_regression, "i23", "hot_pixel_mask",
"hot_mask_0.pickle")
commands = ["dev.dials.hot_pixel_mask_to_xy", data_file]
command = " ".join(commands)
result = easy_run.fully_buffered(command=command).raise_if_errors()
return
def run():
exercise_hot_pixel_mask_to_xy()
return
if __name__ == '__main__':
from dials.test import cd_auto
with cd_auto(__file__):
run()
print "OK"
|
Test for jiffy to get hot pixel coordinates as x, yfrom __future__ import division
import os
import libtbx.load_env
from libtbx import easy_run
have_dials_regression = libtbx.env.has_module("dials_regression")
if have_dials_regression:
dials_regression = libtbx.env.find_in_repositories(
relative_path="dials_regression",
test=os.path.isdir)
def exercise_hot_pixel_mask_to_xy():
if not have_dials_regression:
print "Skipping exercise_hot_pixel_mask_to_xy(): dials_regression not available."
return
data_file = os.path.join(dials_regression, "i23", "hot_pixel_mask",
"hot_mask_0.pickle")
commands = ["dev.dials.hot_pixel_mask_to_xy", data_file]
command = " ".join(commands)
result = easy_run.fully_buffered(command=command).raise_if_errors()
return
def run():
exercise_hot_pixel_mask_to_xy()
return
if __name__ == '__main__':
from dials.test import cd_auto
with cd_auto(__file__):
run()
print "OK"
|
<commit_before><commit_msg>Test for jiffy to get hot pixel coordinates as x, y<commit_after>from __future__ import division
import os
import libtbx.load_env
from libtbx import easy_run
have_dials_regression = libtbx.env.has_module("dials_regression")
if have_dials_regression:
dials_regression = libtbx.env.find_in_repositories(
relative_path="dials_regression",
test=os.path.isdir)
def exercise_hot_pixel_mask_to_xy():
if not have_dials_regression:
print "Skipping exercise_hot_pixel_mask_to_xy(): dials_regression not available."
return
data_file = os.path.join(dials_regression, "i23", "hot_pixel_mask",
"hot_mask_0.pickle")
commands = ["dev.dials.hot_pixel_mask_to_xy", data_file]
command = " ".join(commands)
result = easy_run.fully_buffered(command=command).raise_if_errors()
return
def run():
exercise_hot_pixel_mask_to_xy()
return
if __name__ == '__main__':
from dials.test import cd_auto
with cd_auto(__file__):
run()
print "OK"
|
|
a590e100a23a0c225467b34b7c4481ece45905ad
|
tests/test_shells/postproc.py
|
tests/test_shells/postproc.py
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(pid, 'PID')
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
W.write(line)
os.rename(new_fname, fname)
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
line = line.replace(pid, 'PID')
W.write(line)
os.rename(new_fname, fname)
|
Fix functional shell tests in travis
|
Fix functional shell tests in travis
Hostname in travis contains random numbers meaning that it occasionally may
contain a PID as well. Thus it must be replaced first.
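A small sketch of the failure mode this commit message describes (hostname and pid values are made up):

# made-up Travis-style hostname whose digits happen to contain the pid
hostname, pid = 'testing-worker-linux-12345-1', '12345'
line = 'user@testing-worker-linux-12345-1 pid 12345'
# replacing the pid first corrupts the hostname, so the hostname replace misses:
print(line.replace(pid, 'PID').replace(hostname, 'HOSTNAME'))
# -> user@testing-worker-linux-PID-1 pid PID
# replacing the hostname first leaves both substitutions intact:
print(line.replace(hostname, 'HOSTNAME').replace(pid, 'PID'))
# -> user@HOSTNAME pid PID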
|
Python
|
mit
|
Liangjianghao/powerline,bartvm/powerline,lukw00/powerline,kenrachynski/powerline,Luffin/powerline,DoctorJellyface/powerline,Luffin/powerline,areteix/powerline,cyrixhero/powerline,seanfisk/powerline,EricSB/powerline,dragon788/powerline,blindFS/powerline,magus424/powerline,wfscheper/powerline,dragon788/powerline,wfscheper/powerline,cyrixhero/powerline,keelerm84/powerline,Luffin/powerline,kenrachynski/powerline,seanfisk/powerline,russellb/powerline,IvanAli/powerline,xfumihiro/powerline,russellb/powerline,magus424/powerline,seanfisk/powerline,bartvm/powerline,DoctorJellyface/powerline,darac/powerline,EricSB/powerline,cyrixhero/powerline,firebitsbr/powerline,junix/powerline,areteix/powerline,S0lll0s/powerline,prvnkumar/powerline,QuLogic/powerline,bezhermoso/powerline,prvnkumar/powerline,kenrachynski/powerline,xfumihiro/powerline,firebitsbr/powerline,bartvm/powerline,Liangjianghao/powerline,EricSB/powerline,s0undt3ch/powerline,darac/powerline,russellb/powerline,Liangjianghao/powerline,dragon788/powerline,S0lll0s/powerline,QuLogic/powerline,firebitsbr/powerline,s0undt3ch/powerline,lukw00/powerline,s0undt3ch/powerline,xxxhycl2010/powerline,junix/powerline,darac/powerline,blindFS/powerline,prvnkumar/powerline,IvanAli/powerline,junix/powerline,bezhermoso/powerline,S0lll0s/powerline,wfscheper/powerline,IvanAli/powerline,xxxhycl2010/powerline,xfumihiro/powerline,DoctorJellyface/powerline,bezhermoso/powerline,areteix/powerline,magus424/powerline,lukw00/powerline,xxxhycl2010/powerline,blindFS/powerline,QuLogic/powerline,keelerm84/powerline
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(pid, 'PID')
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
W.write(line)
os.rename(new_fname, fname)
Fix functional shell tests in travis
Hostname in travis contains random numbers meaning that it occasionally may
contain a PID as well. Thus it must be replaced first.
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
line = line.replace(pid, 'PID')
W.write(line)
os.rename(new_fname, fname)
|
<commit_before>#!/usr/bin/env python
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(pid, 'PID')
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
W.write(line)
os.rename(new_fname, fname)
<commit_msg>Fix functional shell tests in travis
Hostname in travis contains random numbers meaning that it occasionally may
contain a PID as well. Thus it must be replaced first.<commit_after>
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
line = line.replace(pid, 'PID')
W.write(line)
os.rename(new_fname, fname)
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(pid, 'PID')
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
W.write(line)
os.rename(new_fname, fname)
Fix functional shell tests in travis
Hostname in travis contains random numbers meaning that it occasionally may
contain a PID as well. Thus it must be replaced first.#!/usr/bin/env python
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
line = line.replace(pid, 'PID')
W.write(line)
os.rename(new_fname, fname)
|
<commit_before>#!/usr/bin/env python
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(pid, 'PID')
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
W.write(line)
os.rename(new_fname, fname)
<commit_msg>Fix functional shell tests in travis
Hostname in travis contains random numbers meaning that it occasionally may
contain a PID as well. Thus it must be replaced first.<commit_after>#!/usr/bin/env python
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
line = line.replace(pid, 'PID')
W.write(line)
os.rename(new_fname, fname)
|