_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3 values | text stringlengths 75 19.8k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q41400 | HTTPClient.prepare_http_request | train | def prepare_http_request(self, method_type, params, **kwargs):
"""
Prepares the HTTP REQUEST and returns it.
Args:
method_type: The HTTP method type
params: Additional parameters for the HTTP request.
kwargs: Any extra keyword arguments passed into a client method.
returns:
prepared_request: An HTTP request object.
"""
# Build a requests.Request and let the session prepare it, so session
# state (cookies, auth, default headers) is applied to the request.
prepared_request = self.session.prepare_request(
requests.Request(method=method_type, **params)
)
return prepared_request | python | {
"resource": ""
} |
q41401 | HTTPClient.call_api | train | def call_api(self, method_type, method_name,
valid_status_codes, resource, data,
uid, **kwargs):
"""
Make HTTP calls.
Args:
method_type: The HTTP method
method_name: The name of the python method making the HTTP call
valid_status_codes: A tuple of integer status codes
deemed acceptable as response statuses
resource: The resource class that will be generated
data: The post data being sent.
uid: The unique identifier of the resource.
kwargs: Additional custom keyword arguments; these are passed
through to the subclass hooks get_url, prepare_http_request
and get_http_headers.
Returns:
The result of _handle_response: a list of resource instances.
Raises:
MissingUidException: when a single-resource method is called
without a uid and without any keyword arguments.
"""
url = resource.get_resource_url(
resource, base_url=self.Meta.base_url
)
# Single-resource verbs need a uid (or kwargs) to build a concrete URL.
if method_type in SINGLE_RESOURCE_METHODS:
if not uid and not kwargs:
raise MissingUidException
url = resource.get_url(
url=url, uid=uid, **kwargs)
params = {
'headers': self.get_http_headers(
self.Meta.name, method_name, **kwargs),
'url': url
}
# Only body-carrying verbs get a JSON payload attached.
if method_type in ['POST', 'PUT', 'PATCH'] and isinstance(data, dict):
params.update(json=data)
prepared_request = self.prepare_http_request(
method_type, params, **kwargs)
response = self.session.send(prepared_request)
return self._handle_response(response, valid_status_codes, resource) | python | {
"resource": ""
} |
q41402 | HTTPClient._handle_response | train | def _handle_response(self, response, valid_status_codes, resource):
"""
Handles Response objects
Args:
response: An HTTP response object
valid_status_codes: A tuple of valid status codes
resource: The resource class to build from this response
returns:
resources: A list of Resource instances
"""
if response.status_code not in valid_status_codes:
raise InvalidStatusCodeError(
status_code=response.status_code,
expected_status_codes=valid_status_codes
)
if response.content:
data = response.json()
if isinstance(data, list):
# A list of results is always rendered
return [resource(**x) for x in data]
else:
# Try and find the paginated resources
key = getattr(resource.Meta, 'pagination_key', None)
if isinstance(data.get(key), list):
# Only return the paginated responses
return [resource(**x) for x in data.get(key)]
else:
# Attempt to render this whole response as a resource
return [resource(**data)]
# Empty body (e.g. 204 No Content) renders no resources.
return [] | python | {
"resource": ""
} |
q41403 | HTTPHypermediaClient._call_api_single_related_resource | train | def _call_api_single_related_resource(self, resource, full_resource_url,
method_name, **kwargs):
"""
For HypermediaResource - make an API call to a known URL
Args:
resource: resource class supplying Meta.name and
Meta.valid_status_codes
full_resource_url: the absolute URL to GET
method_name: name of the calling client method (used for headers)
Returns:
The list of resources built by _handle_response.
"""
url = full_resource_url
params = {
'headers': self.get_http_headers(
resource.Meta.name, method_name, **kwargs),
'url': url
}
prepared_request = self.prepare_http_request(
'GET', params, **kwargs)
response = self.session.send(prepared_request)
return self._handle_response(
response, resource.Meta.valid_status_codes, resource) | python | {
"resource": ""
} |
def _call_api_many_related_resources(self, resource, url_list,
                                     method_name, **kwargs):
    """
    For HypermediaResource - make an API call to a list of known URLs.
    Args:
        resource: resource class providing Meta.name / Meta.valid_status_codes
        url_list: iterable of absolute URLs to GET
        method_name: name of the calling client method (used for headers)
    Returns:
        One entry per URL: the single resource when the response yielded
        exactly one, otherwise the (possibly empty) list of resources.
    """
    responses = []
    for url in url_list:
        params = {
            'headers': self.get_http_headers(
                resource.Meta.name, method_name, **kwargs),
            'url': url
        }
        prepared_request = self.prepare_http_request(
            'GET', params, **kwargs)
        response = self.session.send(prepared_request)
        result = self._handle_response(
            response, resource.Meta.valid_status_codes, resource)
        if len(result) == 1:
            responses.append(result[0])
        else:
            # Bug fix: previous code did `result[0]` for any length <= 1,
            # raising IndexError when a response produced no resources.
            responses.append(result)
    return responses
"resource": ""
} |
q41405 | BaseClient.assign_methods | train | def assign_methods(self, resource_class):
"""
Given a resource_class and it's Meta.methods tuple,
assign methods for communicating with that resource.
Args:
resource_class: A single resource class
"""
# Validate declared methods up front. NOTE: `assert` is stripped when
# Python runs with -O, so invalid entries would then pass silently.
assert all([
x.upper() in VALID_METHODS for x in resource_class.Meta.methods])
for method in resource_class.Meta.methods:
self._assign_method(
resource_class,
method.upper()
) | python | {
"resource": ""
} |
def _assign_method(self, resource_class, method_type):
    """
    Using reflection, assigns a new method to this class.
    Args:
        resource_class: A resource class
        method_type: The HTTP method type ('GET', 'PUT', 'POST',
            'PATCH' or 'DELETE')
    """
    method_name = resource_class.get_method_name(
        resource_class, method_type)
    valid_status_codes = getattr(
        resource_class.Meta,
        'valid_status_codes',
        DEFAULT_VALID_STATUS_CODES
    )

    # Each call to _assign_method executes this `def`, producing a fresh
    # function object with its own defaults, so one handler suffices.
    # The previous version duplicated the identical closure five times
    # (get/put/post/patch/delete) to work around a problem that does not
    # arise with per-call definitions.
    def handler(self, method_type=method_type, method_name=method_name,
                valid_status_codes=valid_status_codes,
                resource=resource_class, data=None, uid=None, **kwargs):
        return self.call_api(
            method_type, method_name,
            valid_status_codes, resource,
            data, uid=uid, **kwargs)

    setattr(
        self, method_name,
        types.MethodType(handler, self)
    )
"resource": ""
} |
q41407 | command | train | def command(state, args):
"""Purge all caches."""
# Recreate the cache manager's backing store, drop memoized episode
# types, and delete the file picker so it is rebuilt on next use.
state.cache_manager.teardown()
state.cache_manager.setup()
EpisodeTypes.forget(state.db)
del state.file_picker | python | {
"resource": ""
} |
def append(self, row):
    """Append a single result row after validating its width.

    The row is coerced to a tuple; ValueError is raised when its
    length differs from the table width.
    """
    entry = tuple(row)
    if len(entry) != self.table_width:
        raise ValueError('Wrong result row length')
    self.results.append(entry)
"resource": ""
} |
def set(self, results):
    """Replace all stored results.

    results is an iterable of row tuples; every row goes through
    append() and is therefore width-checked.
    """
    self.results = []
    for entry in results:
        self.append(entry)
"resource": ""
} |
q41410 | command | train | def command(state, args):
"""Unregister watching regexp for an anime."""
args = parser.parse_args(args[1:])
if args.complete:
# --complete: drop regexps for all completed series at once.
query.files.delete_regexp_complete(state.db)
else:
if args.aid is None:
parser.print_help()
else:
# Resolve the user-supplied aid through the results cache.
aid = state.results.parse_aid(args.aid, default_key='db')
query.files.delete_regexp(state.db, aid) | python | {
"resource": ""
} |
def smooth_hanning(x, size=11):
    """Smooth a 1D array using a hanning window with requested size.

    Args:
        x: 1-D numpy array to smooth.
        size: width of the hanning window; sizes < 3 disable smoothing.
    Returns:
        The smoothed signal. NOTE: because of the mirrored edge padding
        the output is longer than the input (len(x) + size - 1 samples);
        this historical behaviour is preserved for existing callers.
    Raises:
        ValueError: if x is not 1-D, or shorter than the window.
    """
    if x.ndim != 1:
        # Bug fix: `raise ValueError, "..."` is Python-2-only syntax;
        # the call form below works on both Python 2 and 3.
        raise ValueError("smooth_hanning only accepts 1-D arrays.")
    if x.size < size:
        raise ValueError("Input vector needs to be bigger than window size.")
    if size < 3:
        return x
    # Mirror the edges so the convolution does not ring at the ends.
    s = np.r_[x[size - 1:0:-1], x, x[-1:-size:-1]]
    w = np.hanning(size)
    y = np.convolve(w / w.sum(), s, mode='valid')
    return y
"resource": ""
} |
q41412 | vspht | train | def vspht(vsphere, nmax=None, mmax=None):
"""Returns a VectorCoefs object containing the vector spherical harmonic
coefficients of the VectorPatternUniform object"""
# Derive the maximum n/m modes from the sampling grid when not given.
if nmax == None:
nmax = vsphere.nrows - 2
mmax = int(vsphere.ncols / 2) - 1
elif mmax == None:
mmax = nmax
if mmax > nmax:
raise ValueError(err_msg['nmax_g_mmax'])
if nmax >= vsphere.nrows - 1:
raise ValueError(err_msg['nmax_too_lrg'])
if mmax >= vsphere.ncols / 2:
raise ValueError(err_msg['mmax_too_lrg'])
dnrows = vsphere._tdsphere.shape[0]
ncols = vsphere._tdsphere.shape[1]
if np.mod(ncols, 2) == 1:
raise ValueError(err_msg['ncols_even'])
# Forward FFT of the theta component, normalized by the grid size.
ft = np.fft.fft2(vsphere._tdsphere) / (dnrows * ncols)
ops.fix_even_row_data_fc(ft)
ft_extended = np.zeros([dnrows + 2, ncols], dtype=np.complex128)
ops.pad_rows_fdata(ft, ft_extended)
# Same treatment for the phi component.
pt = np.fft.fft2(vsphere._pdsphere) / (dnrows * ncols)
ops.fix_even_row_data_fc(pt)
pt_extended = np.zeros([dnrows + 2, ncols], dtype=np.complex128)
ops.pad_rows_fdata(pt, pt_extended)
ftmp = np.copy(ft_extended)
ptmp = np.copy(pt_extended)
Lf1 = ops.sinLdot_fc(ft_extended, pt_extended)
Lf2 = ops.sinLdot_fc(-1j * ptmp, 1j * ftmp)
# check if we are using c extended versions of the code or not
if use_cext:
N = nmax + 1;
NC = N + mmax * (2 * N - mmax - 1);
sc1 = np.zeros(NC, dtype=np.complex128)
sc2 = np.zeros(NC, dtype=np.complex128)
csphi.fc_to_sc(Lf1, sc1, nmax, mmax)
csphi.fc_to_sc(Lf2, sc2, nmax, mmax)
else:
sc1 = pysphi.fc_to_sc(Lf1, nmax, mmax)
sc2 = pysphi.fc_to_sc(Lf2, nmax, mmax)
vcoefs = VectorCoefs(sc1, sc2, nmax, mmax)
# Apply the 1/sqrt(n(n+1)) normalization window (xrange: Python 2).
nvec = np.zeros(nmax + 1, dtype=np.complex128)
for n in xrange(1, nmax + 1):
nvec[n] = 1.0 / np.sqrt(n * (n + 1.0))
vcoefs.scoef1.window(nvec)
vcoefs.scoef2.window(nvec)
return vcoefs | python | {
"resource": ""
} |
q41413 | ScalarCoefs._reshape_n_vecs | train | def _reshape_n_vecs(self):
"""return list of arrays, each array represents a different m mode,
ordered m = 0, -1, +1, -2, +2, ... (xrange: Python 2)"""
lst = []
sl = slice(None, None, None)
lst.append(self.__getitem__((sl, 0)))
for m in xrange(1, self.mmax + 1):
lst.append(self.__getitem__((sl, -m)))
lst.append(self.__getitem__((sl, m)))
return lst | python | {
"resource": ""
} |
q41414 | ScalarCoefs._reshape_m_vecs | train | def _reshape_m_vecs(self):
"""return list of lists, entry n holds the coefficients for modes
m = -min(n, mmax) .. +min(n, mmax)"""
lst = []
for n in xrange(0, self.nmax + 1):
mlst = []
# m is clipped to mmax on high-n rows.
if n <= self.mmax:
nn = n
else:
nn = self.mmax
for m in xrange(-nn, nn + 1):
mlst.append(self.__getitem__((n, m)))
lst.append(mlst)
return lst | python | {
"resource": ""
} |
q41415 | ScalarCoefs._scalar_coef_op_left | train | def _scalar_coef_op_left(func):
"""decorator for operator overloading when ScalarCoef is on the
left; accepts another ScalarCoefs of identical size, or a plain
number, and wraps the result back into a ScalarCoefs"""
@wraps(func)
def verif(self, scoef):
if isinstance(scoef, ScalarCoefs):
if len(self._vec) == len(scoef._vec):
return ScalarCoefs(func(self, self._vec, scoef._vec),
self.nmax,
self.mmax)
else:
raise ValueError(err_msg['SC_sz_msmtch'] % \
(self.nmax, self.mmax,
scoef.nmax, scoef.mmax))
elif isinstance(scoef, numbers.Number):
return ScalarCoefs(func(self, self._vec, scoef), self.nmax,
self.mmax)
else:
raise TypeError(err_msg['no_combi_SC'])
return verif | python | {
"resource": ""
} |
q41416 | ScalarCoefs._scalar_coef_op_right | train | def _scalar_coef_op_right(func):
"""decorator for operator overloading when ScalarCoef is on the
right"""
@wraps(func)
def verif(self, scoef):
if isinstance(scoef, numbers.Number):
return ScalarCoefs(func(self, self._vec, scoef),
self.nmax, self.mmax)
else:
raise TypeError(err_msg['no_combi_SC'])
return verif | python | {
"resource": ""
} |
def _vector_coef_op_left(func):
    """decorator for operator overloading when VectorCoef is on the
    left; accepts another VectorCoefs of identical size, or a plain
    number, applied to both component coefficient sets"""
    @wraps(func)
    def verif(self, vcoef):
        if isinstance(vcoef, VectorCoefs):
            # Bug fix: the size guard previously compared vcoef's vector
            # with itself (always True); mirror the scalar decorator and
            # compare self against the other operand.
            if len(self.scoef1._vec) == len(vcoef.scoef1._vec):
                return VectorCoefs(func(self, self.scoef1._vec,
                                        vcoef.scoef1._vec),
                                   func(self, self.scoef2._vec,
                                        vcoef.scoef2._vec),
                                   self.nmax,
                                   self.mmax)
            else:
                raise ValueError(err_msg['VC_sz_msmtch'] %
                                 (self.nmax, self.mmax,
                                  vcoef.nmax, vcoef.mmax))
        elif isinstance(vcoef, numbers.Number):
            return VectorCoefs(func(self, self.scoef1._vec, vcoef),
                               func(self, self.scoef2._vec, vcoef),
                               self.nmax,
                               self.mmax)
        else:
            raise TypeError(err_msg['no_combi_VC'])
    return verif
"resource": ""
} |
q41418 | VectorCoefs._vector_coef_op_right | train | def _vector_coef_op_right(func):
"""decorator for operator overloading when VectorCoefs is on the
right; only plain numbers are supported, applied to both
component coefficient sets"""
@wraps(func)
def verif(self, vcoef):
if isinstance(vcoef, numbers.Number):
return VectorCoefs(func(self, self.scoef1._vec, vcoef),
func(self, self.scoef2._vec, vcoef),
self.nmax, self.mmax)
else:
raise TypeError(err_msg['no_combi_VC'])
return verif | python | {
"resource": ""
} |
q41419 | ScalarPatternUniform._scalar_pattern_uniform_op_left | train | def _scalar_pattern_uniform_op_left(func):
"""Decorator for operator overloading when ScalarPatternUniform is on
the left. The other operand must share the same double-sphere shape,
or be a plain number."""
@wraps(func)
def verif(self, patt):
if isinstance(patt, ScalarPatternUniform):
if self._dsphere.shape == patt._dsphere.shape:
return ScalarPatternUniform(func(self, self._dsphere,
patt._dsphere),
doublesphere=True)
else:
raise ValueError(err_msg['SP_sz_msmtch'] % \
(self.nrows, self.ncols,
patt.nrows, patt.ncols))
elif isinstance(patt, numbers.Number):
return ScalarPatternUniform(func(self, self._dsphere, patt),
doublesphere=True)
else:
raise TypeError(err_msg['no_combi_SP'])
return verif | python | {
"resource": ""
} |
q41420 | ScalarPatternUniform._scalar_pattern_uniform_op_right | train | def _scalar_pattern_uniform_op_right(func):
"""Decorator for operator overloading when ScalarPatternUniform is on
the right; only plain numbers are supported."""
@wraps(func)
def verif(self, patt):
if isinstance(patt, numbers.Number):
return ScalarPatternUniform(func(self, self._dsphere, patt),
doublesphere=True)
else:
raise TypeError(err_msg['no_combi_SP'])
return verif | python | {
"resource": ""
} |
def single_val(self):
    """Return relative error of worst point that might make the data none
    symmetric, for each transverse component.

    Returns:
        (sv_t, sv_p): the value for the theta sphere and for the
        phi sphere respectively.
    """
    sv_t = self._sv(self._tdsphere)
    # Bug fix: the phi value was previously computed from the theta
    # sphere (_tdsphere) a second time; use the phi sphere here.
    sv_p = self._sv(self._pdsphere)
    return (sv_t, sv_p)
"resource": ""
} |
q41422 | TransversePatternUniform._vector_pattern_uniform_op_left | train | def _vector_pattern_uniform_op_left(func):
"""decorator for operator overloading when VectorPatternUniform is on
the left; the operation is applied to both the theta and phi
double-sphere components"""
@wraps(func)
def verif(self, patt):
if isinstance(patt, TransversePatternUniform):
if self._tdsphere.shape == patt._tdsphere.shape:
return TransversePatternUniform(func(self, self._tdsphere,
patt._tdsphere),
func(self, self._pdsphere,
patt._pdsphere),
doublesphere=True)
else:
raise ValueError(err_msg['VP_sz_msmtch'] % \
(self.nrows, self.ncols,
patt.nrows, patt.ncols))
elif isinstance(patt, numbers.Number):
return TransversePatternUniform(func(self, self._tdsphere, patt),
func(self, self._pdsphere, patt),
doublesphere=True)
else:
raise TypeError(err_msg['no_combi_VP'])
return verif | python | {
"resource": ""
} |
q41423 | TransversePatternUniform._vector_pattern_uniform_op_right | train | def _vector_pattern_uniform_op_right(func):
"""decorator for operator overloading when VectorPatternUniform is on
the right; only plain numbers are supported, applied to both
components"""
@wraps(func)
def verif(self, patt):
if isinstance(patt, numbers.Number):
return TransversePatternUniform(func(self, self._tdsphere, patt),
func(self, self._pdsphere, patt),
doublesphere=True)
else:
raise TypeError(err_msg['no_combi_VP'])
return verif | python | {
"resource": ""
} |
q41424 | Device.async_set_port_poe_mode | train | async def async_set_port_poe_mode(self, port_idx, mode):
"""Set port poe mode.
Auto, 24v, passthrough, off.
Make sure to not overwrite any existing configs.
"""
# Update an existing override for this port in place when present.
no_existing_config = True
for port_override in self.port_overrides:
if port_idx == port_override['port_idx']:
port_override['poe_mode'] = mode
no_existing_config = False
break
if no_existing_config:
# NOTE(review): self.ports is indexed with port_idx directly here;
# confirm ports is keyed by port_idx rather than 0-based position.
self.port_overrides.append({
'port_idx': port_idx,
'portconf_id': self.ports[port_idx].portconf_id,
'poe_mode': mode
})
url = 's/{site}/rest/device/' + self.id
data = {'port_overrides': self.port_overrides}
await self._request('put', url, json=data) | python | {
"resource": ""
} |
q41425 | list_remotes | train | def list_remotes(device=None, address=None):
"""
List the available remotes.
All parameters are passed to irsend. See the man page for irsend
for details about their usage.
Parameters
----------
device: str
address: str
Returns
-------
[str]
Notes
-----
No attempt is made to catch or handle errors. See the documentation
for subprocess.check_output to see the types of exceptions it may raise.
"""
output = _call(["list", "", ""], None, device, address)
# Each non-empty output line ends with the remote's name.
remotes = [l.split()[-1] for l in output.splitlines() if l]
return remotes | python | {
"resource": ""
} |
q41426 | list_codes | train | def list_codes(remote, device=None, address=None):
"""
List the codes for a given remote.
All parameters are passed to irsend. See the man page for irsend
for details about their usage.
Parameters
----------
remote: str
device: str
address: str
Returns
-------
[str]
Notes
-----
No attempt is made to catch or handle errors. See the documentation
for subprocess.check_output to see the types of exceptions it may raise.
"""
output = _call(["list", remote, ""], None, device, address)
# Each non-empty output line ends with the code's name.
codes = [l.split()[-1] for l in output.splitlines() if l]
return codes | python | {
"resource": ""
} |
def check_updates():
    """Check and display upgraded packages

    Returns [message, count, packages] where message summarises how
    many updates fetch() reported.
    """
    count, packages = fetch()
    if count > 0:
        message = "{0} software updates are available\n".format(count)
    else:
        message = "No news is good news !"
    return [message, count, packages]
"resource": ""
} |
q41428 | _init_check_upodates | train | def _init_check_upodates():
"""Sub function for init: print the update summary and, when updates
exist, one line per upgraded package.
(The misspelled name "upodates" is kept: it is part of the interface.)
"""
message, count, packages = check_updates()
if count > 0:
print(message)
for pkg in packages:
print("{0}".format(pkg))
else:
print(message) | python | {
"resource": ""
} |
q41429 | init | train | def init():
"""Initialization , all begin from here.
Dispatches the command line (start/stop/restart/check/status/help/info)
to the sun daemon and helper functions; requires root via su().
"""
su()
args = sys.argv
args.pop(0)
cmd = "{0}sun_daemon".format(bin_path)
# NOTE: shell=True is acceptable here only because every command is
# built from internal constants, never from user input.
if len(args) == 1:
if args[0] == "start":
print("Starting SUN daemon: {0} &".format(cmd))
subprocess.call("{0} &".format(cmd), shell=True)
elif args[0] == "stop":
print("Stopping SUN daemon: {0}".format(cmd))
subprocess.call("killall sun_daemon", shell=True)
elif args[0] == "restart":
print("Stopping SUN daemon: {0}".format(cmd))
subprocess.call("killall sun_daemon", shell=True)
print("Starting SUN daemon: {0} &".format(cmd))
subprocess.call("{0} &".format(cmd), shell=True)
elif args[0] == "check":
_init_check_upodates()
elif args[0] == "status":
print(daemon_status())
elif args[0] == "help":
usage()
elif args[0] == "info":
print(os_info())
else:
print("try: 'sun help'")
elif len(args) == 2 and args[0] == "start" and args[1] == "--gtk":
subprocess.call("{0} {1}".format(cmd, "start--gtk"), shell=True)
else:
print("try: 'sun help'") | python | {
"resource": ""
} |
q41430 | cli | train | def cli(parser):
'''
Uninstall inactive Python packages from all accessible site-packages directories.
Inactive Python packages
when multiple packages with the same name are installed
'''
parser.add_argument('-n', '--dry-run', action='store_true', help='Print cleanup actions without running')
opts = parser.parse_args()
for sitedir in site.getsitepackages():
# NOTE(review): opts.verbose is read but no --verbose option is added
# here - confirm the caller's parser defines it, otherwise this line
# raises AttributeError.
cleanup(sitedir, execute=not opts.dry_run, verbose=opts.verbose or opts.dry_run) | python | {
"resource": ""
} |
q41431 | GtkStatusIcon.daemon_start | train | def daemon_start(self):
"""Start daemon when gtk loaded, but only if it is not already running.
"""
if daemon_status() == "SUN not running":
subprocess.call("{0} &".format(self.cmd), shell=True) | python | {
"resource": ""
} |
q41432 | GtkStatusIcon.sub_menu | train | def sub_menu(self):
"""Create daemon submenu (Start/Stop/Restart/Status entries with
stock icons) and attach it to self.daemon.
"""
submenu = gtk.Menu()
self.start = gtk.ImageMenuItem("Start")
self.stop = gtk.ImageMenuItem("Stop")
self.restart = gtk.ImageMenuItem("Restart")
self.status = gtk.ImageMenuItem("Status")
self.start.show()
self.stop.show()
self.restart.show()
self.status.show()
img_Start = gtk.image_new_from_stock(gtk.STOCK_MEDIA_PLAY,
gtk.ICON_SIZE_MENU)
img_Start.show()
self.start.set_image(img_Start)
img_Stop = gtk.image_new_from_stock(gtk.STOCK_STOP,
gtk.ICON_SIZE_MENU)
img_Stop.show()
self.stop.set_image(img_Stop)
img_Restart = gtk.image_new_from_stock(gtk.STOCK_REFRESH,
gtk.ICON_SIZE_MENU)
img_Restart.show()
self.restart.set_image(img_Restart)
img_Status = gtk.image_new_from_stock(gtk.STOCK_DIALOG_QUESTION,
gtk.ICON_SIZE_MENU)
img_Status.show()
self.status.set_image(img_Status)
submenu.append(self.start)
submenu.append(self.stop)
submenu.append(self.restart)
submenu.append(self.status)
self.daemon = gtk.ImageMenuItem("Daemon")
self.img_daemon = gtk.image_new_from_stock(self.daemon_STOCK,
gtk.ICON_SIZE_MENU)
self.img_daemon.show()
self.daemon.set_submenu(submenu) | python | {
"resource": ""
} |
q41433 | GtkStatusIcon.menu | train | def menu(self, event_button, event_time, data=None):
"""Create popup menu
with the daemon submenu plus Check/Info/About/Quit entries, wire up
their handlers, and pop it up at the click position.
"""
self.sub_menu()
menu = gtk.Menu()
menu.append(self.daemon)
separator = gtk.SeparatorMenuItem()
menu_Check = gtk.ImageMenuItem("Check updates")
img_Check = gtk.image_new_from_stock(gtk.STOCK_OK,
gtk.ICON_SIZE_MENU)
img_Check.show()
menu_Info = gtk.ImageMenuItem("OS Info")
img_Info = gtk.image_new_from_stock(gtk.STOCK_INFO,
gtk.ICON_SIZE_MENU)
img_Info.show()
menu_About = gtk.ImageMenuItem("About")
img_About = gtk.image_new_from_stock(gtk.STOCK_ABOUT,
gtk.ICON_SIZE_MENU)
img_About.show()
self.daemon.set_image(self.img_daemon)
# NOTE(review): self.daemon was already appended above - this second
# append looks redundant; confirm it is intentional.
menu.append(self.daemon)
self.daemon.show()
menu_Quit = gtk.ImageMenuItem("Quit")
img_Quit = gtk.image_new_from_stock(gtk.STOCK_QUIT, gtk.ICON_SIZE_MENU)
img_Quit.show()
menu_Check.set_image(img_Check)
menu_Info.set_image(img_Info)
menu_About.set_image(img_About)
menu_Quit.set_image(img_Quit)
menu.append(menu_Check)
menu.append(menu_Info)
menu.append(separator)
menu.append(menu_About)
menu.append(menu_Quit)
separator.show()
menu_Check.show()
menu_Info.show()
menu_About.show()
menu_Quit.show()
menu_Check.connect_object("activate", self._Check, " ")
menu_Info.connect_object("activate", self._Info, "OS Info")
menu_About.connect_object("activate", self._About, "SUN")
self.start.connect_object("activate", self._start, "Start daemon ")
self.stop.connect_object("activate", self._stop, "Stop daemon ")
self.restart.connect_object("activate", self._restart,
"Restart daemon ")
self.status.connect_object("activate", self._status, daemon_status())
menu_Quit.connect_object("activate", self._Quit, "stop")
menu.popup(None, None, None, event_button, event_time, data) | python | {
"resource": ""
} |
q41434 | GtkStatusIcon.message | train | def message(self, data):
"""Function to display messages to the user
"""
msg = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_INFO,
gtk.BUTTONS_CLOSE, data)
msg.set_resizable(1)
msg.set_title(self.dialog_title)
self.img.set_from_file(self.sun_icon)
msg.set_image(self.img)
msg.show_all()
msg.run()
msg.destroy() | python | {
"resource": ""
} |
def right_click(self, data, event_button, event_time):
    """Right click handler.

    Simply forwards the click coordinates to the popup-menu builder.
    """
    self.menu(event_button, event_time, data)
"resource": ""
} |
def parse(payload, candidate_classes):
    """ Parse a json response into an intent.
    Each candidate class is given a chance to parse the payload, in
    order; the first truthy result wins.
    :param payload: a JSON object representing an intent.
    :param candidate_classes: intent classes each exposing a `parse`
                              method that returns the intent or None.
    :return: the first successfully parsed intent, or None.
    """
    attempts = (cls.parse(payload) for cls in candidate_classes)
    return next((intent for intent in attempts if intent), None)
"resource": ""
} |
q41437 | IntentParser.parse_instant_time | train | def parse_instant_time(slot):
""" Parse a slot into an InstantTime object.
Sample response:
{
"entity": "snips/datetime",
"range": {
"end": 36,
"start": 28
},
"rawValue": "tomorrow",
"slotName": "weatherForecastStartDatetime",
"value": {
"grain": "Day",
"kind": "InstantTime",
"precision": "Exact",
"value": "2017-09-15 00:00:00 +00:00"
}
}
:param slot: a intent slot.
:return: a parsed InstantTime object, or None.
"""
date = IntentParser.get_dict_value(slot, ['value', 'value'])
if not date:
return None
# parse() is the module-level date parser (presumably
# dateutil.parser.parse - TODO confirm); bail out on a falsy result.
date = parse(date)
if not date:
return None
grain = InstantTime.parse_grain(
IntentParser.get_dict_value(slot,
['value', 'grain']))
return InstantTime(date, grain) | python | {
"resource": ""
} |
q41438 | IntentParser.parse_time_interval | train | def parse_time_interval(slot):
""" Parse a slot into a TimeInterval object.
Sample response:
{
"entity": "snips/datetime",
"range": {
"end": 42,
"start": 13
},
"rawValue": "between tomorrow and saturday",
"slotName": "weatherForecastStartDatetime",
"value": {
"from": "2017-09-15 00:00:00 +00:00",
"kind": "TimeInterval",
"to": "2017-09-17 00:00:00 +00:00"
}
}
:param slot: a intent slot.
:return: a parsed TimeInterval object, or None.
"""
start = IntentParser.get_dict_value(
slot, ['value', 'from'])
end = IntentParser.get_dict_value(slot, ['value', 'to'])
if not start or not end:
return None
# parse() is the module-level date parser (presumably
# dateutil.parser.parse - TODO confirm); both ends must parse.
start = parse(start)
end = parse(end)
if not start or not end:
return None
return TimeInterval(start, end) | python | {
"resource": ""
} |
q41439 | CSViewer.csview | train | def csview(self, view=False):
"""View chemical shift values organized by amino acid residue.
Builds one graphviz record-style graph per chain: a node per residue,
chained to one node per atom/chemical-shift pair.
:param view: Open in default image viewer or save file in current working directory quietly.
:type view: :py:obj:`True` or :py:obj:`False`
:return: None
:rtype: :py:obj:`None`
"""
for starfile in fileio.read_files(self.from_path):
chains = starfile.chem_shifts_by_residue(amino_acids=self.amino_acids,
atoms=self.atoms,
amino_acids_and_atoms=self.amino_acids_and_atoms,
nmrstar_version=self.nmrstar_version)
for idx, chemshifts_dict in enumerate(chains):
nodes = []
edges = []
for seq_id in chemshifts_dict:
aaname = "{}_{}".format(chemshifts_dict[seq_id]["AA3Code"], seq_id)
label = '"{{{}|{}}}"'.format(seq_id, chemshifts_dict[seq_id]["AA3Code"])
color = 8
aanode_entry = " {} [label={}, fillcolor={}]".format(aaname, label, color)
nodes.append(aanode_entry)
currnodename = aaname
for atom_type in chemshifts_dict[seq_id]:
if atom_type in ["AA3Code", "Seq_ID"]:
continue
else:
atname = "{}_{}".format(aaname, atom_type)
label = '"{{{}|{}}}"'.format(atom_type, chemshifts_dict[seq_id][atom_type])
# Color atom nodes by element prefix: H=4, C=6, N=10, other=8.
if atom_type.startswith("H"):
color = 4
elif atom_type.startswith("C"):
color = 6
elif atom_type.startswith("N"):
color = 10
else:
color = 8
atnode_entry = "{} [label={}, fillcolor={}]".format(atname, label, color)
nextnodename = atname
nodes.append(atnode_entry)
edges.append("{} -> {}".format(currnodename, nextnodename))
currnodename = nextnodename
# One output file per chain, suffixed with the chain index.
if self.filename is None:
filename = "{}_{}".format(starfile.id, idx)
else:
filename = "{}_{}".format(self.filename, idx)
src = Source(self.dot_template.format("\n".join(nodes), "\n".join(edges)), format=self.csview_format)
src.render(filename=filename, view=view) | python | {
"resource": ""
} |
def getStringPartition(self):
    """
    Get the string representation of the current partition
    @return string like ":-1,0:2" (each slice is rendered as
            "start:stop," with empty ends for None; the trailing
            comma of the last slice is kept)
    """
    pieces = []
    for slc in self.partitions[self.index].getSlice():
        lo = '' if slc.start is None else int(slc.start)
        hi = '' if slc.stop is None else int(slc.stop)
        pieces.append('{0}:{1},'.format(lo, hi))
    return ''.join(pieces)
"resource": ""
} |
q41441 | urlopen | train | def urlopen(link):
"""Return urllib2 urlopen
NOTE(review): error paths are inconsistent - URLError falls through
and returns None implicitly while ValueError returns "" - confirm
callers handle both.
"""
try:
return urllib2.urlopen(link)
except urllib2.URLError:
pass
except ValueError:
return ""
except KeyboardInterrupt:
print("")
raise SystemExit() | python | {
"resource": ""
} |
def ins_packages():
    """Count installed Slackware packages
    (non-hidden entries under the package database directory).
    """
    return sum(1 for entry in os.listdir(pkg_path)
               if not entry.startswith("."))
"resource": ""
} |
def read_config(config):
    """Return the first uncommented, non-blank line of a config string,
    left-stripped, or an empty string when none exists.
    """
    candidates = (raw.lstrip() for raw in config.splitlines())
    for candidate in candidates:
        if candidate and not candidate.startswith("#"):
            return candidate
    return ""
"resource": ""
} |
q41444 | mirror | train | def mirror():
"""Get mirror from slackpkg mirrors file and return its ChangeLog.txt
URL, or "" when no mirror is uncommented.
"""
slack_mirror = read_config(
read_file("{0}{1}".format(etc_slackpkg, "mirrors")))
if slack_mirror:
return slack_mirror + changelog_txt
else:
print("\nYou do not have any mirror selected in /etc/slackpkg/mirrors"
"\nPlease edit that file and uncomment ONE mirror.\n")
return "" | python | {
"resource": ""
} |
q41445 | fetch | train | def fetch():
"""Get ChangeLog.txt file size and counts upgraded packages
Returns:
[count, upgraded]: number of ChangeLog entries newer than the first
line of the local ChangeLog.txt, and the matching package lines.
"""
mir, r, slackpkg_last_date = mirror(), "", ""
count, upgraded = 0, []
if mir:
tar = urlopen(mir)
try:
r = tar.read()
except AttributeError:
# urlopen() may return None/"" on failure, which has no .read().
print("sun: error: can't read mirror")
if os.path.isfile(var_lib_slackpkg + changelog_txt):
# The first line of the local ChangeLog marks where counting stops.
slackpkg_last_date = read_file("{0}{1}".format(
var_lib_slackpkg, changelog_txt)).split("\n", 1)[0].strip()
else:
return [count, upgraded]
for line in r.splitlines():
if slackpkg_last_date == line.strip():
break
if (line.endswith("z: Upgraded.") or line.endswith("z: Rebuilt.") or
line.endswith("z: Added.") or line.endswith("z: Removed.")):
upgraded.append(line.split("/")[-1])
count += 1
if (line.endswith("*: Upgraded.") or line.endswith("*: Rebuilt.") or
line.endswith("*: Added.") or line.endswith("*: Removed.")):
upgraded.append(line)
count += 1
return [count, upgraded] | python | {
"resource": ""
} |
q41446 | config | train | def config():
"""Return sun configuration values.
NOTE(review): the defaults are ints but values parsed from sun.conf
stay strings (e.g. "60") - confirm consumers coerce before arithmetic.
"""
conf_args = {
"INTERVAL": 60,
"STANDBY": 3
}
config_file = read_file("{0}{1}".format(conf_path, "sun.conf"))
for line in config_file.splitlines():
line = line.lstrip()
if line and not line.startswith("#"):
conf_args[line.split("=")[0]] = line.split("=")[1]
return conf_args | python | {
"resource": ""
} |
q41447 | os_info | train | def os_info():
"""Get OS info as a multi-line string: user, distro, version,
stable/current type, arch, kernel and installed package count.
"""
stype = ""
slack, ver = slack_ver()
mir = mirror()
if mir:
# A "current" mirror URL implies a -current (rolling) install.
if "current" in mir:
stype = "Current"
else:
stype = "Stable"
info = (
"User: {0}\n"
"OS: {1}\n"
"Version: {2}\n"
"Type: {3}\n"
"Arch: {4}\n"
"Kernel: {5}\n"
"Packages: {6}".format(getpass.getuser(), slack, ver, stype,
os.uname()[4], os.uname()[2], ins_packages()))
return info | python | {
"resource": ""
} |
def getPrimeFactors(n):
    """
    Get all the divisors of the given integer (despite the historical
    name, the result contains every factor of n, not only primes).
    @param n integer
    @return list [1, ..., n]
    """
    divisors = [1]
    # `(n // k) * k == n` is just the round-trip form of divisibility;
    # use the modulo test directly. (Also removed a dead `k = 2`.)
    for k in range(2, n // 2 + 1):
        if n % k == 0:
            divisors.append(k)
    # NOTE: preserved quirk - for n == 1 this returns [1, 1].
    return divisors + [n, ]
"resource": ""
} |
q41449 | CubeDecomp.getNeighborProc | train | def getNeighborProc(self, proc, offset, periodic=None):
"""
Get the neighbor to a processor
@param proc the reference processor rank
@param offset displacement, e.g. (1, 0) for north, (0, -1) for west,...
@param periodic boolean list of True/False values, True if axis is
periodic, False otherwise
@note will return None if there is no neighbor
"""
if self.mit is None:
# no decomp, just exit
return None
# Convert the flat rank to per-axis indices and apply the offset.
inds = [self.mit.getIndicesFromBigIndex(proc)[d] + offset[d]
for d in range(self.ndims)]
if periodic is not None and self.decomp is not None:
# apply modulo operation on periodic axes
for d in range(self.ndims):
if periodic[d]:
inds[d] = inds[d] % self.decomp[d]
if self.mit.areIndicesValid(inds):
return self.mit.getBigIndexFromIndices(inds)
else:
return None | python | {
"resource": ""
} |
def __computeDecomp(self):
    """
    Compute the optimal domain decomposition: every sub-domain gets the
    same volume in index space and the chosen decomposition minimizes
    the surface-to-volume ratio. Populates self.validProcs, and — when a
    decomposition matching self.nprocs exists — self.decomp, self.mit
    and self.proc2IndexSet; otherwise returns leaving those unset.
    """
    # candidate edge counts along each axis are the divisors of the
    # global dimension sizes
    primeNumbers = [getPrimeFactors(d) for d in self.globalDims]
    ns = [len(pns) for pns in primeNumbers]
    validDecomps = []
    self.validProcs = []
    for it in MultiArrayIter(ns):
        inds = it.getIndices()
        decomp = [primeNumbers[d][inds[d]] for d in range(self.ndims)]
        self.validProcs.append(reduce(operator.mul, decomp, 1))
        if reduce(operator.mul, decomp, 1) == self.nprocs:
            validDecomps.append(decomp)
    # sort the valid processor counts and drop duplicates
    self.validProcs.sort()
    vprocs = []
    for vp in self.validProcs:
        if len(vprocs) == 0 or (len(vprocs) >= 1 and vp != vprocs[-1]):
            vprocs.append(vp)
    self.validProcs = vprocs
    if len(validDecomps) == 0:
        # no decomposition matches the requested number of processes
        return
    # select the decomposition with the smallest surface/volume cost
    minCost = float('inf')
    bestDecomp = validDecomps[0]
    for decomp in validDecomps:
        sizes = [self.globalDims[d] // decomp[d] for d in range(self.ndims)]
        volume = reduce(operator.mul, sizes, 1)
        surface = 0
        for d in range(self.ndims):
            # two faces per axis, each the product of the other extents
            surface += 2 * reduce(operator.mul, sizes[:d], 1) * \
                reduce(operator.mul, sizes[d + 1:], 1)
        cost = surface / float(volume)
        if cost < minCost:
            bestDecomp = decomp
            minCost = cost
    self.decomp = bestDecomp
    # iterator over the sub-domain grid
    self.mit = MultiArrayIter(self.decomp, rowMajor=self.rowMajor)
    # map each processor rank to its slab of global index space
    procId = 0
    self.proc2IndexSet = {}
    numCellsPerProc = [self.globalDims[d] // self.decomp[d]
                       for d in range(self.ndims)]
    for it in self.mit:
        nps = it.getIndices()
        self.proc2IndexSet[procId] = []
        for d in range(self.ndims):
            sbeg = nps[d] * numCellsPerProc[d]
            send = (nps[d] + 1) * numCellsPerProc[d]
            self.proc2IndexSet[procId].append(slice(sbeg, send))
        procId += 1
"resource": ""
} |
def _generate_manager(manager_config):
    '''
    Build a datafs manager from a configuration dictionary.

    Parameters
    ----------
    manager_config : dict
        Must contain 'class' ('MongoDBManager' or 'DynamoDBManager');
        optional 'args' (list) and 'kwargs' (dict) are forwarded to the
        manager constructor.

    Returns
    -------
    manager : datafs.managers.MongoDBManager or
        datafs.managers.DynamoDBManager instance

    Raises
    ------
    ValueError
        If 'class' is missing from the configuration.
    KeyError
        If 'class' names an unrecognized manager.
    '''
    if 'class' not in manager_config:
        raise ValueError(
            'Manager not fully specified. Give '
            '"class:manager_name", e.g. "class:MongoDBManager".')
    class_name = manager_config['class']
    # prefix matching keeps spellings like 'mongodb'/'dynamodb' working
    if class_name.lower()[:5] == 'mongo':
        from datafs.managers.manager_mongo import (
            MongoDBManager as mgr_class)
    elif class_name.lower()[:6] == 'dynamo':
        from datafs.managers.manager_dynamo import (
            DynamoDBManager as mgr_class)
    else:
        raise KeyError(
            'Manager class "{}" not recognized. Choose from {}'.format(
                class_name, 'MongoDBManager or DynamoDBManager'))
    return mgr_class(
        *manager_config.get('args', []),
        **manager_config.get('kwargs', {}))
"resource": ""
} |
def _generate_service(service_config):
    '''
    Build a pyFilesystem service from a configuration dictionary.

    Parameters
    ----------
    service_config : dict
        Must contain 'service' — a filesystem class name from the fs
        package (e.g. 'OSFS', 'TempFS', 'S3FS'); optional 'args' and
        'kwargs' are forwarded to the filesystem constructor.

    Returns
    -------
    service : object
        The instantiated fs filesystem.
    '''
    # enumerate the *fs modules shipped with the installed pyFilesystem
    available = [modname
                 for _, modname, _ in pkgutil.iter_modules(fs.__path__)
                 if modname.endswith('fs')]
    module_name = service_config['service'].lower()
    assert_msg = 'Filesystem "{}" not found in pyFilesystem {}'.format(
        module_name, fs.__version__)
    assert module_name in available, assert_msg
    svc_module = importlib.import_module('fs.{}'.format(module_name))
    svc_class = svc_module.__dict__[service_config['service']]
    return svc_class(*service_config.get('args', []),
                     **service_config.get('kwargs', {}))
"resource": ""
} |
def addStencilBranch(self, disp, weight):
    """
    Set (or overwrite) the stencil weight along displacement 'disp' and
    refresh the partition logic for that direction.
    @param disp displacement vector
    @param weight stencil weight
    """
    key = tuple(disp)
    self.stencil[key] = weight
    self.__setPartionLogic(disp)
"resource": ""
} |
def apply(self, localArray):
    """
    Apply the stencil to the local data, pulling halo contributions
    from remote ranks through one-sided windows where needed.
    @param localArray local array
    @return new local array holding the stencil result
    """
    # wrap the input in a distributed array so remote ranks can read it
    inp = daZeros(localArray.shape, localArray.dtype)
    inp[...] = localArray
    inp.setComm(self.comm)
    out = numpy.zeros(localArray.shape, localArray.dtype)
    # expose every remotely-visible window of the input
    for disp, dpi in self.dpis.items():
        srcs = dpi['srcs']
        winIds = dpi['remoteWinIds']
        for i in range(len(srcs)):
            inp.expose(srcs[i], winID=winIds[i])
    # accumulate the weighted contributions of every stencil branch
    for disp, weight in self.stencil.items():
        dpi = self.dpis[disp]
        srcs = dpi['srcs']
        dsts = dpi['dsts']
        ranks = dpi['remoteRanks']
        winIds = dpi['remoteWinIds']
        for i in range(len(srcs)):
            if ranks[i] == self.myRank:
                # contribution comes from this rank's own data
                out[dsts[i]] += weight * inp[srcs[i]]
            else:
                # fetch the exposed window from the remote rank
                out[dsts[i]] += weight * inp.getData(ranks[i], winIds[i])
    # some implementations require an explicit free of the windows
    inp.free()
    return out
"resource": ""
} |
def getIndicesFromBigIndex(self, bigIndex):
    """
    Decompose a flat ("big") index into per-axis indices.
    @param bigIndex flat index
    @return numpy array of per-axis indices
    @note no checks are performed; an out-of-range bigIndex is not
    detected
    """
    indices = numpy.array([0] * self.ndims)
    for axis in range(self.ndims):
        indices[axis] = bigIndex // self.dimProd[axis] % self.dims[axis]
    return indices
"resource": ""
} |
def getBigIndexFromIndices(self, indices):
    """
    Flatten per-axis indices into a single "big" index.
    @param indices per-axis index set
    @return flat index
    @note no checks are performed; out-of-range indices are not
    detected
    """
    total = 0
    for axis in range(self.ndims):
        total += self.dimProd[axis] * indices[axis]
    return total
"resource": ""
} |
def areIndicesValid(self, inds):
    """
    Check that every index lies inside its axis extent.
    @param inds index set
    @return True if all axes are in range, False otherwise
    """
    return all(0 <= inds[axis] < self.dims[axis]
               for axis in range(self.ndims))
"resource": ""
} |
def get_setting(setting):
    """Return the specified django setting, or its built-in default.

    Looks the name up on django.conf.settings first, then falls back to
    the defaults below. ImproperlyConfigured is raised only when the
    setting is neither defined in the project settings nor known here.

    Fix: the original evaluated defaults[setting] eagerly as getattr's
    fallback argument, so a setting absent from 'defaults' raised
    ImproperlyConfigured even when the project settings *did* define it.
    """
    defaults = {
        # The context to use for rendering fields
        'TEMPLATE_FIELD_CONTEXT': {},
        # When this is False, don't do any TemplateField rendering
        'TEMPLATE_FIELD_RENDER': True
    }
    try:
        return getattr(settings, setting)
    except AttributeError:
        try:
            return defaults[setting]
        except KeyError:
            msg = "{0} is not specified in your settings".format(setting)
            raise ImproperlyConfigured(msg)
"resource": ""
} |
def csv_line_items(self):
    '''
    Invoices fetched from list endpoints omit csv-line-items, so fetch
    them lazily from the single-invoice endpoint and cache the result
    on the instance.
    '''
    if not hasattr(self, '_csv_line_items'):
        url = '{}/{}'.format(self.base_url, self.id)
        element = self.harvest._get_element_values(
            url, self.element_name).next()
        self._csv_line_items = element.get('csv-line-items', '')
    return self._csv_line_items
"resource": ""
} |
def _create_getters(self, klass):
    '''
    Install singular and plural getters for a Harvest resource class on
    this instance (e.g. self.invoice(id) and self.invoices()), backed
    by a per-class cache and a "fully fetched" flag.
    '''
    flag_name = '_got_' + klass.element_name
    cache_name = '_' + klass.element_name
    setattr(self, cache_name, {})
    setattr(self, flag_name, False)
    cache = getattr(self, cache_name)

    def _get_item(id):
        # singular getter: serve from cache, else fetch and memoize
        if id in cache:
            return cache[id]
        url = '{}/{}'.format(klass.base_url, id)
        item = self._get_element_values(url, klass.element_name).next()
        item = klass(self, item)
        cache[id] = item
        return item

    setattr(self, klass.element_name, _get_item)

    def _get_items():
        # plural getter: stream from the cache once everything has been
        # fetched, otherwise fetch all items and fill the cache
        if getattr(self, flag_name):
            for item in cache.values():
                yield item
        else:
            for element in self._get_element_values(
                    klass.base_url, klass.element_name):
                item = klass(self, element)
                cache[item.id] = item
                yield item
            setattr(self, flag_name, True)

    setattr(self, klass.plural_name, _get_items)
"resource": ""
} |
def track_purchase(self, user, items, total, purchase_id=None,
                   campaign_id=None, template_id=None, created_at=None,
                   data_fields=None):
    """
    Track a purchase via POST /api/commerce/trackPurchase.

    The 'purchase_id' argument maps to 'id' in the API payload; the
    name distinguishes it from other endpoints where 'id' means
    something else.

    Fixes over the previous revision:
    * custom event data is sent under the API's 'dataFields' key —
      it was previously sent as 'data_fields', which the API ignores
      (compare update_user, which already uses 'dataFields')
    * integer totals are accepted in addition to floats
    """
    call = "/api/commerce/trackPurchase"
    payload = {}
    if isinstance(user, dict):
        payload["user"] = user
    else:
        raise TypeError('user key is not in Dictionary format')
    if isinstance(items, list):
        payload["items"] = items
    else:
        raise TypeError('items are not in Array format')
    # bool is an int subclass; exclude it explicitly
    if isinstance(total, (int, float)) and not isinstance(total, bool):
        payload["total"] = total
    else:
        raise TypeError('total is not in correct format')
    if purchase_id is not None:
        payload["id"] = str(purchase_id)
    if campaign_id is not None:
        payload["campaignId"] = campaign_id
    if template_id is not None:
        payload["templateId"] = template_id
    if created_at is not None:
        payload["createdAt"] = created_at
    if data_fields is not None:
        payload["dataFields"] = data_fields
    return self.api_call(call=call, method="POST", json=payload)
"resource": ""
} |
def get_experiment_metrics(self, path, return_response_object=None,
                           experiment_id=None, campaign_id=None,
                           start_date_time=None, end_date_time=None
                           ):
    """
    Download experiment metrics. This endpoint returns CSV-style rows
    rather than a JSON object, so it is handled like the bulk-export
    requests.

    Arguments:
        path: the directory the file is downloaded into.
        return_response_object: recommended False. If True, the raw
            'python-requests' response object is returned.

    Fixes over the previous revision:
    * the default return_response_object=None no longer raises
      ValueError (None is not a bool, so every call that omitted the
      argument failed)
    * the error message names the actual parameter (it previously said
      'return_iterator_object')
    """
    call = "/api/experiments/metrics"
    if return_response_object is not None and \
            not isinstance(return_response_object, bool):
        raise ValueError(
            "'return_response_object' parameter must be a boolean")
    payload = {}
    if experiment_id is not None:
        payload["experimentId"] = experiment_id
    if campaign_id is not None:
        payload["campaignId"] = campaign_id
    if start_date_time is not None:
        payload["startDateTime"] = start_date_time
    if end_date_time is not None:
        payload["endDateTime"] = end_date_time
    return self.export_data_api(call=call, path=path, params=payload)
"resource": ""
} |
def delete_user_by_email(self, email):
    """
    Delete the user identified by 'email' from the Iterable project.
    The address is a path parameter, so it is appended directly to the
    endpoint URL rather than sent in a request body.
    """
    return self.api_call(call="/api/users/" + str(email), method="DELETE")
"resource": ""
} |
def get_user_by_email(self, email):
    """Fetch a user's profile (data fields and info) by email address."""
    endpoint = "/api/users/" + str(email)
    return self.api_call(call=endpoint, method="GET")
"resource": ""
} |
def bulk_update_user(self, users):
    """
    Bulk update user data (or add it if it does not exist) via
    POST /api/users/bulkUpdate. Data is merged — missing fields are
    not deleted.

    :param users: list of user dicts; batch at most 50 per request,
        which is the maximum the endpoint accepts
    :raises TypeError: if 'users' is not a list

    Fix: corrected the typo in the error message ('Arrary' -> 'Array').
    """
    call = "/api/users/bulkUpdate"
    if not isinstance(users, list):
        raise TypeError('users are not in Array format')
    payload = {"users": users}
    return self.api_call(call=call, method="POST", json=payload)
"resource": ""
} |
def disable_device(self, token, email=None, user_id=None):
    """
    Manually disable push notifications to a device until it comes
    online again. Either 'email' or 'user_id' may scope the request.
    """
    payload = {"token": str(token)}
    for key, value in (("email", email), ("userId", user_id)):
        if value is not None:
            payload[key] = str(value)
    return self.api_call(call="/api/users/disableDevice", method="POST",
                         json=payload)
"resource": ""
} |
def update_user(self, email=None, data_fields=None, user_id=None,
                prefer_userId=None, merge_nested_objects=None):
    """
    Update (or create) a user profile via POST /api/users/update.
    Missing fields are not deleted and new data is merged.

    :param email: unique identifier used by the Iterable database
    :param data_fields: dict of attributes to add or update
    :param user_id: alternative lookup key for the user
    :param prefer_userId: create by userId when the user is missing
    :param merge_nested_objects: merge top-level objects instead of
        overwriting them
    """
    body = {}
    if email is not None:
        body["email"] = str(email)
    if data_fields is not None:
        body["dataFields"] = data_fields
    if user_id is not None:
        body["userId"] = str(user_id)
    if prefer_userId is not None:
        body["preferUserId"] = prefer_userId
    if merge_nested_objects is not None:
        body["mergeNestedObjects"] = merge_nested_objects
    return self.api_call(call="/api/users/update", method="POST", json=body)
"resource": ""
} |
def cli(
        ctx,
        config_file=None,
        requirements=None,
        profile=None):
    '''
    An abstraction layer for data storage systems
    DataFS is a package manager for data. It manages file versions,
    dependencies, and metadata for individual use or large organizations.
    For more information, see the docs at https://datafs.readthedocs.io
    '''
    # stash the CLI options on the click context for subcommands
    interface = _DataFSInterface()
    interface.config_file = config_file
    interface.requirements = requirements
    interface.profile = profile
    ctx.obj = interface

    def _close_api():
        # release API resources opened lazily during the command
        if hasattr(ctx.obj, 'api'):
            ctx.obj.api.close()

    ctx.call_on_close(_close_api)
"resource": ""
} |
def create(
        ctx,
        archive_name,
        authority_name,
        versioned=True,
        tag=None,
        helper=False):
    '''
    Create an archive
    '''
    tag_list = list(tag)
    _generate_api(ctx)
    # any remaining key=value CLI args become archive metadata
    extra, metadata_kwargs = _parse_args_and_kwargs(ctx.args)
    assert len(extra) == 0, 'Unrecognized arguments: "{}"'.format(extra)
    archive = ctx.obj.api.create(
        archive_name,
        authority_name=authority_name,
        versioned=versioned,
        metadata=metadata_kwargs,
        tags=tag_list,
        helper=helper)
    verstring = 'versioned archive' if versioned else 'archive'
    click.echo('created {} {}'.format(verstring, archive))
"resource": ""
} |
def update(
        ctx,
        archive_name,
        bumpversion='patch',
        prerelease=None,
        dependency=None,
        message=None,
        string=False,
        file=None):
    '''
    Update an archive with new contents
    '''
    _generate_api(ctx)
    # remaining key=value CLI args become version metadata
    extra, metadata_kwargs = _parse_args_and_kwargs(ctx.args)
    assert len(extra) == 0, 'Unrecognized arguments: "{}"'.format(extra)
    deps = _parse_dependencies(dependency)
    archive = ctx.obj.api.get_archive(archive_name)
    previous_version = archive.get_latest_version()
    if string:
        # contents are streamed from stdin (or --file taken literally)
        with archive.open(
                'w+',
                bumpversion=bumpversion,
                prerelease=prerelease,
                dependencies=deps,
                metadata=metadata_kwargs,
                message=message) as stream:
            if file is None:
                for line in sys.stdin:
                    stream.write(u(line))
            else:
                stream.write(u(file))
    else:
        # contents come from a file on disk; prompt when not supplied
        if file is None:
            file = click.prompt('enter filepath')
        archive.update(
            file,
            bumpversion=bumpversion,
            prerelease=prerelease,
            dependencies=deps,
            metadata=metadata_kwargs,
            message=message)
    current_version = archive.get_latest_version()
    # describe how the version changed for the status message
    if previous_version is None and current_version is not None:
        bumpmsg = ' new version {} created.'.format(current_version)
    elif current_version != previous_version:
        bumpmsg = ' version bumped {} --> {}.'.format(
            previous_version, current_version)
    elif archive.versioned:
        bumpmsg = ' version remains {}.'.format(previous_version)
    else:
        bumpmsg = ''
    click.echo('uploaded data to {}.{}'.format(archive, bumpmsg))
"resource": ""
} |
def update_metadata(ctx, archive_name):
    '''
    Update an archive's metadata
    '''
    _generate_api(ctx)
    # remaining key=value CLI args are the metadata updates
    extra, metadata_kwargs = _parse_args_and_kwargs(ctx.args)
    assert len(extra) == 0, 'Unrecognized arguments: "{}"'.format(extra)
    archive = ctx.obj.api.get_archive(archive_name)
    archive.update_metadata(metadata=metadata_kwargs)
"resource": ""
} |
def set_dependencies(ctx, archive_name, dependency=None):
    '''
    Set the dependencies of an archive
    '''
    _generate_api(ctx)
    deps = _parse_dependencies(dependency)
    archive = ctx.obj.api.get_archive(archive_name)
    archive.set_dependencies(dependencies=deps)
"resource": ""
} |
def get_dependencies(ctx, archive_name, version):
    '''
    List the dependencies of an archive
    '''
    _generate_api(ctx)
    archive = ctx.obj.api.get_archive(archive_name)
    lines = []
    # render pinned dependencies as name==version, unpinned as name
    for dep_name, dep_version in archive.get_dependencies(
            version=version).items():
        if dep_version is None:
            lines.append(dep_name)
        else:
            lines.append('{}=={}'.format(dep_name, dep_version))
    click.echo('\n'.join(lines))
"resource": ""
} |
def get_tags(ctx, archive_name):
    '''
    Print tags assigned to an archive
    '''
    _generate_api(ctx)
    archive = ctx.obj.api.get_archive(archive_name)
    # echo without a newline, then terminate the line explicitly
    click.echo(' '.join(archive.get_tags()), nl=False)
    print('')
"resource": ""
} |
def download(ctx, archive_name, filepath, version):
    '''
    Download an archive
    '''
    _generate_api(ctx)
    archive = ctx.obj.api.get_archive(archive_name)
    if version is None:
        version = archive.get_default_version()
    archive.download(filepath, version=version)
    # Fix: the original expression
    #     var.archive_name + '' if (not var.versioned) else ' v{}'...
    # parsed as (name + '') if ... else (' v...') because '+' binds
    # tighter than the conditional, so versioned archives lost their
    # name in the message (and unversioned ones lost the space).
    if archive.versioned:
        archstr = '{} v{}'.format(archive.archive_name, version)
    else:
        archstr = archive.archive_name
    click.echo('downloaded {} to {}'.format(archstr, filepath))
"resource": ""
} |
def cat(ctx, archive_name, version):
    '''
    Echo the contents of an archive
    '''
    _generate_api(ctx)
    archive = ctx.obj.api.get_archive(archive_name)
    with archive.open('r', version=version) as stream:
        # stream in 1 MiB chunks so huge archives never sit in memory
        for block in iter(lambda: stream.read(1024 * 1024), ''):
            click.echo(block)
"resource": ""
} |
def log(ctx, archive_name):
    '''
    Get the version log for an archive
    '''
    _generate_api(ctx)
    archive = ctx.obj.api.get_archive(archive_name)
    archive.log()
"resource": ""
} |
def metadata(ctx, archive_name):
    '''
    Get an archive's metadata
    '''
    _generate_api(ctx)
    archive = ctx.obj.api.get_archive(archive_name)
    click.echo(pprint.pformat(archive.get_metadata()))
"resource": ""
} |
def history(ctx, archive_name):
    '''
    Get archive history
    '''
    _generate_api(ctx)
    archive = ctx.obj.api.get_archive(archive_name)
    click.echo(pprint.pformat(archive.get_history()))
"resource": ""
} |
def versions(ctx, archive_name):
    '''
    Get an archive's versions
    '''
    _generate_api(ctx)
    archive = ctx.obj.api.get_archive(archive_name)
    # NOTE(review): relies on Python-2 map() returning a list; under
    # Python 3 this would pformat a map object — preserved as-is.
    click.echo(pprint.pformat(map(str, archive.get_versions())))
"resource": ""
} |
def filter_archives(ctx, prefix, pattern, engine):
    '''
    List all archives matching filter criteria
    '''
    _generate_api(ctx)
    # want to achieve behavior like click.echo(' '.join(matches))
    for match in ctx.obj.api.filter(pattern, engine, prefix=prefix):
        click.echo(match, nl=False)
    print('')
"resource": ""
} |
def search(ctx, tags, prefix=None):
    '''
    List all archives matching tag search criteria
    '''
    _generate_api(ctx)
    # echo each match without a newline, then terminate the line
    for match in ctx.obj.api.search(*tags, prefix=prefix):
        click.echo(match, nl=False)
    print('')
"resource": ""
} |
def delete(ctx, archive_name):
    '''
    Delete an archive
    '''
    _generate_api(ctx)
    archive = ctx.obj.api.get_archive(archive_name)
    archive.delete()
    click.echo('deleted archive {}'.format(archive))
"resource": ""
} |
def replaceInCommand(self, command, pattern, replacement, replacementAtBeginning):
    """
    Replace every un-escaped occurrence of 'pattern' in 'command' with
    'replacement' ('replacementAtBeginning' when the match is at index
    0). Occurrences preceded by the escape character are left alone,
    and duplicate slashes in the result are collapsed.
    """
    chars = list(command)
    for pos in (m.start() for m in re.finditer(pattern, command)):
        if pos == 0:
            chars[pos] = replacementAtBeginning
        elif chars[pos - 1] != MapConstants.escape_char:
            chars[pos] = replacement
    rebuilt = ''.join(chars)
    # Remove superfluous slashes and return
    return rebuilt.replace("//", "/")
"resource": ""
} |
def escapePlaceholders(self, inputString):
    """
    Prefix every placeholder defined in MapConstants.py with a
    backslash so it survives later substitution passes.
    """
    result = inputString
    # replacement order matches the original sequential escapes
    for token in (MapConstants.placeholder,
                  MapConstants.placeholderFileName,
                  MapConstants.placeholderPath,
                  MapConstants.placeholderExtension,
                  MapConstants.placeholderCounter):
        result = result.replace(token, '\\' + token)
    return result
"resource": ""
} |
def buildCommand(self, fileName, count, args):
    """
    Assemble the shell command for a single file: escape placeholders
    in the file path, substitute each space-separated piece of the
    command template, then unescape and rejoin.
    """
    escapedPath = self.escapePlaceholders(fileName)
    pieces = [self.buildPart(piece, escapedPath, count, args)
              for piece in args.command.split(' ')]
    return self.unescapePlaceholders(' '.join(pieces))
"resource": ""
} |
q41487 | MapExecutor.runCommands | train | def runCommands(self,commands,args):
"""
Given a list of commands, runCommands executes them.
This is one of the two key methods of MapExecutor.
"""
errorCounter = 0
if args.list:
print '\n'.join(commands)
else:
# Each command is executed sequentially:
for command in commands:
process = subprocess.Popen(command, stdout=subprocess.PIPE,stderr=subprocess.PIPE,shell=True)
stream = process.communicate()
output = stream[0]
erroroutput = stream[1]
returncode = process.returncode
if args.verbose:
print 'Executing command: '+command
if returncode != 0:
errorCounter = errorCounter + 1
if args.verbose or not args.ignore_errors:
print 'An error occurred:\n'
print erroroutput
if not args.ignore_errors:
print('Terminating map process.')
break
if returncode == 0 and len(output) > 0:
sys.stdout.write(output)
if args.verbose:
print 'Process completed successfully.'
if errorCounter > 0:
if errorCounter > 1:
print str(errorCounter) + ' errors occurred during the process.'
else:
print str(errorCounter) + ' error occurred during the process.' | python | {
"resource": ""
} |
def validate(self, csdl, service='facebook'):
    """ Validate the given CSDL
    :param csdl: The CSDL to be validated for analysis
    :type csdl: str
    :param service: The service for this API call (facebook, etc);
        accepted for interface symmetry — this endpoint does not use it
    :type service: str
    :return: dict of REST API output with headers attached
    :rtype: :class:`~datasift.request.DictResponse`
    :raises: :class:`~datasift.exceptions.DataSiftApiException`,
        :class:`requests.exceptions.HTTPError`
    """
    payload = {'csdl': csdl}
    return self.request.post('validate', data=payload)
"resource": ""
} |
def start(self, hash, name=None, service='facebook'):
    """ Start a recording for the provided hash
    :param hash: The hash to start recording with
    :type hash: str
    :param name: Optional name of the recording
    :type name: str
    :param service: The service for this API call (facebook, etc)
    :type service: str
    :return: dict of REST API output with headers attached
    :rtype: :class:`~datasift.request.DictResponse`
    :raises: :class:`~datasift.exceptions.DataSiftApiException`,
        :class:`requests.exceptions.HTTPError`
    """
    payload = {'hash': hash}
    if name:
        payload['name'] = name
    return self.request.post(service + '/start', payload)
"resource": ""
} |
def stop(self, id, service='facebook'):
    """ Stop the recording for the provided id
    :param id: The recording id to stop
    :type id: str
    :param service: The service for this API call (facebook, etc)
    :type service: str
    :rtype: :class:`~datasift.request.DictResponse`
    :raises: :class:`~datasift.exceptions.DataSiftApiException`,
        :class:`requests.exceptions.HTTPError`
    """
    return self.request.post(service + '/stop', data={'id': id})
"resource": ""
} |
def analyze(self, id, parameters, filter=None, start=None, end=None,
            service='facebook'):
    """ Analyze the recorded data for a given recording
    :param id: The id of the recording
    :type id: str
    :param parameters: Settings such as threshold and target
    :type parameters: dict
    :param filter: An optional secondary filter
    :type filter: str
    :param start: Start of the analyzed time period
    :type start: int
    :param end: End of the analyzed time period
    :type end: int
    :param service: The service for this API call (facebook, etc)
    :type service: str
    :return: dict of REST API output with headers attached
    :rtype: :class:`~datasift.request.DictResponse`
    :raises: :class:`~datasift.exceptions.DataSiftApiException`,
        :class:`requests.exceptions.HTTPError`
    """
    payload = {'id': id, 'parameters': parameters}
    # optional keys are included only when truthy (original behavior)
    for key, value in (('filter', filter), ('start', start), ('end', end)):
        if value:
            payload[key] = value
    return self.request.post(service + '/analyze', payload)
"resource": ""
} |
def list(self, page=None, per_page=None, order_by='created_at',
         order_dir='DESC', service='facebook'):
    """ List pylon recordings
    :param page: page number for pagination
    :type page: int
    :param per_page: number of items per page, default 20
    :type per_page: int
    :param order_by: field to order by, default created_at
    :type order_by: str
    :param order_dir: direction to order by, asc or desc, default desc
    :type order_dir: str
    :param service: The service for this API call (facebook, etc)
    :type service: str
    :return: dict of REST API output with headers attached
    :rtype: :class:`~datasift.request.DictResponse`
    :raises: :class:`~datasift.exceptions.DataSiftApiException`,
        :class:`requests.exceptions.HTTPError`
    """
    query = {}
    # only truthy values are forwarded (original behavior)
    for key, value in (('page', page), ('per_page', per_page),
                       ('order_by', order_by), ('order_dir', order_dir)):
        if value:
            query[key] = value
    return self.request.get(service + '/get', query)
"resource": ""
} |
def sample(self, id, count=None, start=None, end=None, filter=None,
           service='facebook'):
    """ Get sample interactions for a given recording
    :param id: The recording id to sample
    :type id: str
    :param count: Number of samples to return
    :type count: int
    :param start: Start of the sampled time period
    :type start: int
    :param end: End of the sampled time period
    :type end: int
    :param filter: An optional secondary filter
    :type filter: str
    :param service: The service for this API call (facebook, etc)
    :type service: str
    :return: dict of REST API output with headers attached
    :rtype: :class:`~datasift.request.DictResponse`
    :raises: :class:`~datasift.exceptions.DataSiftApiException`,
        :class:`requests.exceptions.HTTPError`
    """
    query = {'id': id}
    # optional keys are included only when truthy (original behavior)
    for key, value in (('count', count), ('start', start),
                       ('end', end), ('filter', filter)):
        if value:
            query[key] = value
    return self.request.get(service + '/sample', query)
"resource": ""
} |
def get(self, identity_id, service):
    """ Get the limit for the given identity and service
    :param identity_id: The ID of the identity to retrieve
    :param service: The service that the limit is linked to
    :return: dict of REST API output with headers attached
    :rtype: :class:`~datasift.request.DictResponse`
    :raises: :class:`~datasift.exceptions.DataSiftApiException`,
        :class:`requests.exceptions.HTTPError`
    """
    endpoint = str(identity_id) + '/limit/' + service
    return self.request.get(endpoint)
"resource": ""
} |
def list(self, service, per_page=20, page=1):
    """ Get a list of limits for the given service
    :param service: The service that the limit is linked to
    :param per_page: The number of results per page returned
    :param page: The page number of the results
    :return: dict of REST API output with headers attached
    :rtype: :class:`~datasift.request.DictResponse`
    :raises: :class:`~datasift.exceptions.DataSiftApiException`,
        :class:`requests.exceptions.HTTPError`
    """
    query = {'per_page': per_page, 'page': page}
    return self.request.get('limit/' + service, query)
"resource": ""
} |
def create(self, identity_id, service, total_allowance=None, analyze_queries=None):
    """ Create the limit

        :param identity_id: The ID of the identity to retrieve
        :param service: The service that the token is linked to
        :param total_allowance: The total allowance for this token's limit
        :param analyze_queries: The number of analyze calls
        :return: dict of REST API output with headers attached
        :rtype: :class:`~datasift.request.DictResponse`
        :raises: :class:`~datasift.exceptions.DataSiftApiException`,
            :class:`requests.exceptions.HTTPError`
    """
    payload = {'service': service}
    # Only send the optional fields the caller actually supplied; an
    # explicit None check keeps zero as a valid value.
    optional = {'total_allowance': total_allowance,
                'analyze_queries': analyze_queries}
    payload.update({k: v for k, v in optional.items() if v is not None})
    return self.request.post(str(identity_id) + '/limit/', payload)
"resource": ""
} |
def update(self, identity_id, service, total_allowance=None, analyze_queries=None):
    """ Update the limit

        :param identity_id: The ID of the identity to retrieve
        :param service: The service that the token is linked to
        :param total_allowance: The total allowance for this token's limit
        :param analyze_queries: The number of analyze calls
        :return: dict of REST API output with headers attached
        :rtype: :class:`~datasift.request.DictResponse`
        :raises: :class:`~datasift.exceptions.DataSiftApiException`,
            :class:`requests.exceptions.HTTPError`
    """
    params = dict(service=service)
    # Copy across only the optional values that were explicitly given;
    # `is not None` keeps zero as a legal setting.
    for key, value in (('total_allowance', total_allowance),
                       ('analyze_queries', analyze_queries)):
        if value is not None:
            params[key] = value
    return self.request.put('{0}/limit/{1}'.format(identity_id, service), params)
"resource": ""
} |
def delete(self, identity_id, service):
    """ Remove the limit for the given identity and service

        :param identity_id: The ID of the identity to retrieve
        :param service: The service that the token is linked to
        :return: dict of REST API output with headers attached
        :rtype: :class:`~datasift.request.DictResponse`
        :raises: :class:`~datasift.exceptions.DataSiftApiException`,
            :class:`requests.exceptions.HTTPError`
    """
    # Endpoint shape: <identity_id>/limit/<service>
    target = '/'.join((str(identity_id), 'limit', service))
    return self.request.delete(target)
"resource": ""
} |
def reload(request):
    """Reload the local requirements file, refreshing all package caches.

    Runs the clean/local/remote refresh steps in order, then redirects
    back to the referring page when one is available; otherwise returns
    a plain confirmation response.
    """
    for step in (refresh_packages.clean,
                 refresh_packages.local,
                 refresh_packages.remote):
        step()
    referer = request.META.get('HTTP_REFERER')
    if referer:
        return HttpResponseRedirect(referer)
    return HttpResponse('Local requirements list has been reloaded.')
"resource": ""
} |
Subsets and Splits
No community queries yet.
The top public SQL queries from the community will appear here once they become available.