_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q38400
Graph.base_uri
train
def base_uri(self):
    """ Resolution base for JSON schema. Also used as the default graph
    ID for RDF. """
    if self._base_uri is None:
        # Derive the base from the resolver when one exists, otherwise
        # fall back to the fixed default namespace.
        if self._resolver is None:
            self._base_uri = 'http://pudo.github.io/jsongraph'
        else:
            self._base_uri = self.resolver.resolution_scope
    return URIRef(self._base_uri)
python
{ "resource": "" }
q38401
Graph.resolver
train
def resolver(self):
    """ Resolver for JSON Schema references. This can be based around
    a file or HTTP-based resolution base URI. """
    if self._resolver is None:
        # Lazily build a resolver rooted at our base URI.
        self._resolver = RefResolver(self.base_uri, {})
    return self._resolver
python
{ "resource": "" }
q38402
Graph.store
train
def store(self):
    """ Backend storage for RDF data. Either an in-memory store, or an
    external triple store controlled via SPARQL. """
    if self._store is None:
        config = self.config.get('store', {})
        # A SPARQL-backed store needs both a query and an update endpoint;
        # otherwise fall back to rdflib's in-memory IOMemory plugin.
        if 'query' in config and 'update' in config:
            self._store = sparql_store(config.get('query'), config.get('update'))
        else:
            self._store = plugin.get('IOMemory', Store)()
        log.debug('Created store: %r', self._store)
    return self._store
python
{ "resource": "" }
q38403
Graph.graph
train
def graph(self):
    """ A conjunctive graph of all statements in the current instance. """
    # getattr with a default collapses the hasattr + None checks.
    if getattr(self, '_graph', None) is None:
        self._graph = ConjunctiveGraph(store=self.store,
                                       identifier=self.base_uri)
    return self._graph
python
{ "resource": "" }
q38404
Graph.buffered
train
def buffered(self):
    """ Whether write operations should be buffered, i.e. run against a
    local graph before being stored to the main data store. """
    if 'buffered' in self.config:
        return self.config.get('buffered')
    # Default: buffer only when the backing store is not in-memory.
    return not isinstance(self.store, (Memory, IOMemory))
python
{ "resource": "" }
q38405
Graph.register
train
def register(self, alias, uri):
    """ Register a new schema URI under a given name.

    ``uri`` may also be a schema ``dict``; it is then stored on the
    resolver and registered under its ``id`` (or ``alias`` when the
    schema carries no ``id``).
    """
    # TODO: do we want to constrain the valid alias names.
    if isinstance(uri, dict):
        # Renamed from ``id`` to avoid shadowing the builtin.
        schema_id = uri.get('id', alias)
        self.resolver.store[schema_id] = uri
        uri = schema_id
    self.aliases[alias] = uri
python
{ "resource": "" }
q38406
Graph.get_uri
train
def get_uri(self, alias):
    """ Get the URI for a given alias. A registered URI will return
    itself; an unknown alias raises ``GraphException`` (the original
    docstring wrongly claimed ``None`` was returned). """
    # ``in self.aliases`` instead of ``in self.aliases.keys()``.
    if alias in self.aliases:
        return self.aliases[alias]
    if alias in self.aliases.values():
        return alias
    raise GraphException('No such schema: %r' % alias)
python
{ "resource": "" }
q38407
is_url
train
def is_url(text):
    """ Check if the given text looks like a URL.

    Accepts http(s), urn and file schemes, case-insensitively;
    ``None`` is never a URL.
    """
    if text is None:
        return False
    # str.startswith accepts a tuple of prefixes -- one call, no chain.
    return text.lower().startswith(('http://', 'https://', 'urn:', 'file://'))
python
{ "resource": "" }
q38408
safe_uriref
train
def safe_uriref(text):
    """ Escape a URL properly.

    Sanitizes the URL, strips any user-info component and canonicalises
    it (via the third-party ``url`` library), then returns the
    punycoded unicode form wrapped in an rdflib ``URIRef``. """
    url_ = url.parse(text).sanitize().deuserinfo().canonical()
    return URIRef(url_.punycode().unicode())
python
{ "resource": "" }
q38409
KeplerFov.computePointing
train
def computePointing(self, ra_deg, dec_deg, roll_deg, cartesian=False):
    """Compute a pointing model without changing the internal object
    pointing"""
    # Compose the full rotation: roll about the boresight, then slew
    # away from ra/dec of zero.
    rollMat = r.rotateInXMat(roll_deg)
    slewMat = np.dot(r.rightAscensionRotationMatrix(ra_deg),
                     r.declinationRotationMatrix(dec_deg))
    fullMat = np.dot(slewMat, rollMat)

    # Work on a copy so self.origin is left untouched.
    slew = self.origin * 1
    for i, row in enumerate(self.origin):
        slew[i, 3:6] = np.dot(fullMat, row[3:6])

    if cartesian is False:
        slew = self.getRaDecs(slew)
    return slew
python
{ "resource": "" }
q38410
KeplerFov.getRaDecs
train
def getRaDecs(self, mods):
    """Internal function converting cartesian coords to ra dec"""
    raDecOut = np.empty((len(mods), 5))
    # First three columns (identifiers) pass through unchanged.
    raDecOut[:, 0:3] = mods[:, 0:3]
    # Convert each row's cartesian vector into (ra, dec).
    for i in range(len(mods)):
        raDecOut[i, 3:5] = r.raDecFromVec(mods[i, 3:6])
    return raDecOut
python
{ "resource": "" }
q38411
KeplerFov.isOnSilicon
train
def isOnSilicon(self, ra_deg, dec_deg, padding_pix=DEFAULT_PADDING):
    """Returns True if the given location is observable with a science CCD.

    Parameters
    ----------
    ra_deg : float
        Right Ascension (J2000) in decimal degrees.

    dec_deg : float
        Declination (J2000) in decimal degrees.

    padding_pix : float
        Objects <= this many pixels off the edge of a channel are counted
        as inside; compensates for `K2fov`'s slight inaccuracy (e.g. the
        lack of optical distortion modeling).
    """
    ch, col, row = self.getChannelColRow(ra_deg, dec_deg)
    # Broken modules (e.g. 3 and 7) and the Fine Guidance Sensors
    # (encoded as "channels" 85-88) are not science CCDs.
    if ch in self.brokenChannels or ch > 84:
        return False
    return self.colRowIsOnSciencePixel(col, row, padding_pix)
python
{ "resource": "" }
q38412
KeplerFov.getAllChannelsAsPolygons
train
def getAllChannelsAsPolygons(self, maptype=None):
    """Return the corners of every module as Polygon objects.

    If a projection is supplied, the ras and decs are mapped onto x, y
    using that projection.
    """
    return [self.getChannelAsPolygon(ch, maptype)
            for ch in self.origin[:, 2]]
python
{ "resource": "" }
q38413
KeplerFov.plotPointing
train
def plotPointing(self, maptype=None, colour='b', mod3='r', showOuts=True,
                 **kwargs):
    """Plot the FOV.

    Broken channels are drawn in ``mod3`` colour; ``showOuts`` marks the
    origin of the col/row coordinates of each channel.
    """
    if maptype is None:
        maptype = self.defaultMap

    radec = self.currentRaDec
    for ch in radec[:, 2][::4]:
        # np.int was removed in NumPy 1.24; the builtin int is the
        # documented replacement.
        idx = np.where(radec[:, 2].astype(int) == ch)[0]
        idx = np.append(idx, idx[0])  # close the box of 4 corner points

        c = colour
        if ch in self.brokenChannels:
            c = mod3
        maptype.plot(radec[idx, 3], radec[idx, 4], '-', color=c, **kwargs)

        # Show the origin of the col and row coords for this ch
        if showOuts:
            maptype.plot(radec[idx[0], 3], radec[idx[0], 4], 'o', color=c)
python
{ "resource": "" }
q38414
KeplerFov.plotOutline
train
def plotOutline(self, maptype=None, colour='#AAAAAA', **kwargs):
    """Plot an outline of the FOV as a filled polygon patch."""
    if maptype is None:
        maptype = self.defaultMap

    xarr = []
    yarr = []
    radec = self.currentRaDec
    # Channels tracing the outer perimeter of the focal plane, in order.
    for ch in [20, 4, 11, 28, 32, 71, 68, 84, 75, 60, 56, 15]:
        # np.int was removed in NumPy 1.24; the builtin int is the
        # documented replacement.
        idx = np.where(radec[:, 2].astype(int) == ch)[0]
        idx = idx[0]  # Take on the first one
        x, y = maptype.skyToPix(radec[idx][3], radec[idx][4])
        xarr.append(x)
        yarr.append(y)

    verts = np.empty((len(xarr), 2))
    verts[:, 0] = xarr
    verts[:, 1] = yarr

    # There are two ways to specify line colour
    ec = kwargs.pop('ec', "none")
    ec = kwargs.pop('edgecolor', ec)

    p = matplotlib.patches.Polygon(verts, fill=True, ec=ec, fc=colour,
                                   **kwargs)
    mp.gca().add_patch(p)
python
{ "resource": "" }
q38415
KeplerFov.plotSpacecraftYAxis
train
def plotSpacecraftYAxis(self, maptype=None):
    """Plot a line pointing in the direction of the spacecraft y-axis
    (i.e. normal to the solar panel)."""
    if maptype is None:
        maptype = self.defaultMap

    # The subtraction of 90 degrees accounts for the different
    # definitions of where zero roll is.
    yAngle_deg = getSpacecraftRollAngleFromFovAngle(self.roll0_deg) - 90

    a, d = gcircle.sphericalAngDestination(
        self.ra0_deg, self.dec0_deg, -yAngle_deg, 12.0)
    x0, y0 = maptype.skyToPix(self.ra0_deg, self.dec0_deg)
    x1, y1 = maptype.skyToPix(a, d)
    mp.plot([x0, x1], [y0, y1], 'k-')
python
{ "resource": "" }
q38416
KeplerFov.plotChIds
train
def plotChIds(self, maptype=None, modout=False):
    """Print the channel numbers on the plotting display.

    Note:
    ---------
    Behaves poorly with mixed projections: the channel vertex polygons
    are already projected using self.defaultMap, so applying this when
    plotting in a different reference frame may cause trouble.
    """
    if maptype is None:
        maptype = self.defaultMap
    for poly in self.getAllChannelsAsPolygons(maptype):
        poly.identifyModule(modout=modout)
python
{ "resource": "" }
q38417
Polygon.isPointInside
train
def isPointInside(self, xp, yp):
    """Is the given point inside the polygon?

    Input:
    ------------
    xp, yp
        (floats) Coordinates of point in same units that array vertices
        are specified when object created.

    Returns:
    -----------
    **True** / **False**
    """
    point = np.array([xp, yp]).transpose()
    polygon = self.polygon
    # Edge vectors: each vertex to the next, wrapping around.
    edgeVec = np.roll(polygon, -1, 0) - polygon
    # Vectors from each vertex to the query point.
    toPoint = point - polygon
    cross = np.cross(edgeVec, toPoint)
    # Inside iff the point lies on the same side of every edge.
    return bool(np.all(cross < 0) or np.all(cross > 0))
python
{ "resource": "" }
q38418
Polygon.draw
train
def draw(self, **kwargs):
    """Draw the polygon.

    Optional Inputs:
    ------------
    All optional inputs are passed to ``matplotlib.patches.Polygon``.

    Notes:
    ---------
    Does not accept maptype as an argument.
    """
    patch = matplotlib.patches.Polygon(self.polygon, **kwargs)
    mp.gca().add_artist(patch)
python
{ "resource": "" }
q38419
IEC60488.parse_response
train
def parse_response(self, response, header=None):
    """Parses the response message.

    Decodes the raw response with the configured encoding, optionally
    strips and validates the expected header (``resp_prefix`` + header +
    ``resp_header_sep``, raising ``IEC60488.ParsingError`` on mismatch)
    and returns the data fields split on ``resp_data_sep``.
    """
    response = response.decode(self.encoding)
    if header:
        expected = "".join((self.resp_prefix, header, self.resp_header_sep))
        if not response.startswith(expected):
            raise IEC60488.ParsingError('Response header mismatch')
        # Drop the validated header; only the data portion remains.
        response = response[len(expected):]
    return response.split(self.resp_data_sep)
python
{ "resource": "" }
q38420
IEC60488.trigger
train
def trigger(self, transport):
    """Triggers the transport."""
    logger.debug('IEC60488 trigger')
    with transport:
        try:
            transport.trigger()
        except AttributeError:
            # Transport lacks native trigger support; fall back to the
            # IEC 60488 *TRG command.
            transport.write(self.create_message('*TRG'))
python
{ "resource": "" }
q38421
IEC60488.clear
train
def clear(self, transport):
    """Issues a device clear command."""
    logger.debug('IEC60488 clear')
    with transport:
        try:
            transport.clear()
        except AttributeError:
            # No native device-clear; send the IEC 60488 *CLS command.
            transport.write(self.create_message('*CLS'))
python
{ "resource": "" }
q38422
SignalRecovery.query_bytes
train
def query_bytes(self, transport, num_bytes, header, *data):
    """Queries for binary data

    :param transport: A transport object.
    :param num_bytes: The exact number of data bytes expected.
    :param header: The message header.
    :param data: Optional data.
    :returns: The raw unparsed data bytearray.
    """
    message = self.create_message(header, *data)
    logger.debug('SignalRecovery query bytes: %r', message)
    with transport:
        transport.write(message)
        response = transport.read_exactly(num_bytes)
        logger.debug('SignalRecovery response: %r', response)
        # We need to read 3 bytes, because there is a \0 character
        # separating the data from the status bytes.
        _, status_byte, overload_byte = transport.read_exactly(3)
        logger.debug('SignalRecovery stb: %r olb: %r', status_byte, overload_byte)
        self.call_byte_handler(status_byte, overload_byte)
    # returns raw unparsed bytes.
    return response
python
{ "resource": "" }
q38423
rotateAroundVector
train
def rotateAroundVector(v1, w, theta_deg):
    """Rotate vector v1 by an angle theta around w.

    Uses the Rodrigues rotation formula, taken from
    https://en.wikipedia.org/wiki/Axis%E2%80%93angle_representation
    (see Section "Rotating a vector").

    Notes:
    Rotating the x axis 90 degrees about the y axis gives -z
    Rotating the x axis 90 degrees about the z axis gives +y
    """
    theta = np.radians(theta_deg)
    cosT = np.cos(theta)
    sinT = np.sin(theta)
    # v*cos + (w x v)*sin + w*(w.v)*(1 - cos)
    parallel = w * (np.dot(w, v1) * (1 - cosT))
    return v1 * cosT + np.cross(w, v1) * sinT + parallel
python
{ "resource": "" }
q38424
rotateInDeclination
train
def rotateInDeclination(v1, theta_deg):
    """Rotation is chosen so a rotation of 90 degrees from zenith ends
    up at ra=0, dec=0"""
    # Declination increases by rotating about the -y axis.
    return rotateAroundVector(v1, np.array([0, -1, 0]), theta_deg)
python
{ "resource": "" }
q38425
MPS4G.sweep
train
def sweep(self, mode, speed=None):
    """Starts the output current sweep.

    :param mode: The sweep mode. Valid entries are `'UP'`, `'DOWN'`,
        `'PAUSE'` or `'ZERO'`. If in shim mode, `'LIMIT'` is valid as
        well.
    :param speed: The sweeping speed. Valid entries are `'FAST'`,
        `'SLOW'` or `None`.
    :raises ValueError: If mode or speed is not a valid entry.
    """
    sweep_modes = ['UP', 'DOWN', 'PAUSE', 'ZERO', 'LIMIT']
    sweep_speed = ['SLOW', 'FAST', None]
    # ``x not in y`` is the idiomatic form of ``not x in y``.
    if mode not in sweep_modes:
        raise ValueError('Invalid sweep mode.')
    if speed not in sweep_speed:
        raise ValueError('Invalid sweep speed.')
    if speed is None:
        self._write('SWEEP {0}'.format(mode))
    else:
        self._write('SWEEP {0} {1}'.format(mode, speed))
python
{ "resource": "" }
q38426
BaseEventTransport.run_on_main_thread
train
def run_on_main_thread(self, func, args=None, kwargs=None):
    """
    Runs the ``func`` callable on the main thread, by using the
    provided microservice instance's IOLoop.

    :param func: callable to run on the main thread
    :param args: tuple or list with the positional arguments.
    :param kwargs: dict with the keyword arguments.
    :return:
    """
    # Normalise missing argument containers to empty ones.
    args = args or ()
    kwargs = kwargs or {}
    self.microservice.get_io_loop().add_callback(func, *args, **kwargs)
python
{ "resource": "" }
q38427
rotateAboutVectorMatrix
train
def rotateAboutVectorMatrix(vec, theta_deg):
    """Construct the matrix that rotates a vector about vector vec by
    an angle of theta_deg degrees

    Taken from
    http://en.wikipedia.org/wiki/Rotation_matrix#Rotation_matrix_from_axis_and_angle

    Input:
    theta_deg (float) Angle through which vectors should be rotated in degrees

    Returns:
    A matrix

    To rotate a vector, premultiply by this matrix.
    To rotate the coord sys underneath the vector, post multiply
    """
    ct = np.cos(np.radians(theta_deg))
    st = np.sin(np.radians(theta_deg))

    # Normalise a *copy*: the original ``vec /= norm`` silently mutated
    # the caller's array (and raised a casting error for int arrays).
    vec = np.asarray(vec, dtype=float) / np.linalg.norm(vec)
    assert np.all(np.isfinite(vec))

    # Term 1: identity scaled by cos(theta).
    term1 = ct * np.eye(3)

    # Term 2: the cross-product matrix of vec, scaled by sin(theta).
    ucross = np.zeros((3, 3))
    ucross[0] = [0, -vec[2], vec[1]]
    ucross[1] = [vec[2], 0, -vec[0]]
    ucross[2] = [-vec[1], vec[0], 0]
    term2 = st * ucross

    # Term 3: outer product of vec with itself, scaled by (1 - cos);
    # np.outer replaces the original hand-rolled symmetric fill loop.
    term3 = (1 - ct) * np.outer(vec, vec)

    return term1 + term2 + term3
python
{ "resource": "" }
q38428
rotateInZMat
train
def rotateInZMat(theta_deg):
    """Rotate a vector theta degrees around the z-axis.

    Equivalent to yaw left. Rotates the vector in the sense that the
    x-axis is rotated towards the y-axis; sitting on the vector
    [1,0,0], the rotation is towards the left.

    Input:
    theta_deg (float) Angle through which vectors should be rotated in degrees

    Returns:
    A matrix

    To rotate a vector, premultiply by this matrix.
    To rotate the coord sys underneath the vector, post multiply
    """
    theta = np.radians(theta_deg)
    ct = np.cos(theta)
    st = np.sin(theta)
    return np.array([
        [ct, -st, 0],
        [st,  ct, 0],
        [0,    0, 1],
    ])
python
{ "resource": "" }
q38429
SqlDAM.getReadSession
train
def getReadSession(self):
    ''' return scoped session '''
    if self.ReadSession is None:
        # Lazily create one scoped session factory bound to our engine.
        self.ReadSession = scoped_session(sessionmaker(bind=self.engine))
    return self.ReadSession
python
{ "resource": "" }
q38430
SqlDAM.readTupleQuotes
train
def readTupleQuotes(self, symbol, start, end):
    ''' read quotes as tuple '''
    if end is None:
        end=sys.maxint  # Python 2 only (sys.maxsize in Python 3)
    session=self.getReadSession()()
    try:
        rows=session.query(Quote).filter(and_(Quote.symbol == symbol,
                                              Quote.time >= int(start),
                                              Quote.time < int(end)))
    finally:
        self.getReadSession().remove()
    # NOTE(review): ``rows`` is a lazy Query evaluated after the scoped
    # session has been removed -- confirm callers can still iterate it
    # with the configured engine.
    return rows
python
{ "resource": "" }
q38431
SqlDAM.readBatchTupleQuotes
train
def readBatchTupleQuotes(self, symbols, start, end):
    ''' read batch quotes as tuple to save memory '''
    if end is None:
        end=sys.maxint  # Python 2 only (sys.maxsize in Python 3)
    ret={}
    session=self.getReadSession()()
    try:
        # Query in chunks of 100 symbols to keep the IN clause bounded.
        symbolChunks=splitListEqually(symbols, 100)
        for chunk in symbolChunks:
            rows=session.query(Quote.symbol, Quote.time, Quote.close,
                               Quote.volume, Quote.low, Quote.high).filter(
                and_(Quote.symbol.in_(chunk),
                     Quote.time >= int(start),
                     Quote.time < int(end)))
            for row in rows:
                # Result is keyed by time first, then by symbol.
                if row.time not in ret:
                    ret[row.time]={}
                ret[row.time][row.symbol]=self.__sqlToTupleQuote(row)
    finally:
        self.getReadSession().remove()
    return ret
python
{ "resource": "" }
q38432
SqlDAM.read_tuple_ticks
train
def read_tuple_ticks(self, symbol, start, end):
    ''' read ticks as tuple '''
    if end is None:
        end=sys.maxint  # Python 2 only (sys.maxsize in Python 3)
    session=self.getReadSession()()
    try:
        rows=session.query(Tick).filter(and_(Tick.symbol == symbol,
                                             Tick.time >= int(start),
                                             Tick.time < int(end)))
    finally:
        self.getReadSession().remove()
    # NOTE(review): the comprehension iterates the lazy query after the
    # scoped session was removed -- verify against the configured engine.
    return [self.__sqlToTupleTick(row) for row in rows]
python
{ "resource": "" }
q38433
SqlDAM._fundamentalToSqls
train
def _fundamentalToSqls(self, symbol, keyTimeValueDict):
    ''' convert fundamental dict to sqls '''
    # Flatten {key: {timeStamp: value}} into one FmSql row per entry.
    return [FmSql(symbol, key, timeStamp, value)
            for key, timeValues in keyTimeValueDict.iteritems()
            for timeStamp, value in timeValues.iteritems()]
python
{ "resource": "" }
q38434
Curve.delete
train
def delete(self):
    """Deletes the current curve.

    :raises RuntimeError: if one tries to delete a read-only curve.
    """
    # Guard clause: refuse to touch read-only curves.
    if not self._writeable:
        raise RuntimeError('Can not delete read-only curves.')
    self._write(('CRVDEL', Integer), self.idx)
python
{ "resource": "" }
q38435
Program.line
train
def line(self, idx):
    """Return the i'th program line.

    :param idx: The i'th program line.
    """
    # TODO: We should parse the response properly.
    command = ('PGM?', [Integer, Integer], String)
    return self._query(command, self.idx, idx)
python
{ "resource": "" }
q38436
Program.append_line
train
def append_line(self, new_line):
    """Appends the new_line to the LS340 program."""
    # TODO: The user still has to write the raw line, this is error prone.
    command = ('PGM', [Integer, String])
    self._write(command, self.idx, new_line)
python
{ "resource": "" }
q38437
LS340.softcal
train
def softcal(self, std, dest, serial, T1, U1, T2, U2, T3=None, U3=None):
    """Generates a softcal curve.

    :param std: The standard curve index used to calculate the softcal
        curve. Valid entries are 1-20
    :param dest: The user curve index where the softcal curve is
        stored. Valid entries are 21-60.
    :param serial: The serial number of the new curve. A maximum of 10
        characters is allowed.
    :param T1: The first temperature point.
    :param U1: The first sensor units point.
    :param T2: The second temperature point.
    :param U2: The second sensor units point.
    :param T3: The third temperature point. Default: `None`.
    :param U3: The third sensor units point. Default: `None`.
    """
    args = [std, dest, serial, T1, U1, T2, U2]
    dtype = [Integer(min=1, max=21), Integer(min=21, max=61),
             String(max=10), Float, Float, Float, Float]
    # The third calibration point is optional; it is sent only when
    # both its temperature and sensor-units values are given.
    if (T3 is not None) and (U3 is not None):
        args.extend([T3, U3])
        dtype.extend([Float, Float])
    self._write(('SCAL', dtype), *args)
python
{ "resource": "" }
q38438
VIPPersonContactType.getParameters
train
def getParameters(self, contactItem):
    """
    Return a list containing a single parameter suitable for changing
    the VIP status of a person.

    @type contactItem: L{_PersonVIPStatus}
    @rtype: C{list} of L{liveform.Parameter}
    """
    if contactItem is None:
        isVIP = False  # default
    else:
        isVIP = contactItem.person.vip
    return [liveform.Parameter(
        'vip', liveform.CHECKBOX_INPUT, bool, 'VIP', default=isVIP)]
python
{ "resource": "" }
q38439
Person.getEmailAddresses
train
def getEmailAddresses(self):
    """
    Return an iterator of all email addresses associated with this
    person.

    @return: an iterator of unicode strings in RFC2822 address format.
    """
    query = self.store.query(EmailAddress, EmailAddress.person == self)
    return query.getColumn('address')
python
{ "resource": "" }
q38440
Organizer.groupReadOnlyViews
train
def groupReadOnlyViews(self, person):
    """
    Collect all contact items from the available contact types for the
    given person, organize them by contact group, and turn them into
    read-only views.

    @type person: L{Person}
    @param person: The person whose contact items we're interested in.

    @return: A mapping of L{ContactGroup} names to the read-only views
    of their member contact items, with C{None} being the key for
    groupless contact items.
    @rtype: C{dict} of C{str}
    """
    # This is a slightly awkward, specific API: we need the contact
    # type to get a read-only view for a contact item, and there is no
    # way to get from a contact item back to its contact type, so the
    # grouping and the view construction have to happen together here.
    grouped = {}
    for contactType in self.getContactTypes():
        for contactItem in contactType.getContactItems(person):
            contactGroup = contactType.getContactGroup(contactItem)
            groupName = None
            if contactGroup is not None:
                groupName = contactGroup.groupName
            # setdefault replaces the manual "create list if missing".
            grouped.setdefault(groupName, []).append(
                contactType.getReadOnlyView(contactItem))
    return grouped
python
{ "resource": "" }
q38441
PersonPluginView.getPluginWidget
train
def getPluginWidget(self, pluginName):
    """
    Return the named plugin's view.

    @type pluginName: C{unicode}
    @param pluginName: The name of the plugin.

    @rtype: L{LiveElement}
    """
    # This will always pick the first plugin with pluginName if there
    # is more than one.  Don't do that.
    # NOTE(review): implicitly returns None when no plugin matches --
    # confirm callers handle that.
    for plugin in self.plugins:
        if _organizerPluginName(plugin) != pluginName:
            continue
        view = self._toLiveElement(plugin.personalize(self.person))
        view.setFragmentParent(self)
        return view
python
{ "resource": "" }
q38442
Mugshot.makeThumbnail
train
def makeThumbnail(cls, inputFile, person, format, smaller):
    """
    Make a thumbnail of a mugshot image and store it on disk.

    @param inputFile: The image to thumbnail.
    @type inputFile: C{file}

    @param person: The person this mugshot thumbnail is associated with.
    @type person: L{Person}

    @param format: The format of the data in C{inputFile}.
    @type format: C{str} (e.g. I{jpeg})

    @param smaller: Thumbnails are available in two sizes.  if C{smaller}
    is C{True}, then the thumbnail will be in the smaller of the two
    sizes.
    @type smaller: C{bool}

    @return: path to the thumbnail.
    @rtype: L{twisted.python.filepath.FilePath}
    """
    dirsegs = ['mugshots', str(person.storeID)]
    if smaller:
        # Smaller thumbnails live in a 'smaller' subdirectory.
        dirsegs.insert(1, 'smaller')
        size = cls.smallerSize
    else:
        size = cls.size
    atomicOutputFile = person.store.newFile(*dirsegs)
    # NOTE(review): this call resolves to the module-level makeThumbnail
    # helper shadowed by this classmethod's name -- confirm.
    makeThumbnail(inputFile, atomicOutputFile, size, format)
    atomicOutputFile.close()
    return atomicOutputFile.finalpath
python
{ "resource": "" }
q38443
Extension.post_build
train
def post_build(self, container_builder, container):
    """
    Register filter and global in jinja environment instance

    IoC tags are:
      - jinja2.filter to register a filter; the tag must contain a
        name and a method option
      - jinja2.global to add a new global (a function); the tag must
        contain a name and a method option
    """
    jinja = container.get('ioc.extra.jinja2')
    # Filters and globals are registered identically, only into
    # different jinja registries.
    self._register_tagged(container_builder, container,
                          'jinja2.filter', jinja.filters)
    self._register_tagged(container_builder, container,
                          'jinja2.global', jinja.globals)

def _register_tagged(self, container_builder, container, tag, registry):
    """Bind each tagged service method into ``registry`` under the
    tag's ``name`` option."""
    for id in container_builder.get_ids_by_tag(tag):
        definition = container_builder.get(id)
        for option in definition.get_tag(tag):
            # NOTE(review): ``break`` mirrors the original code -- a
            # malformed option aborts the remaining options of this
            # definition; ``continue`` may have been intended. Confirm.
            if 'name' not in option:
                break
            if 'method' not in option:
                break
            registry[option['name']] = getattr(
                container.get(id), option['method'])
python
{ "resource": "" }
q38444
cache_key
train
def cache_key(*args, **kwargs):
    """
    Base method for computing the cache key with respect to the given
    arguments.

    Each positional argument contributes a ``:<repr(arg)>`` segment for
    callables and ``:<str(arg)>`` otherwise.
    """
    # NOTE(review): **kwargs is accepted but ignored -- two calls that
    # differ only in keyword arguments share a key. TODO confirm intent.
    parts = []
    for arg in args:
        rendered = repr(arg) if callable(arg) else str(arg)
        # join avoids the quadratic string "+=" of the original.
        parts.append(":%s" % rendered)
    return "".join(parts)
python
{ "resource": "" }
q38445
decode_values
train
def decode_values(fct):
    ''' Decode base64 encoded responses from Consul storage '''
    def inner(*args, **kwargs):
        ''' decorator '''
        payload = fct(*args, **kwargs)
        if 'error' in payload:
            # Error payloads carry no base64 values; pass through as-is.
            return payload
        for entry in payload:
            entry['Value'] = base64.b64decode(entry['Value'])
        return payload
    return inner
python
{ "resource": "" }
q38446
safe_request
train
def safe_request(fct):
    ''' Return json messages instead of raising errors '''
    def inner(*args, **kwargs):
        ''' decorator '''
        try:
            response = fct(*args, **kwargs)
        except requests.exceptions.ConnectionError as error:
            # Connection failures become an error payload, not a raise.
            return {'error': str(error), 'status': 404}
        if not response.ok:
            return {'error': response.reason,
                    'status': response.status_code}
        if response.content:
            return response.json()
        # Successful but empty body.
        return {'success': True}
    return inner
python
{ "resource": "" }
q38447
parseAddress
train
def parseAddress(address):
    """
    Parse the given RFC 2821 email address into a structured object.

    @type address: C{str}
    @param address: The address to parse.

    @rtype: L{Address}

    @raise xmantissa.error.ArgumentError: The given string was not a
    valid RFC 2821 address.
    """
    parts = []
    consumed = _AddressParser()(parts, address)
    # Anything left over after the grammar finished means garbage at
    # the end of the address.
    if consumed != len(address):
        raise InvalidTrailingBytes()
    return parts[0]
python
{ "resource": "" }
q38448
MicroService.start
train
def start(self):
    """
    The main method that starts the service. This is blocking.
    """
    self._initial_setup()
    self.on_service_start()
    self.app = self.make_tornado_app()
    enable_pretty_logging()
    self.app.listen(self.port, address=self.host)

    self._start_periodic_tasks()

    # starts the event handlers
    self._initialize_event_handlers()
    self._start_event_handlers()

    try:
        self.io_loop.start()
    except RuntimeError:
        # TODO : find a way to check if the io_loop is running before
        # trying to start it
        # this method to check if the loop is running is ugly
        pass
python
{ "resource": "" }
q38449
MicroService.get_plugin
train
def get_plugin(self, name):
    """
    Returns a plugin by name and raises
    ``gemstone.errors.PluginDoesNotExistError`` if no plugin with such
    name exists.

    :param name: a string specifying a plugin name.
    :return: the corresponding plugin instance.
    """
    matches = (p for p in self.plugins if p.name == name)
    for plugin in matches:
        # First match wins, exactly like the original linear scan.
        return plugin
    raise PluginDoesNotExistError("Plugin '{}' not found".format(name))
python
{ "resource": "" }
q38450
MicroService.start_thread
train
def start_thread(self, target, args, kwargs):
    """
    Shortcut method for starting a daemon thread.

    :param target: The function to be executed.
    :param args: A tuple or list representing the positional arguments
                 for the thread.
    :param kwargs: A dictionary representing the keyword arguments.

    .. versionadded:: 0.5.0
    """
    worker = threading.Thread(target=target, args=args, kwargs=kwargs)
    # Daemonize so the thread never blocks interpreter shutdown.
    worker.daemon = True
    worker.start()
python
{ "resource": "" }
q38451
MicroService.emit_event
train
def emit_event(self, event_name, event_body):
    """
    Publishes an event of type ``event_name`` to all subscribers,
    having the body ``event_body``. The event is pushed through all
    available event transports.

    The event body must be a Python object that can be represented as
    a JSON.

    :param event_name: a ``str`` representing the event type
    :param event_body: a Python object that can be represented as JSON.

    .. versionadded:: 0.5.0

    .. versionchanged:: 0.10.0 Added parameter broadcast
    """
    # Fan the event out over every configured transport.
    for channel in self.event_transports:
        channel.emit_event(event_name, event_body)
python
{ "resource": "" }
q38452
MicroService._add_static_handlers
train
def _add_static_handlers(self, handlers):
    """
    Creates and adds the handlers needed for serving static files.

    :param handlers: list of handler tuples, extended in place.
    """
    handlers.extend(
        (url.rstrip("/") + "/(.*)", StaticFileHandler, {"path": path})
        for url, path in self.static_dirs)
python
{ "resource": "" }
q38453
MicroService._gather_event_handlers
train
def _gather_event_handlers(self):
    """
    Searches for the event handlers in the current microservice class
    and in every registered module.
    """
    # The service itself is scanned first, then each module.
    for container in [self] + list(self.modules):
        self._extract_event_handlers_from_container(container)
python
{ "resource": "" }
q38454
connect_if_correct_version
train
def connect_if_correct_version(db_path, version):
    """Return a sqlite3 database connection if the version in the
    database's metadata matches the version argument.

    Also implicitly checks for whether the data in this database has
    been completely filled, since we set the version last.

    TODO: Make an explicit 'complete' flag to the metadata.
    """
    db = Database(db_path)
    if not db.has_version():
        return None
    if db.version() != version:
        return None
    return db.connection
python
{ "resource": "" }
q38455
_create_cached_db
train
def _create_cached_db(
        db_path,
        tables,
        version=1):
    """
    Either create or retrieve sqlite database.

    Parameters
    --------
    db_path : str
        Path to sqlite3 database file

    tables : dict
        Dictionary mapping table names to datacache.DatabaseTable objects

    version : int, optional
        Version acceptable as cached data.

    Returns sqlite3 connection
    """
    require_string(db_path, "db_path")
    require_iterable_of(tables, DatabaseTable)
    require_integer(version, "version")

    # if the database file doesn't already exist and we encounter an error
    # later, delete the file before raising an exception
    delete_on_error = not exists(db_path)

    # if the database already exists, contains all the table
    # names and has the right version, then just return it
    db = Database(db_path)

    # make sure to delete the database file in case anything goes wrong
    # to avoid leaving behind an empty DB
    table_names = [table.name for table in tables]
    try:
        if db.has_tables(table_names) and \
                db.has_version() and \
                db.version() == version:
            logger.info("Found existing table in database %s", db_path)
        else:
            if len(db.table_names()) > 0:
                logger.info(
                    "Dropping tables from database %s: %s",
                    db_path,
                    ", ".join(db.table_names()))
                db.drop_all_tables()
            logger.info(
                "Creating database %s containing: %s",
                db_path,
                ", ".join(table_names))
            db.create(tables, version)
    # bare except is deliberate here: clean up the half-written file on
    # *any* failure (including KeyboardInterrupt), then re-raise.
    except:
        logger.warning(
            "Failed to create tables %s in database %s",
            table_names,
            db_path)
        db.close()
        if delete_on_error:
            remove(db_path)
        raise
    return db.connection
python
{ "resource": "" }
q38456
db_from_dataframe
train
def db_from_dataframe(
        db_filename,
        table_name,
        df,
        primary_key=None,
        subdir=None,
        overwrite=False,
        indices=(),
        version=1):
    """
    Given a dataframe `df`, turn it into a sqlite3 database.
    Store values in a table called `table_name`.

    Returns full path to the sqlite database file.
    """
    # Thin wrapper: delegates to the multi-table variant with this
    # single table's settings keyed by table_name.
    return db_from_dataframes(
        db_filename=db_filename,
        dataframes={table_name: df},
        primary_keys={table_name: primary_key},
        indices={table_name: indices},
        subdir=subdir,
        overwrite=overwrite,
        version=version)
python
{ "resource": "" }
q38457
_db_filename_from_dataframe
train
def _db_filename_from_dataframe(base_filename, df):
    """
    Generate database filename for a sqlite3 database we're going to
    fill with the contents of a DataFrame, using the DataFrame's column
    names and types.
    """
    suffixes = []
    for column_name in df.columns:
        column_db_type = db_type(df[column_name].dtype)
        # Spaces are not filename-friendly; use underscores instead.
        safe_name = column_name.replace(" ", "_")
        suffixes.append(".%s_%s" % (safe_name, column_db_type))
    return "%s_nrows%d%s.db" % (base_filename, len(df), "".join(suffixes))
python
{ "resource": "" }
q38458
fetch_csv_db
train
def fetch_csv_db(
        table_name,
        download_url,
        csv_filename=None,
        db_filename=None,
        subdir=None,
        version=1,
        **pandas_kwargs):
    """
    Download a remote CSV file and create a local sqlite3 database
    from its contents.

    ``db_filename`` defaults to a name derived from ``csv_filename``
    and the DataFrame's shape.
    """
    df = fetch_csv_dataframe(
        download_url=download_url,
        filename=csv_filename,
        subdir=subdir,
        **pandas_kwargs)
    if db_filename is None:
        # Only derive a name when none was given: the original called
        # splitext(csv_filename) unconditionally, raising TypeError when
        # csv_filename was None even though db_filename was supplied.
        base_filename = splitext(csv_filename)[0]
        db_filename = _db_filename_from_dataframe(base_filename, df)
    return db_from_dataframe(
        db_filename,
        table_name,
        df,
        subdir=subdir,
        version=version)
python
{ "resource": "" }
q38459
GoogleFinance.get_all
train
def get_all(self, security):
    """
    Fetch every quote field shown on the Google Finance page for the
    given ticker security and return them as a {label: value} dict.
    """
    page = self._request('http://www.google.com/finance?q=%s' % security)
    table = BeautifulSoup(page).find("table", {"class": "snap-data"})
    if table is None:
        raise UfException(Errors.STOCK_SYMBOL_ERROR,
                          "Can find data for stock %s, security error?" % security)
    quote = {}
    for tr in table.findAll('tr'):
        # each row is a (label, value) pair of <td> cells
        label_td, value_td = tr.findAll('td')
        quote[label_td.getText()] = value_td.getText()
    return quote
python
{ "resource": "" }
q38460
GoogleFinance.quotes
train
def quotes(self, security, start, end):
    """
    Get historical prices for the given ticker security.
    Date format is 'YYYYMMDD'.

    Yields Quote objects, one per trading day, skipping (and logging)
    rows that fail to parse.
    """
    try:
        url = 'http://www.google.com/finance/historical?q=%s&startdate=%s&enddate=%s&output=csv' % (security.symbol, start, end)
        try:
            page = self._request(url)
        except UfException as ufExcep:
            # if symbol is not right, will get 400
            # NOTE(review): ``ufExcep.getCode`` is not called here -- if
            # getCode is a method, this comparison never matches; confirm
            # whether it should be ``ufExcep.getCode()``.
            if Errors.NETWORK_400_ERROR == ufExcep.getCode:
                raise UfException(Errors.STOCK_SYMBOL_ERROR, "Can find data for stock %s, security error?" % security)
            raise ufExcep
        days = page.readlines()
        values = [day.split(',') for day in days]
        # sample values:[['Date', 'Open', 'High', 'Low', 'Close', 'Volume'], \
        #               ['2009-12-31', '112.77', '112.80', '111.39', '111.44', '90637900']...]
        for value in values[1:]:  # skip the CSV header row
            date = convertGoogCSVDate(value[0])
            try:
                yield Quote(date, value[1].strip(), value[2].strip(), value[3].strip(), value[4].strip(), value[5].strip(), None)
            except Exception:
                LOG.warning("Exception when processing %s at date %s for value %s" % (security, date, value))
    except BaseException:
        # NOTE(review): this also re-wraps the UfExceptions raised above
        # as UNKNOWN_ERROR -- confirm that is intended.
        raise UfException(Errors.UNKNOWN_ERROR, "Unknown Error in GoogleFinance.getHistoricalPrices %s" % traceback.format_exc())
python
{ "resource": "" }
q38461
GoogleFinance._parseTarget
train
def _parseTarget(self, target, keyTimeValue):
    ''' Parse one financial-statement table into keyTimeValue,
    mapping row label -> {timestamp: value}. '''
    table = target.table
    timestamps = self._getTimeStamps(table)
    for row in table.tbody.findChildren('tr'):
        for index, cell in enumerate(row.findChildren('td')):
            if index == 0:
                # first cell holds the row label
                key = cell.getText()
                keyTimeValue.setdefault(key, {})
            else:
                keyTimeValue[key][timestamps[index - 1]] = self._getValue(cell)
python
{ "resource": "" }
q38462
GoogleFinance._getTimeStamps
train
def _getTimeStamps(self, table): ''' get time stamps ''' timeStamps = [] for th in table.thead.tr.contents: if '\n' != th: timeStamps.append(th.getText()) return timeStamps[1:]
python
{ "resource": "" }
q38463
GoogleFinance.ticks
train
def ticks(self, security, start, end):
    """
    Get tick prices for the given ticker security.

    @security: stock security (only ``security.symbol`` is used)
    @start: start date(YYYYMMDD) -- currently unused
    @end: end date(YYYYMMDD) -- currently unused
    start and end are disabled since only 15 days of minute data will
    show; the URL hard-codes a 61-second interval and 1-day period.

    Yields one JSON string per tick.
    """
    period = 1
    # url = 'http://www.google.com/finance/getprices?q=%s&i=%s&p=%sd&f=d,o,h,l,c,v&ts=%s' % (security, interval, period, start)
    url = 'http://www.google.com/finance/getprices?q=%s&i=61&p=%sd&f=d,o,h,l,c,v' % (security.symbol, period)
    LOG.debug('fetching {0}'.format(url))
    try:
        response = self._request(url)
    except UfException as ufExcep:
        # if symbol is not right, will get 400
        # NOTE(review): ``ufExcep.getCode`` is not called -- if getCode is
        # a method, this comparison never matches; confirm whether it
        # should be ``ufExcep.getCode()``.
        if Errors.NETWORK_400_ERROR == ufExcep.getCode:
            raise UfException(Errors.STOCK_SYMBOL_ERROR, "Can find data for stock %s, security error?" % security)
        raise ufExcep
    # use csv reader here
    days = response.text.split('\n')[7:]  # first 7 lines are document header
    # sample values:'a1316784600,31.41,31.5,31.4,31.43,150911'
    values = [day.split(',') for day in days if len(day.split(',')) >= 6]
    for value in values:
        # NOTE(review): index 1 is labeled 'close' and index 4 'open'
        # although the URL requests fields d,o,h,l,c,v -- confirm this
        # mapping against actual responses.
        yield json.dumps({'date': value[0][1:].strip(),
                          'close': value[1].strip(),
                          'high': value[2].strip(),
                          'low': value[3].strip(),
                          'open': value[4].strip(),
                          'volume': value[5].strip()})
python
{ "resource": "" }
q38464
GraphOperations.get_binding
train
def get_binding(self, schema, data):
    """
    Resolve ``schema`` through the parent graph and wrap it, together
    with ``data``, in a Binding that links schema properties to RDF
    terms.
    """
    resolved = self.parent.get_schema(schema)
    return Binding(resolved, self.parent.resolver, data=data)
python
{ "resource": "" }
q38465
GraphOperations.get
train
def get(self, id, depth=3, schema=None):
    """
    Construct a single object based on its ID.

    When no schema is given, probe the graph's rdf:type statements for
    the subject until one resolves to a known schema.
    """
    uri = URIRef(id)
    if schema is not None:
        schema = self.parent.get_schema(schema)
    else:
        for type_obj in self.graph.objects(subject=uri, predicate=RDF.type):
            schema = self.parent.get_schema(str(type_obj))
            if schema is not None:
                break
    binding = self.get_binding(schema, None)
    return self._objectify(uri, binding, depth=depth, path=set())
python
{ "resource": "" }
q38466
BaseOAIRELoader.get_text_node
train
def get_text_node(self, tree, xpath_str):
    """Return the text of the first XPath match, or '' if the match is
    absent or has no text."""
    matches = tree.xpath(xpath_str, namespaces=self.namespaces)
    if not matches:  # pragma: nocover
        return ''
    text = matches[0].text
    return text_type(text) if text else ''
python
{ "resource": "" }
q38467
BaseOAIRELoader.get_subtree
train
def get_subtree(self, tree, xpath_str):
    """Evaluate an lxml XPath against ``tree`` and return all matches."""
    matches = tree.xpath(xpath_str, namespaces=self.namespaces)
    return matches
python
{ "resource": "" }
q38468
BaseOAIRELoader.fundertree2json
train
def fundertree2json(self, tree, oai_id):
    """Convert OpenAIRE's funder XML to JSON.

    Extracts the funder and optional level-0 sub-funder from the
    project's funding tree, resolves the funder's FundRef DOI, and
    returns a dict with keys: doi, url, name, program.

    Raises FunderNotFoundError if no DOI can be resolved, and
    OAIRELoadingError if the DOI resolves but no FundRef record has
    been loaded for it.
    """
    try:
        tree = self.get_subtree(tree, 'fundingtree')[0]
    except IndexError:  # pragma: nocover
        pass
    funder_node = self.get_subtree(tree, 'funder')
    subfunder_node = self.get_subtree(tree, '//funding_level_0')
    funder_id = self.get_text_node(funder_node[0], './id') \
        if funder_node else None
    subfunder_id = self.get_text_node(subfunder_node[0], './id') \
        if subfunder_node else None
    funder_name = self.get_text_node(funder_node[0], './shortname') \
        if funder_node else ""
    subfunder_name = self.get_text_node(subfunder_node[0], './name') \
        if subfunder_node else ""

    # Try to resolve the subfunder first, on failure try to resolve the
    # main funder, on failure raise an error.
    funder_doi_url = None
    if subfunder_id:
        funder_doi_url = self.funder_resolver.resolve_by_id(subfunder_id)
    if not funder_doi_url:
        if funder_id:
            funder_doi_url = self.funder_resolver.resolve_by_id(funder_id)
    # last resort: derive the funder from the OAI record id prefix
    if not funder_doi_url:
        funder_doi_url = self.funder_resolver.resolve_by_oai_id(oai_id)
    if not funder_doi_url:
        raise FunderNotFoundError(oai_id, funder_id, subfunder_id)
    funder_doi = FundRefDOIResolver.strip_doi_host(funder_doi_url)
    if not funder_name:
        # Grab name from FundRef record.
        resolver = Resolver(
            pid_type='frdoi', object_type='rec', getter=Record.get_record)
        try:
            dummy_pid, funder_rec = resolver.resolve(funder_doi)
            funder_name = funder_rec['acronyms'][0]
        except PersistentIdentifierError:
            # NOTE(review): the two adjacent string literals concatenate
            # without a space ("...prior toloading...").
            raise OAIRELoadingError(
                "Please ensure that funders have been loaded prior to"
                "loading grants. Could not resolve funder {0}".format(
                    funder_doi))
    return dict(
        doi=funder_doi,
        url=funder_doi_url,
        name=funder_name,
        program=subfunder_name,
    )
python
{ "resource": "" }
q38469
BaseOAIRELoader.grantxml2json
train
def grantxml2json(self, grant_xml):
    """Convert OpenAIRE grant XML into the internal JSON representation.

    Handles both OAI-PMH harvested records (``oai`` prefix) and plain
    exported records, extracting the project fields plus funder info
    and building the canonical identifiers.
    """
    tree = etree.fromstring(grant_xml)

    # XML harvested from OAI-PMH has a different format/structure
    if tree.prefix == 'oai':
        ptree = self.get_subtree(
            tree, '/oai:record/oai:metadata/oaf:entity/oaf:project')[0]
        header = self.get_subtree(tree, '/oai:record/oai:header')[0]
        oai_id = self.get_text_node(header, 'oai:identifier')
        modified = self.get_text_node(header, 'oai:datestamp')
    else:
        ptree = self.get_subtree(
            tree, '/record/result/metadata/oaf:entity/oaf:project')[0]
        header = self.get_subtree(tree, '/record/result/header')[0]
        oai_id = self.get_text_node(header, 'dri:objIdentifier')
        modified = self.get_text_node(header, 'dri:dateOfTransformation')

    url = self.get_text_node(ptree, 'websiteurl')
    code = self.get_text_node(ptree, 'code')
    title = self.get_text_node(ptree, 'title')
    acronym = self.get_text_node(ptree, 'acronym')
    startdate = self.get_text_node(ptree, 'startdate')
    enddate = self.get_text_node(ptree, 'enddate')

    funder = self.fundertree2json(ptree, oai_id)

    # internal id: "<funder doi>::<grant code>"
    internal_id = "{0}::{1}".format(funder['doi'], code)
    # OpenAIRE/DRIVER eu-repo grant-agreement identifier
    eurepo_id = \
        "info:eu-repo/grantAgreement/{funder}/{program}/{code}/".format(
            funder=quote_plus(funder['name'].encode('utf8')),
            program=quote_plus(funder['program'].encode('utf8')),
            code=quote_plus(code.encode('utf8')),
        )
    ret_json = {
        '$schema': self.schema_formatter.schema_url,
        'internal_id': internal_id,
        'identifiers': {
            'oaf': oai_id,
            'eurepo': eurepo_id,
            # only PURLs are kept here; other URLs go in 'url' below
            'purl': url if url.startswith("http://purl.org/") else None,
        },
        'code': code,
        'title': title,
        'acronym': acronym,
        'startdate': startdate,
        'enddate': enddate,
        'funder': {'$ref': funder['url']},
        'program': funder['program'],
        'url': url,
        'remote_modified': modified,
    }
    return ret_json
python
{ "resource": "" }
q38470
LocalOAIRELoader.iter_grants
train
def iter_grants(self, as_json=True):
    """Yield grant records stored in the local SQLite database,
    converting each to the requested output format."""
    self._connect()
    rows = self.db_connection.cursor().execute(
        "SELECT data, format FROM grants"
    )
    for payload, stored_format in rows:
        if as_json:
            if stored_format == 'xml':
                payload = self.grantxml2json(payload)
            elif stored_format == 'json':
                payload = json.loads(payload)
        elif stored_format == 'json':
            # XML output cannot be reconstructed from a JSON dump
            raise Exception("Cannot convert JSON source to XML output.")
        yield payload
    self._disconnect()
python
{ "resource": "" }
q38471
RemoteOAIRELoader.iter_grants
train
def iter_grants(self, as_json=True):
    """Yield grants harvested from a remote OAI-PMH endpoint.

    Grants whose funder cannot be resolved are logged and skipped.
    """
    records = self.client.ListRecords(metadataPrefix='oaf',
                                      set=self.setspec)
    for record in records:
        try:
            payload = record.raw  # raw harvested XML
            if as_json:
                payload = self.grantxml2json(payload)
            yield payload
        except FunderNotFoundError as e:
            current_app.logger.warning("Funder '{0}' not found.".format(
                e.funder_id))
python
{ "resource": "" }
q38472
OAIREDumper.dump
train
def dump(self, as_json=True, commit_batch_size=100):
    """
    Dump the grant information to a local sqlite storage.

    :param as_json: Convert XML to JSON before saving (default: True).
    :param commit_batch_size: number of inserts between commits.
    """
    connection = sqlite3.connect(self.destination)
    format_ = 'json' if as_json else 'xml'
    if not self._db_exists(connection):
        connection.execute(
            "CREATE TABLE grants (data text, format text)")

    # This will call the RemoteOAIRELoader.iter_grants and fetch
    # records from remote location.
    grants_iterator = self.loader.iter_grants(as_json=as_json)
    for idx, grant_data in enumerate(grants_iterator, 1):
        if as_json:
            grant_data = json.dumps(grant_data, indent=2)
        connection.execute(
            "INSERT INTO grants VALUES (?, ?)", (grant_data, format_))
        # Commit to database every N records so a crash loses at most
        # one batch.
        if idx % commit_batch_size == 0:
            connection.commit()
    # final commit for the trailing partial batch
    connection.commit()
    connection.close()
python
{ "resource": "" }
q38473
BaseFundRefLoader.iter_funders
train
def iter_funders(self):
    """Yield each skos:Concept funder node converted to a JSON dict."""
    concepts = self.doc_root.findall('./skos:Concept',
                                     namespaces=self.namespaces)
    for concept in concepts:
        yield self.fundrefxml2json(concept)
python
{ "resource": "" }
q38474
FundRefDOIResolver.resolve_by_oai_id
train
def resolve_by_oai_id(self, oai_id):
    """Resolve a funder DOI from an OpenAIRE OAI record id.

    Hack for when the funder is not provided in OpenAIRE: the dataset
    prefix (e.g. ``corda_______``) becomes the lookup key
    ``prefix::PREFIXWITHOUTUNDERSCORES``.
    """
    record_id = oai_id
    marker = 'oai:dnet:'
    if record_id.startswith(marker):
        record_id = record_id[len(marker):]
    prefix = record_id.partition("::")[0]
    key = "{0}::{1}".format(prefix, prefix.replace("_", "").upper())
    return self.data.get(key)
python
{ "resource": "" }
q38475
predicates
train
def predicates(graph):
    """
    Yield all known predicates across the registered schemata as
    ``(schema_path, property_name, allowed_types)`` tuples, without
    duplicates.
    """
    # Paths already visited during schema traversal; guards against
    # cyclic / self-referential schemas.
    seen_paths = set()
    # Predicate tuples already yielded. Kept separate from seen_paths so
    # the two kinds of keys cannot collide (the original used one set
    # for both).
    seen_preds = set()

    def _traverse(binding):
        if binding.path in seen_paths:
            return
        seen_paths.add(binding.path)
        if binding.is_object:
            for prop in binding.properties:
                yield (binding.path, prop.name, tuple(prop.types))
                for pred in _traverse(prop):
                    yield pred
        elif binding.is_array:
            for item in binding.items:
                for pred in _traverse(item):
                    yield pred

    # Traverse both the aliased schemas and any schemas cached by the
    # resolver. (The original built this combined list but then only
    # iterated the aliases, and called ``.extend`` on a dict view,
    # which fails on Python 3.)
    schemas = list(graph.aliases.values())
    schemas.extend(graph.resolver.store)
    for schema_uri in schemas:
        binding = graph.get_binding(schema_uri, None)
        for pred in _traverse(binding):
            if pred not in seen_preds:
                yield pred
                seen_preds.add(pred)
python
{ "resource": "" }
q38476
passwordReset1to2
train
def passwordReset1to2(old):
    """
    Schema upgrader: power down the upgraded item from every interface
    it provides, then delete it from the store entirely.
    """
    upgraded = old.upgradeVersion(old.typeName, 1, 2, installedOn=None)
    store = upgraded.store
    for interface in store.interfacesFor(upgraded):
        store.powerDown(upgraded, interface)
    upgraded.deleteFromStore()
python
{ "resource": "" }
q38477
ticket1to2
train
def ticket1to2(old):
    """
    change Ticket to refer to Products and not benefactor factories.

    Schema upgrader from Ticket version 1 to 2: collects the powerup
    names from the old benefactor hierarchy into a Product, and makes
    sure the ticket has an issuer (falling back to a TicketBooth).
    """
    if isinstance(old.benefactor, Multifactor):
        # flatten the powerup names of every sub-benefactor
        types = list(chain(*[b.powerupNames
                             for b in old.benefactor.benefactors('ascending')]))
    elif isinstance(old.benefactor, InitializerBenefactor):
        # oh man what a mess -- InitializerBenefactor wraps the real
        # benefactor, so unwrap it first
        types = list(chain(*[b.powerupNames
                             for b in old.benefactor.realBenefactor.benefactors('ascending')]))
    # NOTE(review): ``types`` is unbound if the benefactor is neither
    # type -- confirm those are the only possibilities in old stores.
    newProduct = old.store.findOrCreate(Product, types=types)
    if old.issuer is None:
        issuer = old.store.findOrCreate(TicketBooth)
    else:
        issuer = old.issuer
    t = old.upgradeVersion(Ticket.typeName, 1, 2,
                           product=newProduct,
                           issuer=issuer,
                           booth=old.booth,
                           avatar=old.avatar,
                           claimed=old.claimed,
                           email=old.email,
                           nonce=old.nonce)
python
{ "resource": "" }
q38478
_getPublicSignupInfo
train
def _getPublicSignupInfo(siteStore):
    """
    Get information about public web-based signup mechanisms.

    @param siteStore: a store with some signups installed on it (as
        indicated by _SignupTracker instances).

    @return: a generator which yields 2-tuples of (prompt, url) where
        'prompt' is unicode briefly describing the signup mechanism
        (e.g. "Sign Up"), and 'url' is a (unicode) local URL linking to
        a page where an anonymous user can access it.
    """
    # Note the underscore; this _should_ be a public API but it is
    # currently an unfortunate hack; there should be a different powerup
    # interface that requires prompt and prefixURL attributes rather
    # than _SignupTracker. -glyph
    for tracker in siteStore.query(_SignupTracker):
        signup = tracker.signupItem
        prompt = getattr(signup, 'prompt', None)
        prefix = getattr(signup, 'prefixURL', None)
        if prompt is not None and prefix is not None:
            yield (prompt, u'/' + prefix)
python
{ "resource": "" }
q38479
PasswordResetResource.renderHTTP
train
def renderHTTP(self, ctx):
    """
    Handle the password reset form.

    The following exchange describes the process:

    S: Render C{reset}
    C: POST C{username} or C{email}
    S: L{handleRequestForUser}, render C{reset-check-email}

    (User follows the emailed reset link)

    S: Render C{reset-step-two}
    C: POST C{password1}
    S: L{resetPassword}, render C{reset-done}
    """
    req = inevow.IRequest(ctx)
    if req.method == 'POST':
        if req.args.get('username', [''])[0]:
            # reset requested by username
            user = unicode(usernameFromRequest(req), 'ascii')
            self.handleRequestForUser(user, URL.fromContext(ctx))
            self.fragment = self.templateResolver.getDocFactory(
                'reset-check-email')
        elif req.args.get('email', [''])[0]:
            # reset requested by email address: look up the account and
            # derive its primary username
            email = req.args['email'][0].decode('ascii')
            acct = self.accountByAddress(email)
            if acct is not None:
                username = '@'.join(
                    userbase.getAccountNames(acct.avatars.open()).next())
                self.handleRequestForUser(username, URL.fromContext(ctx))
            # the same page renders whether or not the address matched;
            # presumably so the form doesn't reveal which addresses
            # exist -- confirm before changing
            self.fragment = self.templateResolver.getDocFactory('reset-check-email')
        elif 'password1' in req.args:
            # second step: user followed the emailed link and submitted
            # a new password
            (password,) = req.args['password1']
            self.resetPassword(self.attempt, unicode(password))
            self.fragment = self.templateResolver.getDocFactory('reset-done')
        else:
            # Empty submit; redirect back to self
            return URL.fromContext(ctx)
    elif self.attempt:
        # GET with a valid reset attempt: show the new-password form
        self.fragment = self.templateResolver.getDocFactory('reset-step-two')
    return PublicPage.renderHTTP(self, ctx)
python
{ "resource": "" }
q38480
PasswordResetResource._makeKey
train
def _makeKey(self, usern):
    """
    Make a new, unguessable key for a password-reset attempt.

    This key will be sent in an email to the user and is used to access
    the password change form, so it must be unpredictable.

    @param usern: the username the key is issued for (kept for
        interface compatibility; no longer mixed into the key).
    """
    # The previous implementation hashed (username, time(), random()),
    # all of which are guessable; hash CSPRNG bytes from os.urandom
    # instead. md5 is retained only to keep the 32-character hex format
    # of existing keys.
    import os
    return unicode(hashlib.md5(os.urandom(32)).hexdigest())
python
{ "resource": "" }
q38481
TicketBooth.issueViaEmail
train
def issueViaEmail(self, issuer, email, product, templateData,
                  domainName, httpPort=80):
    """
    Send a ticket via email to the supplied address, which, when claimed, will
    create an avatar and allow the given product to endow it with
    things.

    @param issuer: An object, preferably a user, to track who issued
    this ticket.

    @param email: a str, formatted as an rfc2821 email address
    (user@domain) -- source routes not allowed.

    @param product: an instance of L{Product}

    @param domainName: a domain name, used as the domain part of the
    sender's address, and as the web server to generate a link to
    within the email.

    @param httpPort: a port number for the web server running on
    domainName

    @param templateData: A string containing an rfc2822-format email
    message, which will have several python values interpolated into it
    dictwise:

        %(from)s: To be used for the From: header; will contain an
         rfc2822-format address.

        %(to)s: the address that we are going to send to.

        %(date)s: an rfc2822-format date.

        %(message-id)s: an rfc2822 message-id

        %(link)s: an HTTP URL that we are generating a link to.
    """
    ticket = self.createTicket(issuer,
                               unicode(email, 'ascii'),
                               product)
    # the nonce is embedded in the emailed claim link
    nonce = ticket.nonce
    # values interpolated dict-wise into templateData (see docstring)
    signupInfo = {'from': 'signup@'+domainName,
                  'to': email,
                  'date': rfc822.formatdate(),
                  'message-id': smtp.messageid(),
                  'link': self.ticketLink(domainName, httpPort, nonce)}
    msg = templateData % signupInfo
    return ticket, _sendEmail(signupInfo['from'], email, msg)
python
{ "resource": "" }
q38482
UserInfoSignup.usernameAvailable
train
def usernameAvailable(self, username, domain):
    """
    Check whether ``username`` at ``domain`` can be registered.

    Returns a two-item list: [availability boolean, explanatory
    message].
    """
    if len(username) < 2:
        return [False, u"Username too short"]

    # reject characters that would break addresses or markup
    for char in u"[ ,:;<>@()!\"'%&\\|\t\b":
        if char in username:
            return [False, u"Username contains invalid character: '%s'" % char]

    # The localpart is acceptable only if it can be parsed as the local
    # part of an RFC 2821 address.
    try:
        parseAddress("<%s@example.com>" % (username,))
    except ArgumentError:
        return [False, u"Username fails to parse"]

    # The domain is acceptable only if we actually host it.
    if domain not in self.getAvailableDomains():
        return [False, u"Domain not allowed"]

    existing = self.store.query(
        userbase.LoginMethod,
        AND(userbase.LoginMethod.localpart == username,
            userbase.LoginMethod.domain == domain))
    return [not bool(existing.count()), u"Username already taken"]
python
{ "resource": "" }
q38483
SignupConfiguration.createSignup
train
def createSignup(self, creator, signupClass, signupConf,
                 product, emailTemplate, prompt):
    """
    Create a new signup facility in the site store's database.

    @param creator: a unicode string describing the creator of the new
    signup mechanism, for auditing purposes.

    @param signupClass: the item type of the signup mechanism to create.

    @param signupConf: a dictionary of keyword arguments for
    L{signupClass}'s constructor.

    @param product: A Product instance, describing the powerups to be
    installed with this signup.

    @param emailTemplate: a unicode string which contains some text
    that will be sent in confirmation emails generated by this
    signup mechanism (if any)

    @param prompt: a short unicode string describing this signup
    mechanism, as distinct from others.  For example: "Student Sign
    Up", or "Faculty Sign Up"

    @return: a newly-created, database-resident instance of
    signupClass.
    """
    siteStore = self.store.parent
    # a single TicketBooth is shared by all signup mechanisms; install
    # it on the site store the first time one is created
    booth = siteStore.findOrCreate(TicketBooth,
                                   lambda booth: installOn(booth, siteStore))
    signupItem = signupClass(
        store=siteStore,
        booth=booth,
        product=product,
        emailTemplate=emailTemplate,
        prompt=prompt,
        **signupConf)
    siteStore.powerUp(signupItem)
    # record who created this signup and when, for auditing
    _SignupTracker(store=siteStore,
                   signupItem=signupItem,
                   createdOn=extime.Time(),
                   createdBy=creator)

    return signupItem
python
{ "resource": "" }
q38484
ProductFormMixin.makeProductPicker
train
def makeProductPicker(self):
    """
    Make a LiveForm with radio buttons for each Product in the store.

    Each product gets a nested LiveForm containing a single radio
    button; the inner coercer returns the product itself when its
    button is selected (and a falsy value otherwise).
    """
    productPicker = liveform.LiveForm(
        self.coerceProduct,
        [liveform.Parameter(
            # parameter names must be unique; use the product's id()
            str(id(product)),
            liveform.FORM_INPUT,
            liveform.LiveForm(
                # ``product=product`` binds the loop variable at lambda
                # definition time (avoids the late-binding closure bug)
                lambda selectedProduct, product=product: selectedProduct and product,
                [liveform.Parameter(
                    'selectedProduct',
                    liveform.RADIO_INPUT,
                    bool,
                    repr(product))]
                ))
         for product in self.original.store.parent.query(Product)],
        u"Product to Install")
    return productPicker
python
{ "resource": "" }
q38485
SignupFragment._deleteTrackers
train
def _deleteTrackers(self, trackers): """ Delete the given signup trackers and their associated signup resources. @param trackers: sequence of L{_SignupTrackers} """ for tracker in trackers: if tracker.store is None: # we're not updating the list of live signups client side, so # we might get a signup that has already been deleted continue sig = tracker.signupItem # XXX the only reason we're doing this here is that we're afraid to # add a whenDeleted=CASCADE to powerups because it's inefficient, # however, this is arguably the archetypical use of # whenDeleted=CASCADE. Soon we need to figure out a real solution # (but I have no idea what it is). -glyph for iface in sig.store.interfacesFor(sig): sig.store.powerDown(sig, iface) tracker.deleteFromStore() sig.deleteFromStore()
python
{ "resource": "" }
q38486
Image.fetch
train
def fetch(self):
    """
    Fetch & return a new `Image` object representing the image's
    current state

    :rtype: Image
    :raises DOAPIError: if the API endpoint replies with an error
        (e.g., if the image no longer exists)
    """
    manager = self.doapi_manager
    response = manager.request(self.url)
    return manager._image(response["image"])
python
{ "resource": "" }
q38487
Censusname.generate
train
def generate(self, nameformat=None, capitalize=None, formatters=None, **kwargs):
    '''Pick a random name from a specified list of name parts.

    nameformat: format string with {part} placeholders; defaults to
        self.nameformat.
    capitalize: capitalize each picked part.
        NOTE(review): because of the ``or`` fallback below, passing
        capitalize=False still uses self.capitalize -- confirm whether
        an explicit False should be honored.
    formatters: dict mapping part name -> list of callables applied, in
        order, after self.formatters for the same part.
    **kwargs: forwarded to self._get_lines to constrain the pick.
    '''
    nameformat = nameformat or self.nameformat
    capitalize = capitalize or self.capitalize
    formatters = formatters or {}

    lines = self._get_lines(kwargs)
    names = dict((k, v['name']) for k, v in list(lines.items()))

    if capitalize:
        names = dict((k, n.capitalize()) for k, n in list(names.items()))

    merged_formatters = dict()
    try:
        # concatenate per-key formatter lists: instance-level first,
        # then the ones passed to this call
        merged_formatters = dict(
            (k, self.formatters.get(k, []) + formatters.get(k, []))
            for k in set(list(self.formatters.keys()) + list(formatters.keys()))
        )
    except AttributeError:
        raise TypeError("keyword argument 'formatters' for Censusname.generate() must be a dict")

    if merged_formatters:
        for key, functions in list(merged_formatters.items()):
            # e.g. 'surname', [func_a, func_b]
            for func in functions:
                # names['surname'] = func_a(names['surname'])
                names[key] = func(names[key])

    return nameformat.format(**names)
python
{ "resource": "" }
q38488
Censusname.pick_frequency_line
train
def pick_frequency_line(self, filename, frequency, cumulativefield='cumulative_frequency'):
    '''Given a numeric frequency, pick a line from a csv with a
    cumulative frequency field.

    Looks for ``filename`` inside the installed ``censusname`` package
    first, falling back to the filesystem.
    '''
    if resource_exists('censusname', filename):
        # packaged data file: read as a binary stream and decode
        with closing(resource_stream('censusname', filename)) as b:
            g = codecs.iterdecode(b, 'ascii')
            return self._pick_frequency_line(g, frequency, cumulativefield)
    else:
        # plain path on disk
        with open(filename, encoding='ascii') as g:
            return self._pick_frequency_line(g, frequency, cumulativefield)
python
{ "resource": "" }
q38489
git_repo
train
def git_repo():
    """
    Return the name of the git repository root directory if the cwd is
    inside a repo, else None.
    """
    try:
        reldir = subprocess.check_output(
            ["git", "rev-parse", "--git-dir"])
    except subprocess.CalledProcessError:
        # not inside a git repository
        return None
    # check_output returns bytes with a trailing newline; strip it so
    # the path manipulation below operates on a clean path. (Previously
    # the newline survived into abspath/dirname and was only removed by
    # accident.)
    reldir = reldir.decode("utf-8").strip()
    return os.path.basename(os.path.dirname(os.path.abspath(reldir)))
python
{ "resource": "" }
q38490
git_hash
train
def git_hash():
    """Return the current git commit hash, or "unknown" outside a repo."""
    if git_repo() is None:
        return "unknown"
    raw = subprocess.check_output(["git", "rev-parse", "HEAD"])
    # decode the byte string and drop the trailing newline
    return raw.decode("utf-8").strip()
python
{ "resource": "" }
q38491
git_pretty
train
def git_pretty():
    """Return a one-line summary of the current commit, or "unknown"
    outside a git repo."""
    if git_repo() is None:
        return "unknown"
    summary = subprocess.check_output(
        ["git", "log", "--pretty=format:%h %s", "-n", "1"])
    return summary.decode("utf-8").strip()
python
{ "resource": "" }
q38492
invocation
train
def invocation():
    """Reconstruct the shell command line that launched this python
    program, with each argument safely quoted."""
    parts = [sys.executable]
    parts.extend(sys.argv)
    return " ".join(shlex.quote(part) for part in parts)
python
{ "resource": "" }
q38493
serialize
train
def serialize(func):
    """
    Falcon response serialization decorator.

    Wraps a responder: disables client caching via headers, and if the
    responder did not set ``resp.body`` itself, JSON-encodes its return
    value (using MyEncoder) into the response body. The wrapped
    function's return value is passed through either way.
    """
    def wrapped(instance, req, resp, **kwargs):
        # sanity check that unicode query parameters survive transport
        # NOTE(review): ``assert`` is stripped under ``python -O`` --
        # confirm this is not relied upon for validation in production.
        assert not req.get_param("unicode") or req.get_param("unicode") == u"✓", "Unicode sanity check failed"
        # forbid caching of API responses
        resp.set_header("Cache-Control", "no-cache, no-store, must-revalidate");
        resp.set_header("Pragma", "no-cache");
        resp.set_header("Expires", "0");
        r = func(instance, req, resp, **kwargs)
        if not resp.body:
            # responder returned data instead of writing the body:
            # negotiate and emit JSON
            if not req.client_accepts_json:
                raise falcon.HTTPUnsupportedMediaType(
                    'This API only supports the JSON media type.',
                    href='http://docs.examples.com/api/json')
            resp.set_header('Content-Type', 'application/json')
            resp.body = json.dumps(r, cls=MyEncoder)
        return r
    return wrapped
python
{ "resource": "" }
q38494
ExcelWrite.openSheet
train
def openSheet(self, name):
    ''' Select the sheet to write to, creating it on first use. '''
    if name not in self.__sheetNameDict:
        # lazily create the sheet the first time it is opened
        self.__sheetNameDict[name] = self.__workbook.add_sheet(name)
    self.__sheet = self.__sheetNameDict[name]
python
{ "resource": "" }
q38495
ExcelWrite.__getSheet
train
def __getSheet(self, name):
    ''' Look up a sheet by name, raising if it was never opened. '''
    if not self.sheetExsit(name):
        raise UfException(Errors.SHEET_NAME_INVALID,
                          "Can't find a sheet named %s" % name)
    return self.__sheetNameDict[name]
python
{ "resource": "" }
q38496
ExcelWrite.writeCell
train
def writeCell(self, row, col, value):
    ''' Write ``value`` into one cell of the active sheet. '''
    # fall back to the default sheet when none has been opened yet
    if self.__sheet is None:
        self.openSheet(super(ExcelWrite, self).DEFAULT_SHEET)
    sheet = self.__sheet
    sheet.write(row, col, value)
python
{ "resource": "" }
q38497
ExcelWrite.writeRow
train
def writeRow(self, row, values):
    ''' Write a sequence of values across row ``row``, one per column.

    (xlwt may not support writing the same cell twice, so callers
    should write each row only once.)
    '''
    if self.__sheet is None:
        self.openSheet(super(ExcelWrite, self).DEFAULT_SHEET)
    sheet = self.__sheet
    col = 0
    for value in values:
        sheet.write(row, col, value)
        col += 1
python
{ "resource": "" }
q38498
ExcelRead.readCell
train
def readCell(self, row, col):
    ''' Read a single cell value from the active sheet.

    Opens the default sheet first if none is active. Any error is
    wrapped in a UfException with code UNKNOWN_ERROR.
    '''
    try:
        if self.__sheet is None:
            self.openSheet(super(ExcelRead, self).DEFAULT_SHEET)
        return self.__sheet.cell(row, col).value
    except Exception as excp:
        # Catch Exception rather than BaseException so that
        # KeyboardInterrupt/SystemExit are not swallowed and rewrapped.
        raise UfException(Errors.UNKNOWN_ERROR,
                          "Unknown Error in Excellib.readCell %s" % excp)
python
{ "resource": "" }
q38499
Cache.delete_url
train
def delete_url(self, url): """ Delete local files downloaded from given URL """ # file may exist locally in compressed and decompressed states # delete both for decompress in [False, True]: key = (url, decompress) if key in self._local_paths: path = self._local_paths[key] remove(path) del self._local_paths[key] # possible that file was downloaded via the download module without # using the Cache object, this wouldn't end up in the local_paths # but should still be deleted path = self.local_path( url, decompress=decompress, download=False) if exists(path): remove(path)
python
{ "resource": "" }