sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def create (self, name, *args):
"""Creates a new command with the given arguments."""
tab = None
defn = self.get(name, None)
if defn:
tab = FSWTab(defn, *args)
return tab | Creates a new command with the given arguments. | entailment |
def load (self, filename):
"""Loads Command Definitions from the given YAML file into this
Command Dictionary.
"""
if self.filename is None:
self.filename = filename
stream = open(self.filename, "rb")
for doc in yaml.load_all(stream):
for table in doc:
self.add(table)
stream.close() | Loads Command Definitions from the given YAML file into this
Command Dictionary. | entailment |
def publish(self, msg):
"""
Publishes input message with client name as topic.
"""
self.pub.send("{} {}".format(self.name, msg))
log.debug('Published message from {}'.format(self)) | Publishes input message with client name as topic. | entailment |
def create(database, tlmdict=None):
"""Creates a new database for the given Telemetry Dictionary and
returns a connection to it.
"""
if tlmdict is None:
tlmdict = tlm.getDefaultDict()
dbconn = connect(database)
for name, defn in tlmdict.items():
createTable(dbconn, defn)
return dbconn | Creates a new database for the given Telemetry Dictionary and
returns a connection to it. | entailment |
def createTable(dbconn, pd):
"""Creates a database table for the given PacketDefinition."""
cols = ('%s %s' % (defn.name, getTypename(defn)) for defn in pd.fields)
sql = 'CREATE TABLE IF NOT EXISTS %s (%s)' % (pd.name, ', '.join(cols))
dbconn.execute(sql)
dbconn.commit() | Creates a database table for the given PacketDefinition. | entailment |
def insert(dbconn, packet):
"""Inserts the given packet into the connected database."""
values = [ ]
pd = packet._defn
for defn in pd.fields:
if defn.enum:
val = getattr(packet.raw, defn.name)
else:
val = getattr(packet, defn.name)
if val is None and defn.name in pd.history:
val = getattr(packet.history, defn.name)
values.append(val)
qmark = ['?'] * len(values)
sql = 'INSERT INTO %s VALUES (%s)' % (pd.name, ', '.join(qmark))
dbconn.execute(sql, values) | Inserts the given packet into the connected database. | entailment |
def use(backend):
"""Use the given database backend, e.g. 'MySQLdb', 'psycopg2',
'MySQLdb', etc.
"""
global Backend
try:
Backend = importlib.import_module(backend)
except ImportError:
msg = 'Could not import (load) database.backend: %s' % backend
raise cfg.AitConfigError(msg) | Use the given database backend, e.g. 'MySQLdb', 'psycopg2',
'MySQLdb', etc. | entailment |
def connect(self, **kwargs):
''' Connect to an InfluxDB instance
Connects to an InfluxDB instance and switches to a given database.
If the database doesn't exist it is created first via :func:`create`.
**Configuration Parameters**
host
The host for the connection. Passed as either the config key
**database.host** or the kwargs argument **host**. Defaults to
**localhost**.
port
The port for the connection. Passed as either the config key
**database.port** or the kwargs argument **port**. Defaults to
**8086**.
un
The un for the connection. Passed as either the config key
**database.un** or the kwargs argument **un**. Defaults to
**root**.
pw
The pw for the connection. Passed as either the config key
**database.pw** or the kwargs argument **pw**. Defaults to
**pw**.
database name
The database name for the connection. Passed as either
the config key **database.dbname** or the kwargs argument
**database**. Defaults to **ait**.
'''
host = ait.config.get('database.host', kwargs.get('host', 'localhost'))
port = ait.config.get('database.port', kwargs.get('port', 8086))
un = ait.config.get('database.un', kwargs.get('un', 'root'))
pw = ait.config.get('database.pw', kwargs.get('pw', 'root'))
dbname = ait.config.get('database.dbname', kwargs.get('database', 'ait'))
self._conn = self._backend.InfluxDBClient(host, port, un, pw)
if dbname not in [v['name'] for v in self._conn.get_list_database()]:
self.create(database=dbname)
self._conn.switch_database(dbname) | Connect to an InfluxDB instance
Connects to an InfluxDB instance and switches to a given database.
If the database doesn't exist it is created first via :func:`create`.
**Configuration Parameters**
host
The host for the connection. Passed as either the config key
**database.host** or the kwargs argument **host**. Defaults to
**localhost**.
port
The port for the connection. Passed as either the config key
**database.port** or the kwargs argument **port**. Defaults to
**8086**.
un
The un for the connection. Passed as either the config key
**database.un** or the kwargs argument **un**. Defaults to
**root**.
pw
The pw for the connection. Passed as either the config key
**database.pw** or the kwargs argument **pw**. Defaults to
**pw**.
database name
The database name for the connection. Passed as either
the config key **database.dbname** or the kwargs argument
**database**. Defaults to **ait**. | entailment |
def create(self, **kwargs):
''' Create a database in a connected InfluxDB instance
**Configuration Parameters**
database name
The database name to create. Passed as either the config
key **database.dbname** or the kwargs argument
**database**. Defaults to **ait**.
Raises:
AttributeError:
If a connection to the database doesn't exist
'''
dbname = ait.config.get('database.dbname', kwargs.get('database', 'ait'))
if self._conn is None:
raise AttributeError('Unable to create database. No connection to database exists.')
self._conn.create_database(dbname)
self._conn.switch_database(dbname) | Create a database in a connected InfluxDB instance
**Configuration Parameters**
database name
The database name to create. Passed as either the config
key **database.dbname** or the kwargs argument
**database**. Defaults to **ait**.
Raises:
AttributeError:
If a connection to the database doesn't exist | entailment |
def insert(self, packet, time=None, **kwargs):
''' Insert a packet into the database
Arguments
packet
The :class:`ait.core.tlm.Packet` instance to insert into
the database
time
Optional parameter specifying the time value to use when inserting
the record into the database. Default case does not provide a time
value so Influx defaults to the current time when inserting the
record.
tags
Optional kwargs argument for specifying a dictionary of tags to
include when adding the values. Defaults to nothing.
'''
fields = {}
pd = packet._defn
for defn in pd.fields:
val = getattr(packet.raw, defn.name)
if pd.history and defn.name in pd.history:
val = getattr(packet.history, defn.name)
if val is not None and not (isinstance(val, float) and math.isnan(val)):
fields[defn.name] = val
if len(fields) == 0:
log.error('No fields present to insert into Influx')
return
tags = kwargs.get('tags', {})
if isinstance(time, dt.datetime):
time = time.strftime("%Y-%m-%dT%H:%M:%S")
data = {
'measurement': pd.name,
'tags': tags,
'fields': fields
}
if time:
data['time'] = time
self._conn.write_points([data]) | Insert a packet into the database
Arguments
packet
The :class:`ait.core.tlm.Packet` instance to insert into
the database
time
Optional parameter specifying the time value to use when inserting
the record into the database. Default case does not provide a time
value so Influx defaults to the current time when inserting the
record.
tags
Optional kwargs argument for specifying a dictionary of tags to
include when adding the values. Defaults to nothing. | entailment |
def create_packets_from_results(self, packet_name, result_set):
''' Generate AIT Packets from a InfluxDB query ResultSet
Extract Influx DB query results into one packet per result entry. This
assumes that telemetry data was inserted in the format generated by
:func:`InfluxDBBackend.insert`. Complex types such as CMD16 and EVR16 are
evaluated if they can be properly encoded from the raw value in the
query result. If there is no opcode / EVR-code for a particular raw
value the value is skipped (and thus defaulted to 0).
Arguments
packet_name (string)
The name of the AIT Packet to create from each result entry
result_set (influxdb.resultset.ResultSet)
The query ResultSet object to convert into packets
Returns
A list of packets extracted from the ResultSet object or None if
an invalid packet name is supplied.
'''
try:
pkt_defn = tlm.getDefaultDict()[packet_name]
except KeyError:
log.error('Unknown packet name {} Unable to unpack ResultSet'.format(packet_name))
return None
pkt = tlm.Packet(pkt_defn)
pkts = []
for r in result_set.get_points():
new_pkt = tlm.Packet(pkt_defn)
for f, f_defn in pkt_defn.fieldmap.iteritems():
field_type_name = f_defn.type.name
if field_type_name == 'CMD16':
if cmd.getDefaultDict().opcodes.get(r[f], None):
setattr(new_pkt, f, cmd_def.name)
elif field_type_name == 'EVR16':
if evr.getDefaultDict().codes.get(r[f], None):
setattr(new_pkt, f, r[f])
elif field_type_name == 'TIME8':
setattr(new_pkt, f, r[f] / 256.0)
elif field_type_name == 'TIME32':
new_val = dmc.GPS_Epoch + dt.timedelta(seconds=r[f])
setattr(new_pkt, f, new_val)
elif field_type_name == 'TIME40':
sec = int(r[f])
microsec = r[f] * 1e6
new_val = dmc.GPS_Epoch + dt.timedelta(seconds=sec, microseconds=microsec)
setattr(new_pkt, f, new_val)
elif field_type_name == 'TIME64':
sec = int(r[f])
microsec = r[f] % 1 * 1e6
new_val = dmc.GPS_Epoch + dt.timedelta(seconds=sec, microseconds=microsec)
setattr(new_pkt, f, new_val)
else:
try:
setattr(new_pkt, f, r[f])
except KeyError:
log.info('Field not found in query results {} Skipping ...'.format(f))
pkts.append(new_pkt)
return pkts | Generate AIT Packets from a InfluxDB query ResultSet
Extract Influx DB query results into one packet per result entry. This
assumes that telemetry data was inserted in the format generated by
:func:`InfluxDBBackend.insert`. Complex types such as CMD16 and EVR16 are
evaluated if they can be properly encoded from the raw value in the
query result. If there is no opcode / EVR-code for a particular raw
value the value is skipped (and thus defaulted to 0).
Arguments
packet_name (string)
The name of the AIT Packet to create from each result entry
result_set (influxdb.resultset.ResultSet)
The query ResultSet object to convert into packets
Returns
A list of packets extracted from the ResultSet object or None if
an invalid packet name is supplied. | entailment |
def connect(self, **kwargs):
''' Connect to a SQLite instance
**Configuration Parameters**
database
The database name or file to "connect" to. Defaults to **ait**.
'''
if 'database' not in kwargs:
kwargs['database'] = 'ait'
self._conn = self._backend.connect(kwargs['database']) | Connect to a SQLite instance
**Configuration Parameters**
database
The database name or file to "connect" to. Defaults to **ait**. | entailment |
def create(self, **kwargs):
''' Create a database for the current telemetry dictionary
Connects to a SQLite instance via :func:`connect` and creates a
skeleton database for future data inserts.
**Configuration Parameters**
tlmdict
The :class:`ait.core.tlm.TlmDict` instance to use. Defaults to
the currently configured telemetry dictionary.
'''
tlmdict = kwargs.get('tlmdict', tlm.getDefaultDict())
self.connect(**kwargs)
for name, defn in tlmdict.items():
self._create_table(defn) | Create a database for the current telemetry dictionary
Connects to a SQLite instance via :func:`connect` and creates a
skeleton database for future data inserts.
**Configuration Parameters**
tlmdict
The :class:`ait.core.tlm.TlmDict` instance to use. Defaults to
the currently configured telemetry dictionary. | entailment |
def _create_table(self, packet_defn):
''' Creates a database table for the given PacketDefinition
Arguments
packet_defn
The :class:`ait.core.tlm.PacketDefinition` instance for which a table entry
should be made.
'''
cols = ('%s %s' % (defn.name, self._getTypename(defn)) for defn in packet_defn.fields)
sql = 'CREATE TABLE IF NOT EXISTS %s (%s)' % (packet_defn.name, ', '.join(cols))
self._conn.execute(sql)
self._conn.commit() | Creates a database table for the given PacketDefinition
Arguments
packet_defn
The :class:`ait.core.tlm.PacketDefinition` instance for which a table entry
should be made. | entailment |
def insert(self, packet, **kwargs):
''' Insert a packet into the database
Arguments
packet
The :class:`ait.core.tlm.Packet` instance to insert into
the database
'''
values = [ ]
pd = packet._defn
for defn in pd.fields:
val = getattr(packet.raw, defn.name)
if val is None and defn.name in pd.history:
val = getattr(packet.history, defn.name)
values.append(val)
qmark = ['?'] * len(values)
sql = 'INSERT INTO %s VALUES (%s)' % (pd.name, ', '.join(qmark))
self._conn.execute(sql, values) | Insert a packet into the database
Arguments
packet
The :class:`ait.core.tlm.Packet` instance to insert into
the database | entailment |
def _getTypename(self, defn):
""" Returns the SQL typename required to store the given FieldDefinition """
return 'REAL' if defn.type.float or 'TIME' in defn.type.name or defn.dntoeu else 'INTEGER' | Returns the SQL typename required to store the given FieldDefinition | entailment |
def genCubeVector(x, y, z, x_mult=1, y_mult=1, z_mult=1):
"""Generates a map of vector lengths from the center point to each coordinate
x - width of matrix to generate
y - height of matrix to generate
z - depth of matrix to generate
x_mult - value to scale x-axis by
y_mult - value to scale y-axis by
z_mult - value to scale z-axis by
"""
cX = (x - 1) / 2.0
cY = (y - 1) / 2.0
cZ = (z - 1) / 2.0
def vect(_x, _y, _z):
return int(math.sqrt(math.pow(_x - cX, 2 * x_mult) +
math.pow(_y - cY, 2 * y_mult) +
math.pow(_z - cZ, 2 * z_mult)))
return [[[vect(_x, _y, _z) for _z in range(z)] for _y in range(y)] for _x in range(x)] | Generates a map of vector lengths from the center point to each coordinate
x - width of matrix to generate
y - height of matrix to generate
z - depth of matrix to generate
x_mult - value to scale x-axis by
y_mult - value to scale y-axis by
z_mult - value to scale z-axis by | entailment |
def adjust_datetime_to_timezone(value, from_tz, to_tz=None):
"""
Given a ``datetime`` object adjust it according to the from_tz timezone
string into the to_tz timezone string.
"""
if to_tz is None:
to_tz = settings.TIME_ZONE
if value.tzinfo is None:
if not hasattr(from_tz, "localize"):
from_tz = pytz.timezone(smart_str(from_tz))
value = from_tz.localize(value)
return value.astimezone(pytz.timezone(smart_str(to_tz))) | Given a ``datetime`` object adjust it according to the from_tz timezone
string into the to_tz timezone string. | entailment |
def get_db_prep_save(self, value, connection=None):
"""
Returns field's value prepared for saving into a database.
"""
## convert to settings.TIME_ZONE
if value is not None:
if value.tzinfo is None:
value = default_tz.localize(value)
else:
value = value.astimezone(default_tz)
return super(LocalizedDateTimeField, self).get_db_prep_save(value, connection=connection) | Returns field's value prepared for saving into a database. | entailment |
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=None):
"""
Returns field's value prepared for database lookup.
"""
## convert to settings.TIME_ZONE
if value.tzinfo is None:
value = default_tz.localize(value)
else:
value = value.astimezone(default_tz)
return super(LocalizedDateTimeField, self).get_db_prep_lookup(lookup_type, value, connection=connection, prepared=prepared) | Returns field's value prepared for database lookup. | entailment |
def liveNeighbours(self, z, y, x):
"""Returns the number of live neighbours."""
count = 0
for oz, oy, ox in self.offsets:
cz, cy, cx = z + oz, y + oy, x + ox
if cz >= self.depth:
cz = 0
if cy >= self.height:
cy = 0
if cx >= self.width:
cx = 0
count += self.table[cz][cy][cx]
# if self.toroidal:
# if y == 0:
# if self.table[self.height - 1][x]:
# count = count + 1
# if y == self.height - 1:
# if self.table[0][x]:
# count = count + 1
# if x == 0:
# if self.table[y][self.width - 1]:
# count = count + 1
# if x == self.width - 1:
# if self.table[y][0]:
# count = count + 1
return count | Returns the number of live neighbours. | entailment |
def turn(self):
"""Turn"""
r = (4, 5, 5, 5)
nt = copy.deepcopy(self.table)
for z in range(self.depth):
for y in range(self.height):
for x in range(self.width):
neighbours = self.liveNeighbours(z, y, x)
if self.table[z][y][x] == 0 and (neighbours > r[0] and neighbours <= r[1]):
nt[z][y][x] = 1
elif self.table[z][y][x] == 1 and (neighbours > r[2] and neighbours < r[3]):
nt[z][y][x] = 0
self._oldStates.append(self.table)
if len(self._oldStates) > 3:
self._oldStates.popleft()
self.table = nt | Turn | entailment |
def make_bd(self):
"Make a set of 'shaped' random #'s for particle brightness deltas (bd)"
self.bd = concatenate((
# These values will dim the particles
random.normal(
self.bd_mean - self.bd_mu, self.bd_sigma, 16).astype(int),
# These values will brighten the particles
random.normal(
self.bd_mean + self.bd_mu, self.bd_sigma, 16).astype(int)),
axis=0) | Make a set of 'shaped' random #'s for particle brightness deltas (bd) | entailment |
def make_vel(self):
"Make a set of velocities to be randomly chosen for emitted particles"
self.vel = random.normal(self.vel_mu, self.vel_sigma, 16)
# Make sure nothing's slower than 1/8 pixel / step
for i, vel in enumerate(self.vel):
if abs(vel) < 0.125 / self._size:
if vel < 0:
self.vel[i] = -0.125 / self._size
else:
self.vel[i] = 0.125 / self._size | Make a set of velocities to be randomly chosen for emitted particles | entailment |
def move_particles(self):
"""
Move each particle by it's velocity, adjusting brightness as we go.
Particles that have moved beyond their range (steps to live), and
those that move off the ends and are not wrapped get sacked.
Particles can stay between _end and up to but not including _end+1
No particles can exitst before start without wrapping.
"""
moved_particles = []
for vel, pos, stl, color, bright in self.particles:
stl -= 1 # steps to live
if stl > 0:
pos = pos + vel
if vel > 0:
if pos >= (self._end + 1):
if self.wrap:
pos = pos - (self._end + 1) + self._start
else:
continue # Sacked
else:
if pos < self._start:
if self.wrap:
pos = pos + self._end + 1 + self._start
else:
continue # Sacked
if random.random() < self.step_flare_prob:
bright = 255
else:
bright = bright + random.choice(self.bd)
if bright > 255:
bright = 255
# Zombie particles with bright<=0 walk, don't -overflow
if bright < -10000:
bright = -10000
moved_particles.append((vel, pos, stl, color, bright))
self.particles = moved_particles | Move each particle by it's velocity, adjusting brightness as we go.
Particles that have moved beyond their range (steps to live), and
those that move off the ends and are not wrapped get sacked.
Particles can stay between _end and up to but not including _end+1
No particles can exitst before start without wrapping. | entailment |
def move_emitters(self):
"""
Move each emitter by it's velocity. Emmitters that move off the ends
and are not wrapped get sacked.
"""
moved_emitters = []
for e_pos, e_dir, e_vel, e_range, e_color, e_pal in self.emitters:
e_pos = e_pos + e_vel
if e_vel > 0:
if e_pos >= (self._end + 1):
if self.wrap:
e_pos = e_pos - (self._end + 1) + self._start
else:
continue # Sacked
else:
if e_pos < self._start:
if self.wrap:
e_pos = e_pos + self._end + 1 + self._start
else:
continue # Sacked
moved_emitters.append(
(e_pos, e_dir, e_vel, e_range, e_color, e_pal))
self.emitters = moved_emitters | Move each emitter by it's velocity. Emmitters that move off the ends
and are not wrapped get sacked. | entailment |
def start_new_particles(self):
"""
Start some new particles from the emitters. We roll the dice
starts_at_once times, seeing if we can start each particle based
on starts_prob. If we start, the particle gets a color form
the palette and a velocity from the vel list.
"""
for e_pos, e_dir, e_vel, e_range, e_color, e_pal in self.emitters:
for roll in range(self.starts_at_once):
if random.random() < self.starts_prob: # Start one?
p_vel = self.vel[random.choice(len(self.vel))]
if e_dir < 0 or e_dir == 0 and random.random() > 0.5:
p_vel = -p_vel
self.particles.append((
p_vel, # Velocity
e_pos, # Position
int(e_range // abs(p_vel)), # steps to live
e_pal[
random.choice(len(e_pal))], # Color
255)) | Start some new particles from the emitters. We roll the dice
starts_at_once times, seeing if we can start each particle based
on starts_prob. If we start, the particle gets a color form
the palette and a velocity from the vel list. | entailment |
def visibility(self, strip_pos, particle_pos):
"""
Compute particle visibility based on distance between current
strip position being rendered and particle position. A value
of 0.0 is returned if they are >= one aperture away, values
between 0.0 and 1.0 are returned if they are less than one
aperature apart.
"""
dist = abs(particle_pos - strip_pos)
if dist > self.half_size:
dist = self._size - dist
if dist < self.aperture:
return (self.aperture - dist) / self.aperture
else:
return 0 | Compute particle visibility based on distance between current
strip position being rendered and particle position. A value
of 0.0 is returned if they are >= one aperture away, values
between 0.0 and 1.0 are returned if they are less than one
aperature apart. | entailment |
def render_particles(self):
"""
Render visible particles at each strip position, by modifying
the strip's color list.
"""
for strip_pos in range(self._start, self._end + 1):
blended = COLORS.black
# Render visible emitters
if self.has_e_colors:
for (e_pos, e_dir, e_vel, e_range,
e_color, e_pal) in self.emitters:
if e_color is not None:
vis = self.visibility(strip_pos, e_pos)
if vis > 0:
blended = color_blend(
blended,
color_scale(e_color, int(vis * 255)))
# Render visible particles
for vel, pos, stl, color, bright in self.particles:
vis = self.visibility(strip_pos, pos)
if vis > 0 and bright > 0:
blended = color_blend(
blended,
color_scale(color, int(vis * bright)))
# Add background if showing
if (blended == COLORS.black):
blended = self.bgcolor
self.color_list[strip_pos] = blended | Render visible particles at each strip position, by modifying
the strip's color list. | entailment |
def step(self, amt=1):
"Make a frame of the animation"
self.move_particles()
if self.has_moving_emitters:
self.move_emitters()
self.start_new_particles()
self.render_particles()
if self.emitters == [] and self.particles == []:
self.completed = True | Make a frame of the animation | entailment |
def complete(self):
"""is the game over?"""
if None not in [v for v in self.squares]:
return True
if self.winner() is not None:
return True
return False | is the game over? | entailment |
def get_squares(self, player=None):
"""squares that belong to a player"""
if player:
return [k for k, v in enumerate(self.squares) if v == player]
else:
return self.squares | squares that belong to a player | entailment |
def liveNeighbours(self, y, x):
"""Returns the number of live neighbours."""
count = 0
if y > 0:
if self.table[y - 1][x]:
count = count + 1
if x > 0:
if self.table[y - 1][x - 1]:
count = count + 1
if self.width > (x + 1):
if self.table[y - 1][x + 1]:
count = count + 1
if x > 0:
if self.table[y][x - 1]:
count = count + 1
if self.width > (x + 1):
if self.table[y][x + 1]:
count = count + 1
if self.height > (y + 1):
if self.table[y + 1][x]:
count = count + 1
if x > 0:
if self.table[y + 1][x - 1]:
count = count + 1
if self.width > (x + 1):
if self.table[y + 1][x + 1]:
count = count + 1
if self.toroidal:
if y == 0:
if self.table[self.height - 1][x]:
count = count + 1
if y == self.height - 1:
if self.table[0][x]:
count = count + 1
if x == 0:
if self.table[y][self.width - 1]:
count = count + 1
if x == self.width - 1:
if self.table[y][0]:
count = count + 1
return count | Returns the number of live neighbours. | entailment |
def makeGly(segID, N, CA, C, O, geo):
'''Creates a Glycine residue'''
##Create Residue Data Structure
res= Residue((' ', segID, ' '), "GLY", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
##print(res)
return res | Creates a Glycine residue | entailment |
def makeAla(segID, N, CA, C, O, geo):
'''Creates an Alanine residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
##Create Residue Data Structure
res = Residue((' ', segID, ' '), "ALA", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
return res | Creates an Alanine residue | entailment |
def makeSer(segID, N, CA, C, O, geo):
'''Creates a Serine residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_OG_length=geo.CB_OG_length
CA_CB_OG_angle=geo.CA_CB_OG_angle
N_CA_CB_OG_diangle=geo.N_CA_CB_OG_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
oxygen_g= calculateCoordinates(N, CA, CB, CB_OG_length, CA_CB_OG_angle, N_CA_CB_OG_diangle)
OG= Atom("OG", oxygen_g, 0.0, 1.0, " ", " OG", 0, "O")
##Create Reside Data Structure
res= Residue((' ', segID, ' '), "SER", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(OG)
##print(res)
return res | Creates a Serine residue | entailment |
def makeCys(segID, N, CA, C, O, geo):
'''Creates a Cysteine residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_SG_length= geo.CB_SG_length
CA_CB_SG_angle= geo.CA_CB_SG_angle
N_CA_CB_SG_diangle= geo.N_CA_CB_SG_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
sulfur_g= calculateCoordinates(N, CA, CB, CB_SG_length, CA_CB_SG_angle, N_CA_CB_SG_diangle)
SG= Atom("SG", sulfur_g, 0.0, 1.0, " ", " SG", 0, "S")
##Create Residue Data Structure
res= Residue((' ', segID, ' '), "CYS", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(SG)
return res | Creates a Cysteine residue | entailment |
def makeVal(segID, N, CA, C, O, geo):
'''Creates a Valine residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG1_length=geo.CB_CG1_length
CA_CB_CG1_angle=geo.CA_CB_CG1_angle
N_CA_CB_CG1_diangle=geo.N_CA_CB_CG1_diangle
CB_CG2_length=geo.CB_CG2_length
CA_CB_CG2_angle=geo.CA_CB_CG2_angle
N_CA_CB_CG2_diangle=geo.N_CA_CB_CG2_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g1= calculateCoordinates(N, CA, CB, CB_CG1_length, CA_CB_CG1_angle, N_CA_CB_CG1_diangle)
CG1= Atom("CG1", carbon_g1, 0.0, 1.0, " ", " CG1", 0, "C")
carbon_g2= calculateCoordinates(N, CA, CB, CB_CG2_length, CA_CB_CG2_angle, N_CA_CB_CG2_diangle)
CG2= Atom("CG2", carbon_g2, 0.0, 1.0, " ", " CG2", 0, "C")
##Create Residue Data Structure
res= Residue((' ', segID, ' '), "VAL", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG1)
res.add(CG2)
return res | Creates a Valine residue | entailment |
def makeIle(segID, N, CA, C, O, geo):
'''Creates an Isoleucine residue'''
##R-group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG1_length=geo.CB_CG1_length
CA_CB_CG1_angle=geo.CA_CB_CG1_angle
N_CA_CB_CG1_diangle=geo.N_CA_CB_CG1_diangle
CB_CG2_length=geo.CB_CG2_length
CA_CB_CG2_angle=geo.CA_CB_CG2_angle
N_CA_CB_CG2_diangle= geo.N_CA_CB_CG2_diangle
CG1_CD1_length= geo.CG1_CD1_length
CB_CG1_CD1_angle= geo.CB_CG1_CD1_angle
CA_CB_CG1_CD1_diangle= geo.CA_CB_CG1_CD1_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g1= calculateCoordinates(N, CA, CB, CB_CG1_length, CA_CB_CG1_angle, N_CA_CB_CG1_diangle)
CG1= Atom("CG1", carbon_g1, 0.0, 1.0, " ", " CG1", 0, "C")
carbon_g2= calculateCoordinates(N, CA, CB, CB_CG2_length, CA_CB_CG2_angle, N_CA_CB_CG2_diangle)
CG2= Atom("CG2", carbon_g2, 0.0, 1.0, " ", " CG2", 0, "C")
carbon_d1= calculateCoordinates(CA, CB, CG1, CG1_CD1_length, CB_CG1_CD1_angle, CA_CB_CG1_CD1_diangle)
CD1= Atom("CD1", carbon_d1, 0.0, 1.0, " ", " CD1", 0, "C")
##Create Residue Data Structure
res= Residue((' ', segID, ' '), "ILE", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG1)
res.add(CG2)
res.add(CD1)
return res | Creates an Isoleucine residue | entailment |
def makeLeu(segID, N, CA, C, O, geo):
'''Creates a Leucine residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG_length=geo.CB_CG_length
CA_CB_CG_angle= geo.CA_CB_CG_angle
N_CA_CB_CG_diangle=geo.N_CA_CB_CG_diangle
CG_CD1_length=geo.CG_CD1_length
CB_CG_CD1_angle=geo.CB_CG_CD1_angle
CA_CB_CG_CD1_diangle=geo.CA_CB_CG_CD1_diangle
CG_CD2_length=geo.CG_CD2_length
CB_CG_CD2_angle=geo.CB_CG_CD2_angle
CA_CB_CG_CD2_diangle=geo.CA_CB_CG_CD2_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g1= calculateCoordinates(N, CA, CB, CB_CG_length, CA_CB_CG_angle, N_CA_CB_CG_diangle)
CG= Atom("CG", carbon_g1, 0.0, 1.0, " ", " CG", 0, "C")
carbon_d1= calculateCoordinates(CA, CB, CG, CG_CD1_length, CB_CG_CD1_angle, CA_CB_CG_CD1_diangle)
CD1= Atom("CD1", carbon_d1, 0.0, 1.0, " ", " CD1", 0, "C")
carbon_d2= calculateCoordinates(CA, CB, CG, CG_CD2_length, CB_CG_CD2_angle, CA_CB_CG_CD2_diangle)
CD2= Atom("CD2", carbon_d2, 0.0, 1.0, " ", " CD2", 0, "C")
##Create Residue Data Structure
res= Residue((' ', segID, ' '), "LEU", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG)
res.add(CD1)
res.add(CD2)
return res | Creates a Leucine residue | entailment |
def makeThr(segID, N, CA, C, O, geo):
'''Creates a Threonine residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_OG1_length=geo.CB_OG1_length
CA_CB_OG1_angle=geo.CA_CB_OG1_angle
N_CA_CB_OG1_diangle=geo.N_CA_CB_OG1_diangle
CB_CG2_length=geo.CB_CG2_length
CA_CB_CG2_angle=geo.CA_CB_CG2_angle
N_CA_CB_CG2_diangle= geo.N_CA_CB_CG2_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
oxygen_g1= calculateCoordinates(N, CA, CB, CB_OG1_length, CA_CB_OG1_angle, N_CA_CB_OG1_diangle)
OG1= Atom("OG1", oxygen_g1, 0.0, 1.0, " ", " OG1", 0, "O")
carbon_g2= calculateCoordinates(N, CA, CB, CB_CG2_length, CA_CB_CG2_angle, N_CA_CB_CG2_diangle)
CG2= Atom("CG2", carbon_g2, 0.0, 1.0, " ", " CG2", 0, "C")
##Create Residue Data Structure
res= Residue((' ', segID, ' '), "THR", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(OG1)
res.add(CG2)
return res | Creates a Threonine residue | entailment |
def makeArg(segID, N, CA, C, O, geo):
'''Creates an Arginie residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG_length=geo.CB_CG_length
CA_CB_CG_angle= geo.CA_CB_CG_angle
N_CA_CB_CG_diangle=geo.N_CA_CB_CG_diangle
CG_CD_length=geo.CG_CD_length
CB_CG_CD_angle=geo.CB_CG_CD_angle
CA_CB_CG_CD_diangle=geo.CA_CB_CG_CD_diangle
CD_NE_length=geo.CD_NE_length
CG_CD_NE_angle=geo.CG_CD_NE_angle
CB_CG_CD_NE_diangle=geo.CB_CG_CD_NE_diangle
NE_CZ_length=geo.NE_CZ_length
CD_NE_CZ_angle=geo.CD_NE_CZ_angle
CG_CD_NE_CZ_diangle=geo.CG_CD_NE_CZ_diangle
CZ_NH1_length=geo.CZ_NH1_length
NE_CZ_NH1_angle=geo.NE_CZ_NH1_angle
CD_NE_CZ_NH1_diangle=geo.CD_NE_CZ_NH1_diangle
CZ_NH2_length=geo.CZ_NH2_length
NE_CZ_NH2_angle=geo.NE_CZ_NH2_angle
CD_NE_CZ_NH2_diangle=geo.CD_NE_CZ_NH2_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g= calculateCoordinates(N, CA, CB, CB_CG_length, CA_CB_CG_angle, N_CA_CB_CG_diangle)
CG= Atom("CG", carbon_g, 0.0, 1.0, " ", " CG", 0, "C")
carbon_d= calculateCoordinates(CA, CB, CG, CG_CD_length, CB_CG_CD_angle, CA_CB_CG_CD_diangle)
CD= Atom("CD", carbon_d, 0.0, 1.0, " ", " CD", 0, "C")
nitrogen_e= calculateCoordinates(CB, CG, CD, CD_NE_length, CG_CD_NE_angle, CB_CG_CD_NE_diangle)
NE= Atom("NE", nitrogen_e, 0.0, 1.0, " ", " NE", 0, "N")
carbon_z= calculateCoordinates(CG, CD, NE, NE_CZ_length, CD_NE_CZ_angle, CG_CD_NE_CZ_diangle)
CZ= Atom("CZ", carbon_z, 0.0, 1.0, " ", " CZ", 0, "C")
nitrogen_h1= calculateCoordinates(CD, NE, CZ, CZ_NH1_length, NE_CZ_NH1_angle, CD_NE_CZ_NH1_diangle)
NH1= Atom("NH1", nitrogen_h1, 0.0, 1.0, " ", " NH1", 0, "N")
nitrogen_h2= calculateCoordinates(CD, NE, CZ, CZ_NH2_length, NE_CZ_NH2_angle, CD_NE_CZ_NH2_diangle)
NH2= Atom("NH2", nitrogen_h2, 0.0, 1.0, " ", " NH2", 0, "N")
##Create Residue Data Structure
res= Residue((' ', segID, ' '), "ARG", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG)
res.add(CD)
res.add(NE)
res.add(CZ)
res.add(NH1)
res.add(NH2)
return res | Creates an Arginie residue | entailment |
def makeLys(segID, N, CA, C, O, geo):
'''Creates a Lysine residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG_length=geo.CB_CG_length
CA_CB_CG_angle=geo.CA_CB_CG_angle
N_CA_CB_CG_diangle=geo.N_CA_CB_CG_diangle
CG_CD_length=geo.CG_CD_length
CB_CG_CD_angle=geo.CB_CG_CD_angle
CA_CB_CG_CD_diangle=geo.CA_CB_CG_CD_diangle
CD_CE_length=geo.CD_CE_length
CG_CD_CE_angle=geo.CG_CD_CE_angle
CB_CG_CD_CE_diangle=geo.CB_CG_CD_CE_diangle
CE_NZ_length=geo.CE_NZ_length
CD_CE_NZ_angle=geo.CD_CE_NZ_angle
CG_CD_CE_NZ_diangle=geo.CG_CD_CE_NZ_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g= calculateCoordinates(N, CA, CB, CB_CG_length, CA_CB_CG_angle, N_CA_CB_CG_diangle)
CG= Atom("CG", carbon_g, 0.0, 1.0, " ", " CG", 0, "C")
carbon_d= calculateCoordinates(CA, CB, CG, CG_CD_length, CB_CG_CD_angle, CA_CB_CG_CD_diangle)
CD= Atom("CD", carbon_d, 0.0, 1.0, " ", " CD", 0, "C")
carbon_e= calculateCoordinates(CB, CG, CD, CD_CE_length, CG_CD_CE_angle, CB_CG_CD_CE_diangle)
CE= Atom("CE", carbon_e, 0.0, 1.0, " ", " CE", 0, "C")
nitrogen_z= calculateCoordinates(CG, CD, CE, CE_NZ_length, CD_CE_NZ_angle, CG_CD_CE_NZ_diangle)
NZ= Atom("NZ", nitrogen_z, 0.0, 1.0, " ", " NZ", 0, "N")
##Create Residue Data Structure
res= Residue((' ', segID, ' '), "LYS", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG)
res.add(CD)
res.add(CE)
res.add(NZ)
return res | Creates a Lysine residue | entailment |
def makeAsp(segID, N, CA, C, O, geo):
'''Creates an Aspartic Acid residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG_length=geo.CB_CG_length
CA_CB_CG_angle=geo.CA_CB_CG_angle
N_CA_CB_CG_diangle=geo.N_CA_CB_CG_diangle
CG_OD1_length=geo.CG_OD1_length
CB_CG_OD1_angle=geo.CB_CG_OD1_angle
CA_CB_CG_OD1_diangle=geo.CA_CB_CG_OD1_diangle
CG_OD2_length=geo.CG_OD2_length
CB_CG_OD2_angle=geo.CB_CG_OD2_angle
CA_CB_CG_OD2_diangle=geo.CA_CB_CG_OD2_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g= calculateCoordinates(N, CA, CB, CB_CG_length, CA_CB_CG_angle, N_CA_CB_CG_diangle)
CG= Atom("CG", carbon_g, 0.0, 1.0, " ", " CG", 0, "C")
oxygen_d1= calculateCoordinates(CA, CB, CG, CG_OD1_length, CB_CG_OD1_angle, CA_CB_CG_OD1_diangle)
OD1= Atom("OD1", oxygen_d1, 0.0, 1.0, " ", " OD1", 0, "O")
oxygen_d2= calculateCoordinates(CA, CB, CG, CG_OD2_length, CB_CG_OD2_angle, CA_CB_CG_OD2_diangle)
OD2= Atom("OD2", oxygen_d2, 0.0, 1.0, " ", " OD2", 0, "O")
##Create Residue Data Structure
res= Residue((' ', segID, ' '), "ASP", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG)
res.add(OD1)
res.add(OD2)
return res | Creates an Aspartic Acid residue | entailment |
def makeAsn(segID,N, CA, C, O, geo):
'''Creates an Asparagine residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG_length=geo.CB_CG_length
CA_CB_CG_angle=geo.CA_CB_CG_angle
N_CA_CB_CG_diangle=geo.N_CA_CB_CG_diangle
CG_OD1_length=geo.CG_OD1_length
CB_CG_OD1_angle=geo.CB_CG_OD1_angle
CA_CB_CG_OD1_diangle=geo.CA_CB_CG_OD1_diangle
CG_ND2_length=geo.CG_ND2_length
CB_CG_ND2_angle=geo.CB_CG_ND2_angle
CA_CB_CG_ND2_diangle=geo.CA_CB_CG_ND2_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g= calculateCoordinates(N, CA, CB, CB_CG_length, CA_CB_CG_angle, N_CA_CB_CG_diangle)
CG= Atom("CG", carbon_g, 0.0, 1.0, " ", " CG", 0, "C")
oxygen_d1= calculateCoordinates(CA, CB, CG, CG_OD1_length, CB_CG_OD1_angle, CA_CB_CG_OD1_diangle)
OD1= Atom("OD1", oxygen_d1, 0.0, 1.0, " ", " OD1", 0, "O")
nitrogen_d2= calculateCoordinates(CA, CB, CG, CG_ND2_length, CB_CG_ND2_angle, CA_CB_CG_ND2_diangle)
ND2= Atom("ND2", nitrogen_d2, 0.0, 1.0, " ", " ND2", 0, "N")
res= Residue((' ', segID, ' '), "ASN", ' ')
##Create Residue Data Structure
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG)
res.add(OD1)
res.add(ND2)
return res | Creates an Asparagine residue | entailment |
def makeGlu(segID, N, CA, C, O, geo):
'''Creates a Glutamic Acid residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle = geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG_length=geo.CB_CG_length
CA_CB_CG_angle=geo.CA_CB_CG_angle
N_CA_CB_CG_diangle=geo.N_CA_CB_CG_diangle
CG_CD_length=geo.CG_CD_length
CB_CG_CD_angle=geo.CB_CG_CD_angle
CA_CB_CG_CD_diangle=geo.CA_CB_CG_CD_diangle
CD_OE1_length=geo.CD_OE1_length
CG_CD_OE1_angle=geo.CG_CD_OE1_angle
CB_CG_CD_OE1_diangle=geo.CB_CG_CD_OE1_diangle
CD_OE2_length=geo.CD_OE2_length
CG_CD_OE2_angle=geo.CG_CD_OE2_angle
CB_CG_CD_OE2_diangle=geo.CB_CG_CD_OE2_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g= calculateCoordinates(N, CA, CB, CB_CG_length, CA_CB_CG_angle, N_CA_CB_CG_diangle)
CG= Atom("CG", carbon_g, 0.0, 1.0, " ", " CG", 0, "C")
carbon_d= calculateCoordinates(CA, CB, CG, CG_CD_length, CB_CG_CD_angle, CA_CB_CG_CD_diangle)
CD= Atom("CD", carbon_d, 0.0, 1.0, " ", " CD", 0, "C")
oxygen_e1= calculateCoordinates(CB, CG, CD, CD_OE1_length, CG_CD_OE1_angle, CB_CG_CD_OE1_diangle)
OE1= Atom("OE1", oxygen_e1, 0.0, 1.0, " ", " OE1", 0, "O")
oxygen_e2= calculateCoordinates(CB, CG, CD, CD_OE2_length, CG_CD_OE2_angle, CB_CG_CD_OE2_diangle)
OE2= Atom("OE2", oxygen_e2, 0.0, 1.0, " ", " OE2", 0, "O")
##Create Residue Data Structure
res= Residue((' ', segID, ' '), "GLU", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG)
res.add(CD)
res.add(OE1)
res.add(OE2)
return res | Creates a Glutamic Acid residue | entailment |
def makeGln(segID, N, CA, C, O, geo):
'''Creates a Glutamine residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG_length=geo.CB_CG_length
CA_CB_CG_angle=geo.CA_CB_CG_angle
N_CA_CB_CG_diangle=geo.N_CA_CB_CG_diangle
CG_CD_length=geo.CG_CD_length
CB_CG_CD_angle=geo.CB_CG_CD_angle
CA_CB_CG_CD_diangle=geo.CA_CB_CG_CD_diangle
CD_OE1_length=geo.CD_OE1_length
CG_CD_OE1_angle=geo.CG_CD_OE1_angle
CB_CG_CD_OE1_diangle=geo.CB_CG_CD_OE1_diangle
CD_NE2_length=geo.CD_NE2_length
CG_CD_NE2_angle=geo.CG_CD_NE2_angle
CB_CG_CD_NE2_diangle=geo.CB_CG_CD_NE2_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g= calculateCoordinates(N, CA, CB, CB_CG_length, CA_CB_CG_angle, N_CA_CB_CG_diangle)
CG= Atom("CG", carbon_g, 0.0, 1.0, " ", " CG", 0, "C")
carbon_d= calculateCoordinates(CA, CB, CG, CG_CD_length, CB_CG_CD_angle, CA_CB_CG_CD_diangle)
CD= Atom("CD", carbon_d, 0.0, 1.0, " ", " CD", 0, "C")
oxygen_e1= calculateCoordinates(CB, CG, CD, CD_OE1_length, CG_CD_OE1_angle, CB_CG_CD_OE1_diangle)
OE1= Atom("OE1", oxygen_e1, 0.0, 1.0, " ", " OE1", 0, "O")
nitrogen_e2= calculateCoordinates(CB, CG, CD, CD_NE2_length, CG_CD_NE2_angle, CB_CG_CD_NE2_diangle)
NE2= Atom("NE2", nitrogen_e2, 0.0, 1.0, " ", " NE2", 0, "N")
##Create Residue DS
res= Residue((' ', segID, ' '), "GLN", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG)
res.add(CD)
res.add(OE1)
res.add(NE2)
return res | Creates a Glutamine residue | entailment |
def makeMet(segID, N, CA, C, O, geo):
'''Creates a Methionine residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG_length=geo.CB_CG_length
CA_CB_CG_angle=geo.CA_CB_CG_angle
N_CA_CB_CG_diangle=geo.N_CA_CB_CG_diangle
CG_SD_length=geo.CG_SD_length
CB_CG_SD_angle=geo.CB_CG_SD_angle
CA_CB_CG_SD_diangle=geo.CA_CB_CG_SD_diangle
SD_CE_length=geo.SD_CE_length
CG_SD_CE_angle=geo.CG_SD_CE_angle
CB_CG_SD_CE_diangle=geo.CB_CG_SD_CE_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g= calculateCoordinates(N, CA, CB, CB_CG_length, CA_CB_CG_angle, N_CA_CB_CG_diangle)
CG= Atom("CG", carbon_g, 0.0, 1.0, " ", " CG", 0, "C")
sulfur_d= calculateCoordinates(CA, CB, CG, CG_SD_length, CB_CG_SD_angle, CA_CB_CG_SD_diangle)
SD= Atom("SD", sulfur_d, 0.0, 1.0, " ", " SD", 0, "S")
carbon_e= calculateCoordinates(CB, CG, SD, SD_CE_length, CG_SD_CE_angle, CB_CG_SD_CE_diangle)
CE= Atom("CE", carbon_e, 0.0, 1.0, " ", " CE", 0, "C")
##Create Residue Data Structure
res= Residue((' ', segID, ' '), "MET", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG)
res.add(SD)
res.add(CE)
return res | Creates a Methionine residue | entailment |
def makeHis(segID, N, CA, C, O, geo):
'''Creates a Histidine residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG_length=geo.CB_CG_length
CA_CB_CG_angle=geo.CA_CB_CG_angle
N_CA_CB_CG_diangle=geo.N_CA_CB_CG_diangle
CG_ND1_length=geo.CG_ND1_length
CB_CG_ND1_angle=geo.CB_CG_ND1_angle
CA_CB_CG_ND1_diangle=geo.CA_CB_CG_ND1_diangle
CG_CD2_length=geo.CG_CD2_length
CB_CG_CD2_angle=geo.CB_CG_CD2_angle
CA_CB_CG_CD2_diangle=geo.CA_CB_CG_CD2_diangle
ND1_CE1_length=geo.ND1_CE1_length
CG_ND1_CE1_angle=geo.CG_ND1_CE1_angle
CB_CG_ND1_CE1_diangle=geo.CB_CG_ND1_CE1_diangle
CD2_NE2_length=geo.CD2_NE2_length
CG_CD2_NE2_angle=geo.CG_CD2_NE2_angle
CB_CG_CD2_NE2_diangle=geo.CB_CG_CD2_NE2_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g= calculateCoordinates(N, CA, CB, CB_CG_length, CA_CB_CG_angle, N_CA_CB_CG_diangle)
CG= Atom("CG", carbon_g, 0.0, 1.0, " ", " CG", 0, "C")
nitrogen_d1= calculateCoordinates(CA, CB, CG, CG_ND1_length, CB_CG_ND1_angle, CA_CB_CG_ND1_diangle)
ND1= Atom("ND1", nitrogen_d1, 0.0, 1.0, " ", " ND1", 0, "N")
carbon_d2= calculateCoordinates(CA, CB, CG, CG_CD2_length, CB_CG_CD2_angle, CA_CB_CG_CD2_diangle)
CD2= Atom("CD2", carbon_d2, 0.0, 1.0, " ", " CD2", 0, "C")
carbon_e1= calculateCoordinates(CB, CG, ND1, ND1_CE1_length, CG_ND1_CE1_angle, CB_CG_ND1_CE1_diangle)
CE1= Atom("CE1", carbon_e1, 0.0, 1.0, " ", " CE1", 0, "C")
nitrogen_e2= calculateCoordinates(CB, CG, CD2, CD2_NE2_length, CG_CD2_NE2_angle, CB_CG_CD2_NE2_diangle)
NE2= Atom("NE2", nitrogen_e2, 0.0, 1.0, " ", " NE2", 0, "N")
##Create Residue Data Structure
res= Residue((' ', segID, ' '), "HIS", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG)
res.add(ND1)
res.add(CD2)
res.add(CE1)
res.add(NE2)
return res | Creates a Histidine residue | entailment |
def makePro(segID, N, CA, C, O, geo):
'''Creates a Proline residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG_length=geo.CB_CG_length
CA_CB_CG_angle=geo.CA_CB_CG_angle
N_CA_CB_CG_diangle=geo.N_CA_CB_CG_diangle
CG_CD_length=geo.CG_CD_length
CB_CG_CD_angle=geo.CB_CG_CD_angle
CA_CB_CG_CD_diangle=geo.CA_CB_CG_CD_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g= calculateCoordinates(N, CA, CB, CB_CG_length, CA_CB_CG_angle, N_CA_CB_CG_diangle)
CG= Atom("CG", carbon_g, 0.0, 1.0, " ", " CG", 0, "C")
carbon_d= calculateCoordinates(CA, CB, CG, CG_CD_length, CB_CG_CD_angle, CA_CB_CG_CD_diangle)
CD= Atom("CD", carbon_d, 0.0, 1.0, " ", " CD", 0, "C")
##Create Residue Data Structure
res= Residue((' ', segID, ' '), "PRO", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG)
res.add(CD)
return res | Creates a Proline residue | entailment |
def makePhe(segID, N, CA, C, O, geo):
'''Creates a Phenylalanine residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG_length=geo.CB_CG_length
CA_CB_CG_angle=geo.CA_CB_CG_angle
N_CA_CB_CG_diangle=geo.N_CA_CB_CG_diangle
CG_CD1_length=geo.CG_CD1_length
CB_CG_CD1_angle=geo.CB_CG_CD1_angle
CA_CB_CG_CD1_diangle=geo.CA_CB_CG_CD1_diangle
CG_CD2_length=geo.CG_CD2_length
CB_CG_CD2_angle=geo.CB_CG_CD2_angle
CA_CB_CG_CD2_diangle= geo.CA_CB_CG_CD2_diangle
CD1_CE1_length=geo.CD1_CE1_length
CG_CD1_CE1_angle=geo.CG_CD1_CE1_angle
CB_CG_CD1_CE1_diangle=geo.CB_CG_CD1_CE1_diangle
CD2_CE2_length=geo.CD2_CE2_length
CG_CD2_CE2_angle=geo.CG_CD2_CE2_angle
CB_CG_CD2_CE2_diangle=geo.CB_CG_CD2_CE2_diangle
CE1_CZ_length=geo.CE1_CZ_length
CD1_CE1_CZ_angle=geo.CD1_CE1_CZ_angle
CG_CD1_CE1_CZ_diangle=geo.CG_CD1_CE1_CZ_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g= calculateCoordinates(N, CA, CB, CB_CG_length, CA_CB_CG_angle, N_CA_CB_CG_diangle)
CG= Atom("CG", carbon_g, 0.0, 1.0, " ", " CG", 0, "C")
carbon_d1= calculateCoordinates(CA, CB, CG, CG_CD1_length, CB_CG_CD1_angle, CA_CB_CG_CD1_diangle)
CD1= Atom("CD1", carbon_d1, 0.0, 1.0, " ", " CD1", 0, "C")
carbon_d2= calculateCoordinates(CA, CB, CG, CG_CD2_length, CB_CG_CD2_angle, CA_CB_CG_CD2_diangle)
CD2= Atom("CD2", carbon_d2, 0.0, 1.0, " ", " CD2", 0, "C")
carbon_e1= calculateCoordinates(CB, CG, CD1, CD1_CE1_length, CG_CD1_CE1_angle, CB_CG_CD1_CE1_diangle)
CE1= Atom("CE1", carbon_e1, 0.0, 1.0, " ", " CE1", 0, "C")
carbon_e2= calculateCoordinates(CB, CG, CD2, CD2_CE2_length, CG_CD2_CE2_angle, CB_CG_CD2_CE2_diangle)
CE2= Atom("CE2", carbon_e2, 0.0, 1.0, " ", " CE2", 0, "C")
carbon_z= calculateCoordinates(CG, CD1, CE1, CE1_CZ_length, CD1_CE1_CZ_angle, CG_CD1_CE1_CZ_diangle)
CZ= Atom("CZ", carbon_z, 0.0, 1.0, " ", " CZ", 0, "C")
##Create Residue Data Structures
res= Residue((' ', segID, ' '), "PHE", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG)
res.add(CD1)
res.add(CE1)
res.add(CD2)
res.add(CE2)
res.add(CZ)
return res | Creates a Phenylalanine residue | entailment |
def makeTrp(segID, N, CA, C, O, geo):
'''Creates a Tryptophan residue'''
##R-Group
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_CG_length=geo.CB_CG_length
CA_CB_CG_angle=geo.CA_CB_CG_angle
N_CA_CB_CG_diangle=geo.N_CA_CB_CG_diangle
CG_CD1_length=geo.CG_CD1_length
CB_CG_CD1_angle=geo.CB_CG_CD1_angle
CA_CB_CG_CD1_diangle=geo.CA_CB_CG_CD1_diangle
CG_CD2_length=geo.CG_CD2_length
CB_CG_CD2_angle=geo.CB_CG_CD2_angle
CA_CB_CG_CD2_diangle=geo.CA_CB_CG_CD2_diangle
CD1_NE1_length=geo.CD1_NE1_length
CG_CD1_NE1_angle=geo.CG_CD1_NE1_angle
CB_CG_CD1_NE1_diangle=geo.CB_CG_CD1_NE1_diangle
CD2_CE2_length=geo.CD2_CE2_length
CG_CD2_CE2_angle=geo.CG_CD2_CE2_angle
CB_CG_CD2_CE2_diangle=geo.CB_CG_CD2_CE2_diangle
CD2_CE3_length=geo.CD2_CE3_length
CG_CD2_CE3_angle=geo.CG_CD2_CE3_angle
CB_CG_CD2_CE3_diangle=geo.CB_CG_CD2_CE3_diangle
CE2_CZ2_length=geo.CE2_CZ2_length
CD2_CE2_CZ2_angle=geo.CD2_CE2_CZ2_angle
CG_CD2_CE2_CZ2_diangle=geo.CG_CD2_CE2_CZ2_diangle
CE3_CZ3_length=geo.CE3_CZ3_length
CD2_CE3_CZ3_angle=geo.CD2_CE3_CZ3_angle
CG_CD2_CE3_CZ3_diangle=geo.CG_CD2_CE3_CZ3_diangle
CZ2_CH2_length=geo.CZ2_CH2_length
CE2_CZ2_CH2_angle=geo.CE2_CZ2_CH2_angle
CD2_CE2_CZ2_CH2_diangle=geo.CD2_CE2_CZ2_CH2_diangle
carbon_b= calculateCoordinates(N, C, CA, CA_CB_length, C_CA_CB_angle, N_C_CA_CB_diangle)
CB= Atom("CB", carbon_b, 0.0 , 1.0, " "," CB", 0,"C")
carbon_g= calculateCoordinates(N, CA, CB, CB_CG_length, CA_CB_CG_angle, N_CA_CB_CG_diangle)
CG= Atom("CG", carbon_g, 0.0, 1.0, " ", " CG", 0, "C")
carbon_d1= calculateCoordinates(CA, CB, CG, CG_CD1_length, CB_CG_CD1_angle, CA_CB_CG_CD1_diangle)
CD1= Atom("CD1", carbon_d1, 0.0, 1.0, " ", " CD1", 0, "C")
carbon_d2= calculateCoordinates(CA, CB, CG, CG_CD2_length, CB_CG_CD2_angle, CA_CB_CG_CD2_diangle)
CD2= Atom("CD2", carbon_d2, 0.0, 1.0, " ", " CD2", 0, "C")
nitrogen_e1= calculateCoordinates(CB, CG, CD1, CD1_NE1_length, CG_CD1_NE1_angle, CB_CG_CD1_NE1_diangle)
NE1= Atom("NE1", nitrogen_e1, 0.0, 1.0, " ", " NE1", 0, "N")
carbon_e2= calculateCoordinates(CB, CG, CD2, CD2_CE2_length, CG_CD2_CE2_angle, CB_CG_CD2_CE2_diangle)
CE2= Atom("CE2", carbon_e2, 0.0, 1.0, " ", " CE2", 0, "C")
carbon_e3= calculateCoordinates(CB, CG, CD2, CD2_CE3_length, CG_CD2_CE3_angle, CB_CG_CD2_CE3_diangle)
CE3= Atom("CE3", carbon_e3, 0.0, 1.0, " ", " CE3", 0, "C")
carbon_z2= calculateCoordinates(CG, CD2, CE2, CE2_CZ2_length, CD2_CE2_CZ2_angle, CG_CD2_CE2_CZ2_diangle)
CZ2= Atom("CZ2", carbon_z2, 0.0, 1.0, " ", " CZ2", 0, "C")
carbon_z3= calculateCoordinates(CG, CD2, CE3, CE3_CZ3_length, CD2_CE3_CZ3_angle, CG_CD2_CE3_CZ3_diangle)
CZ3= Atom("CZ3", carbon_z3, 0.0, 1.0, " ", " CZ3", 0, "C")
carbon_h2= calculateCoordinates(CD2, CE2, CZ2, CZ2_CH2_length, CE2_CZ2_CH2_angle, CD2_CE2_CZ2_CH2_diangle)
CH2= Atom("CH2", carbon_h2, 0.0, 1.0, " ", " CH2", 0, "C")
##Create Residue DS
res= Residue((' ', segID, ' '), "TRP", ' ')
res.add(N)
res.add(CA)
res.add(C)
res.add(O)
res.add(CB)
res.add(CG)
res.add(CD1)
res.add(CD2)
res.add(NE1)
res.add(CE2)
res.add(CE3)
res.add(CZ2)
res.add(CZ3)
res.add(CH2)
return res | Creates a Tryptophan residue | entailment |
def initialize_res(residue):
'''Creates a new structure containing a single amino acid. The type and
geometry of the amino acid are determined by the argument, which has to be
either a geometry object or a single-letter amino acid code.
The amino acid will be placed into chain A of model 0.'''
if isinstance( residue, Geo ):
geo = residue
else:
geo=geometry(residue)
segID=1
AA= geo.residue_name
CA_N_length=geo.CA_N_length
CA_C_length=geo.CA_C_length
N_CA_C_angle=geo.N_CA_C_angle
CA_coord= numpy.array([0.,0.,0.])
C_coord= numpy.array([CA_C_length,0,0])
N_coord = numpy.array([CA_N_length*math.cos(N_CA_C_angle*(math.pi/180.0)),CA_N_length*math.sin(N_CA_C_angle*(math.pi/180.0)),0])
N= Atom("N", N_coord, 0.0 , 1.0, " "," N", 0, "N")
CA=Atom("CA", CA_coord, 0.0 , 1.0, " "," CA", 0,"C")
C= Atom("C", C_coord, 0.0, 1.0, " ", " C",0,"C")
##Create Carbonyl atom (to be moved later)
C_O_length=geo.C_O_length
CA_C_O_angle=geo.CA_C_O_angle
N_CA_C_O_diangle=geo.N_CA_C_O_diangle
carbonyl=calculateCoordinates(N, CA, C, C_O_length, CA_C_O_angle, N_CA_C_O_diangle)
O= Atom("O",carbonyl , 0.0 , 1.0, " "," O", 0, "O")
if(AA=='G'):
res=makeGly(segID, N, CA, C, O, geo)
elif(AA=='A'):
res=makeAla(segID, N, CA, C, O, geo)
elif(AA=='S'):
res=makeSer(segID, N, CA, C, O, geo)
elif(AA=='C'):
res=makeCys(segID, N, CA, C, O, geo)
elif(AA=='V'):
res=makeVal(segID, N, CA, C, O, geo)
elif(AA=='I'):
res=makeIle(segID, N, CA, C, O, geo)
elif(AA=='L'):
res=makeLeu(segID, N, CA, C, O, geo)
elif(AA=='T'):
res=makeThr(segID, N, CA, C, O, geo)
elif(AA=='R'):
res=makeArg(segID, N, CA, C, O, geo)
elif(AA=='K'):
res=makeLys(segID, N, CA, C, O, geo)
elif(AA=='D'):
res=makeAsp(segID, N, CA, C, O, geo)
elif(AA=='E'):
res=makeGlu(segID, N, CA, C, O, geo)
elif(AA=='N'):
res=makeAsn(segID, N, CA, C, O, geo)
elif(AA=='Q'):
res=makeGln(segID, N, CA, C, O, geo)
elif(AA=='M'):
res=makeMet(segID, N, CA, C, O, geo)
elif(AA=='H'):
res=makeHis(segID, N, CA, C, O, geo)
elif(AA=='P'):
res=makePro(segID, N, CA, C, O, geo)
elif(AA=='F'):
res=makePhe(segID, N, CA, C, O, geo)
elif(AA=='Y'):
res=makeTyr(segID, N, CA, C, O, geo)
elif(AA=='W'):
res=makeTrp(segID, N, CA, C, O, geo)
else:
res=makeGly(segID, N, CA, C, O, geo)
cha= Chain('A')
cha.add(res)
mod= Model(0)
mod.add(cha)
struc= Structure('X')
struc.add(mod)
return struc | Creates a new structure containing a single amino acid. The type and
geometry of the amino acid are determined by the argument, which has to be
either a geometry object or a single-letter amino acid code.
The amino acid will be placed into chain A of model 0. | entailment |
def add_residue_from_geo(structure, geo):
'''Adds a residue to chain A model 0 of the given structure, and
returns the new structure. The residue to be added is determined by
the geometry object given as second argument.
This function is a helper function and should not normally be called
directly. Call add_residue() instead.'''
resRef= getReferenceResidue(structure)
AA=geo.residue_name
segID= resRef.get_id()[1]
segID+=1
##geometry to bring together residue
peptide_bond=geo.peptide_bond
CA_C_N_angle=geo.CA_C_N_angle
C_N_CA_angle=geo.C_N_CA_angle
##Backbone Coordinages
N_CA_C_angle=geo.N_CA_C_angle
CA_N_length=geo.CA_N_length
CA_C_length=geo.CA_C_length
phi= geo.phi
psi_im1=geo.psi_im1
omega=geo.omega
N_coord=calculateCoordinates(resRef['N'], resRef['CA'], resRef['C'], peptide_bond, CA_C_N_angle, psi_im1)
N= Atom("N", N_coord, 0.0 , 1.0, " "," N", 0, "N")
CA_coord=calculateCoordinates(resRef['CA'], resRef['C'], N, CA_N_length, C_N_CA_angle, omega)
CA=Atom("CA", CA_coord, 0.0 , 1.0, " "," CA", 0,"C")
C_coord=calculateCoordinates(resRef['C'], N, CA, CA_C_length, N_CA_C_angle, phi)
C= Atom("C", C_coord, 0.0, 1.0, " ", " C",0,"C")
##Create Carbonyl atom (to be moved later)
C_O_length=geo.C_O_length
CA_C_O_angle=geo.CA_C_O_angle
N_CA_C_O_diangle=geo.N_CA_C_O_diangle
carbonyl=calculateCoordinates(N, CA, C, C_O_length, CA_C_O_angle, N_CA_C_O_diangle)
O= Atom("O",carbonyl , 0.0 , 1.0, " "," O", 0, "O")
if(AA=='G'):
res=makeGly(segID, N, CA, C, O, geo)
elif(AA=='A'):
res=makeAla(segID, N, CA, C, O, geo)
elif(AA=='S'):
res=makeSer(segID, N, CA, C, O, geo)
elif(AA=='C'):
res=makeCys(segID, N, CA, C, O, geo)
elif(AA=='V'):
res=makeVal(segID, N, CA, C, O, geo)
elif(AA=='I'):
res=makeIle(segID, N, CA, C, O, geo)
elif(AA=='L'):
res=makeLeu(segID, N, CA, C, O, geo)
elif(AA=='T'):
res=makeThr(segID, N, CA, C, O, geo)
elif(AA=='R'):
res=makeArg(segID, N, CA, C, O, geo)
elif(AA=='K'):
res=makeLys(segID, N, CA, C, O, geo)
elif(AA=='D'):
res=makeAsp(segID, N, CA, C, O, geo)
elif(AA=='E'):
res=makeGlu(segID, N, CA, C, O, geo)
elif(AA=='N'):
res=makeAsn(segID, N, CA, C, O, geo)
elif(AA=='Q'):
res=makeGln(segID, N, CA, C, O, geo)
elif(AA=='M'):
res=makeMet(segID, N, CA, C, O, geo)
elif(AA=='H'):
res=makeHis(segID, N, CA, C, O, geo)
elif(AA=='P'):
res=makePro(segID, N, CA, C, O, geo)
elif(AA=='F'):
res=makePhe(segID, N, CA, C, O, geo)
elif(AA=='Y'):
res=makeTyr(segID, N, CA, C, O, geo)
elif(AA=='W'):
res=makeTrp(segID, N, CA, C, O, geo)
else:
res=makeGly(segID, N, CA, C, O, geo)
resRef['O'].set_coord(calculateCoordinates(res['N'], resRef['CA'], resRef['C'], C_O_length, CA_C_O_angle, 180.0))
ghost= Atom("N", calculateCoordinates(res['N'], res['CA'], res['C'], peptide_bond, CA_C_N_angle, psi_im1), 0.0 , 0.0, " ","N", 0, "N")
res['O'].set_coord(calculateCoordinates( res['N'], res['CA'], res['C'], C_O_length, CA_C_O_angle, 180.0))
structure[0]['A'].add(res)
return structure | Adds a residue to chain A model 0 of the given structure, and
returns the new structure. The residue to be added is determined by
the geometry object given as second argument.
This function is a helper function and should not normally be called
directly. Call add_residue() instead. | entailment |
def make_extended_structure(AA_chain):
'''Place a sequence of amino acids into a peptide in the extended
conformation. The argument AA_chain holds the sequence of amino
acids to be used.'''
geo = geometry(AA_chain[0])
struc=initialize_res(geo)
for i in range(1,len(AA_chain)):
AA = AA_chain[i]
geo = geometry(AA)
add_residue(struc, geo)
return struc | Place a sequence of amino acids into a peptide in the extended
conformation. The argument AA_chain holds the sequence of amino
acids to be used. | entailment |
def add_residue(structure, residue, phi=-120, psi_im1=140, omega=-370):
'''Adds a residue to chain A model 0 of the given structure, and
returns the new structure. The residue to be added can be specified
in two ways: either as a geometry object (in which case
the remaining arguments phi, psi_im1, and omega are ignored) or as a
single-letter amino-acid code. In the latter case, the optional
arguments phi, psi_im1, and omega specify the corresponding backbone
angles.
When omega is specified, it needs to be a value greater than or equal
to -360. Values below -360 are ignored.'''
if isinstance( residue, Geo ):
geo = residue
else:
geo=geometry(residue)
geo.phi=phi
geo.psi_im1=psi_im1
if omega>-361:
geo.omega=omega
add_residue_from_geo(structure, geo)
return structure | Adds a residue to chain A model 0 of the given structure, and
returns the new structure. The residue to be added can be specified
in two ways: either as a geometry object (in which case
the remaining arguments phi, psi_im1, and omega are ignored) or as a
single-letter amino-acid code. In the latter case, the optional
arguments phi, psi_im1, and omega specify the corresponding backbone
angles.
When omega is specified, it needs to be a value greater than or equal
to -360. Values below -360 are ignored. | entailment |
def make_structure(AA_chain,phi,psi_im1,omega=[]):
'''Place a sequence of amino acids into a peptide with specified
backbone dihedral angles. The argument AA_chain holds the
sequence of amino acids to be used. The arguments phi and psi_im1 hold
lists of backbone angles, one for each amino acid, *starting from
the second amino acid in the chain*. The argument
omega (optional) holds a list of omega angles, also starting from
the second amino acid in the chain.'''
geo = geometry(AA_chain[0])
struc=initialize_res(geo)
if len(omega)==0:
for i in range(1,len(AA_chain)):
AA = AA_chain[i]
add_residue(struc, AA, phi[i-1], psi_im1[i-1])
else:
for i in range(1,len(AA_chain)):
AA = AA_chain[i]
add_residue(struc, AA, phi[i-1], psi_im1[i-1], omega[i-1])
return struc | Place a sequence of amino acids into a peptide with specified
backbone dihedral angles. The argument AA_chain holds the
sequence of amino acids to be used. The arguments phi and psi_im1 hold
lists of backbone angles, one for each amino acid, *starting from
the second amino acid in the chain*. The argument
omega (optional) holds a list of omega angles, also starting from
the second amino acid in the chain. | entailment |
def make_structure_from_geos(geos):
'''Creates a structure out of a list of geometry objects.'''
model_structure=initialize_res(geos[0])
for i in range(1,len(geos)):
model_structure=add_residue(model_structure, geos[i])
return model_structure | Creates a structure out of a list of geometry objects. | entailment |
def geometry(AA):
'''Generates the geometry of the requested amino acid.
The amino acid needs to be specified by its single-letter
code. If an invalid code is specified, the function
returns the geometry of Glycine.'''
if(AA=='G'):
return GlyGeo()
elif(AA=='A'):
return AlaGeo()
elif(AA=='S'):
return SerGeo()
elif(AA=='C'):
return CysGeo()
elif(AA=='V'):
return ValGeo()
elif(AA=='I'):
return IleGeo()
elif(AA=='L'):
return LeuGeo()
elif(AA=='T'):
return ThrGeo()
elif(AA=='R'):
return ArgGeo()
elif(AA=='K'):
return LysGeo()
elif(AA=='D'):
return AspGeo()
elif(AA=='E'):
return GluGeo()
elif(AA=='N'):
return AsnGeo()
elif(AA=='Q'):
return GlnGeo()
elif(AA=='M'):
return MetGeo()
elif(AA=='H'):
return HisGeo()
elif(AA=='P'):
return ProGeo()
elif(AA=='F'):
return PheGeo()
elif(AA=='Y'):
return TyrGeo()
elif(AA=='W'):
return TrpGeo()
else:
return GlyGeo() | Generates the geometry of the requested amino acid.
The amino acid needs to be specified by its single-letter
code. If an invalid code is specified, the function
returns the geometry of Glycine. | entailment |
def enregister(svc, newAddress, password):
"""
Register a new account and return a Deferred that fires if it worked.
@param svc: a Q2QService
@param newAddress: a Q2QAddress object
@param password: a shared secret (str)
"""
return svc.connectQ2Q(q2q.Q2QAddress("",""),
q2q.Q2QAddress(newAddress.domain, "accounts"),
'identity-admin',
protocol.ClientFactory.forProtocol(AMP)
).addCallback(
AMP.callRemote,
AddUser,
name=newAddress.resource,
password=password
).addErrback(
Failure.trap,
error.ConnectionDone
) | Register a new account and return a Deferred that fires if it worked.
@param svc: a Q2QService
@param newAddress: a Q2QAddress object
@param password: a shared secret (str) | entailment |
def connectCached(self, endpoint, protocolFactory,
extraWork=lambda x: x,
extraHash=None):
"""
See module docstring
@param endpoint:
@param protocolFactory:
@param extraWork:
@param extraHash:
@return: the D
"""
key = endpoint, extraHash
D = Deferred()
if key in self.cachedConnections:
D.callback(self.cachedConnections[key])
elif key in self.inProgress:
self.inProgress[key].append(D)
else:
self.inProgress[key] = [D]
endpoint.connect(
_CachingClientFactory(
self, key, protocolFactory,
extraWork))
return D | See module docstring
@param endpoint:
@param protocolFactory:
@param extraWork:
@param extraHash:
@return: the D | entailment |
def connectionLostForKey(self, key):
"""
Remove lost connection from cache.
@param key: key of connection that was lost
@type key: L{tuple} of L{IAddress} and C{extraHash}
"""
if key in self.cachedConnections:
del self.cachedConnections[key]
if self._shuttingDown and self._shuttingDown.get(key):
d, self._shuttingDown[key] = self._shuttingDown[key], None
d.callback(None) | Remove lost connection from cache.
@param key: key of connection that was lost
@type key: L{tuple} of L{IAddress} and C{extraHash} | entailment |
def shutdown(self):
"""
Disconnect all cached connections.
@returns: a deferred that fires once all connection are disconnected.
@rtype: L{Deferred}
"""
self._shuttingDown = {key: Deferred()
for key in self.cachedConnections.keys()}
return DeferredList(
[maybeDeferred(p.transport.loseConnection)
for p in self.cachedConnections.values()]
+ self._shuttingDown.values()) | Disconnect all cached connections.
@returns: a deferred that fires once all connection are disconnected.
@rtype: L{Deferred} | entailment |
def parse_state(state):
"""Convert a bool, or string, into a bool.
The string pairs we respond to (case insensitively) are:
- ALLOW & DENY
- GRANT & REJECT
:returns bool: ``True`` or ``False``.
:raises ValueError: when not a ``bool`` or one of the above strings.
E.g.::
>>> parse_state('Allow')
True
"""
if isinstance(state, bool):
return state
if not isinstance(state, basestring):
raise TypeError('ACL state must be bool or string')
try:
return _state_strings[state.lower()]
except KeyError:
raise ValueError('unknown ACL state string') | Convert a bool, or string, into a bool.
The string pairs we respond to (case insensitively) are:
- ALLOW & DENY
- GRANT & REJECT
:returns bool: ``True`` or ``False``.
:raises ValueError: when not a ``bool`` or one of the above strings.
E.g.::
>>> parse_state('Allow')
True | entailment |
def _getattr_path(obj, path):
"""
getattr for a dot separated path
If an AttributeError is raised, it will return None.
"""
if not path:
return None
for attr in path.split('.'):
obj = getattr(obj, attr, None)
return obj | getattr for a dot separated path
If an AttributeError is raised, it will return None. | entailment |
def _get_settings_from_request(request):
"""Extracts Zipkin attributes and configuration from request attributes.
See the `zipkin_span` context in py-zipkin for more detaied information on
all the settings.
Here are the supported Pyramid registry settings:
zipkin.create_zipkin_attr: allows the service to override the creation of
Zipkin attributes. For example, if you want to deterministically
calculate trace ID from some service-specific attributes.
zipkin.transport_handler: how py-zipkin will log the spans it generates.
zipkin.stream_name: an additional parameter to be used as the first arg
to the transport_handler function. A good example is a Kafka topic.
zipkin.add_logging_annotation: if true, the outermost span in this service
will have an annotation set when py-zipkin begins its logging.
zipkin.report_root_timestamp: if true, the outermost span in this service
will set its timestamp and duration attributes. Use this only if this
service is not going to have a corresponding client span. See
https://github.com/Yelp/pyramid_zipkin/issues/68
zipkin.firehose_handler: [EXPERIMENTAL] this enables "firehose tracing",
which will log 100% of the spans to this handler, regardless of
sampling decision. This is experimental and may change or be removed
at any time without warning.
zipkin.use_pattern_as_span_name: if true, we'll use the pyramid route pattern
as span name. If false (default) we'll keep using the raw url path.
"""
settings = request.registry.settings
# Creates zipkin_attrs and attaches a zipkin_trace_id attr to the request
if 'zipkin.create_zipkin_attr' in settings:
zipkin_attrs = settings['zipkin.create_zipkin_attr'](request)
else:
zipkin_attrs = create_zipkin_attr(request)
if 'zipkin.transport_handler' in settings:
transport_handler = settings['zipkin.transport_handler']
if not isinstance(transport_handler, BaseTransportHandler):
warnings.warn(
'Using a function as transport_handler is deprecated. '
'Please extend py_zipkin.transport.BaseTransportHandler',
DeprecationWarning,
)
stream_name = settings.get('zipkin.stream_name', 'zipkin')
transport_handler = functools.partial(transport_handler, stream_name)
else:
raise ZipkinError(
"`zipkin.transport_handler` is a required config property, which"
" is missing. Have a look at py_zipkin's docs for how to implement"
" it: https://github.com/Yelp/py_zipkin#transport"
)
context_stack = _getattr_path(request, settings.get('zipkin.request_context'))
service_name = settings.get('service_name', 'unknown')
span_name = '{0} {1}'.format(request.method, request.path)
add_logging_annotation = settings.get(
'zipkin.add_logging_annotation',
False,
)
# If the incoming request doesn't have Zipkin headers, this request is
# assumed to be the root span of a trace. There's also a configuration
# override to allow services to write their own logic for reporting
# timestamp/duration.
if 'zipkin.report_root_timestamp' in settings:
report_root_timestamp = settings['zipkin.report_root_timestamp']
else:
report_root_timestamp = 'X-B3-TraceId' not in request.headers
zipkin_host = settings.get('zipkin.host')
zipkin_port = settings.get('zipkin.port', request.server_port)
firehose_handler = settings.get('zipkin.firehose_handler')
post_handler_hook = settings.get('zipkin.post_handler_hook')
max_span_batch_size = settings.get('zipkin.max_span_batch_size')
use_pattern_as_span_name = bool(
settings.get('zipkin.use_pattern_as_span_name', False),
)
encoding = settings.get('zipkin.encoding', Encoding.V1_THRIFT)
return _ZipkinSettings(
zipkin_attrs,
transport_handler,
service_name,
span_name,
add_logging_annotation,
report_root_timestamp,
zipkin_host,
zipkin_port,
context_stack,
firehose_handler,
post_handler_hook,
max_span_batch_size,
use_pattern_as_span_name,
encoding=encoding,
) | Extracts Zipkin attributes and configuration from request attributes.
See the `zipkin_span` context in py-zipkin for more detaied information on
all the settings.
Here are the supported Pyramid registry settings:
zipkin.create_zipkin_attr: allows the service to override the creation of
Zipkin attributes. For example, if you want to deterministically
calculate trace ID from some service-specific attributes.
zipkin.transport_handler: how py-zipkin will log the spans it generates.
zipkin.stream_name: an additional parameter to be used as the first arg
to the transport_handler function. A good example is a Kafka topic.
zipkin.add_logging_annotation: if true, the outermost span in this service
will have an annotation set when py-zipkin begins its logging.
zipkin.report_root_timestamp: if true, the outermost span in this service
will set its timestamp and duration attributes. Use this only if this
service is not going to have a corresponding client span. See
https://github.com/Yelp/pyramid_zipkin/issues/68
zipkin.firehose_handler: [EXPERIMENTAL] this enables "firehose tracing",
which will log 100% of the spans to this handler, regardless of
sampling decision. This is experimental and may change or be removed
at any time without warning.
zipkin.use_pattern_as_span_name: if true, we'll use the pyramid route pattern
as span name. If false (default) we'll keep using the raw url path. | entailment |
def zipkin_tween(handler, registry):
"""
Factory for pyramid tween to handle zipkin server logging. Note that even
if the request isn't sampled, Zipkin attributes are generated and pushed
into threadlocal storage, so `create_http_headers_for_new_span` and
`zipkin_span` will have access to the proper Zipkin state.
Consumes custom create_zipkin_attr function if one is set in the pyramid
registry.
:param handler: pyramid request handler
:param registry: pyramid app registry
:returns: pyramid tween
"""
def tween(request):
zipkin_settings = _get_settings_from_request(request)
tracer = get_default_tracer()
tween_kwargs = dict(
service_name=zipkin_settings.service_name,
span_name=zipkin_settings.span_name,
zipkin_attrs=zipkin_settings.zipkin_attrs,
transport_handler=zipkin_settings.transport_handler,
host=zipkin_settings.host,
port=zipkin_settings.port,
add_logging_annotation=zipkin_settings.add_logging_annotation,
report_root_timestamp=zipkin_settings.report_root_timestamp,
context_stack=zipkin_settings.context_stack,
max_span_batch_size=zipkin_settings.max_span_batch_size,
encoding=zipkin_settings.encoding,
kind=Kind.SERVER,
)
if zipkin_settings.firehose_handler is not None:
tween_kwargs['firehose_handler'] = zipkin_settings.firehose_handler
with tracer.zipkin_span(**tween_kwargs) as zipkin_context:
response = handler(request)
if zipkin_settings.use_pattern_as_span_name and request.matched_route:
zipkin_context.override_span_name('{} {}'.format(
request.method,
request.matched_route.pattern,
))
zipkin_context.update_binary_annotations(
get_binary_annotations(request, response),
)
if zipkin_settings.post_handler_hook:
zipkin_settings.post_handler_hook(request, response)
return response
return tween | Factory for pyramid tween to handle zipkin server logging. Note that even
if the request isn't sampled, Zipkin attributes are generated and pushed
into threadlocal storage, so `create_http_headers_for_new_span` and
`zipkin_span` will have access to the proper Zipkin state.
Consumes custom create_zipkin_attr function if one is set in the pyramid
registry.
:param handler: pyramid request handler
:param registry: pyramid app registry
:returns: pyramid tween | entailment |
def get_skill_data(self):
""" generates tuples of name, path, url, sha """
path_to_sha = {
folder: sha for folder, sha in self.get_shas()
}
modules = self.read_file('.gitmodules').split('[submodule "')
for i, module in enumerate(modules):
if not module:
continue
try:
name = module.split('"]')[0].strip()
path = module.split('path = ')[1].split('\n')[0].strip()
url = module.split('url = ')[1].strip()
sha = path_to_sha.get(path, '')
yield name, path, url, sha
except (ValueError, IndexError) as e:
LOG.warning('Failed to parse submodule "{}" #{}:{}'.format(
locals().get('name', ''), i, e
)) | generates tuples of name, path, url, sha | entailment |
def expectAck(self):
"""
When the most recent packet produced as an output of this state machine
is acknowledged by our peer, generate a single 'ack' input.
"""
last = self.lastTransmitted
self.ackPredicate = lambda ackPacket: (
ackPacket.relativeAck() >= last.relativeSeq()
) | When the most recent packet produced as an output of this state machine
is acknowledged by our peer, generate a single 'ack' input. | entailment |
def maybeReceiveAck(self, ackPacket):
"""
Receive an L{ack} or L{synAck} input from the given packet.
"""
ackPredicate = self.ackPredicate
self.ackPredicate = lambda packet: False
if ackPacket.syn:
# New SYN packets are always news.
self.synAck()
return
if ackPredicate(ackPacket):
self.ack() | Receive an L{ack} or L{synAck} input from the given packet. | entailment |
def _set_options():
"""
Set the options for CleanCommand.
There are a number of reasons that this has to be done in an
external function instead of inline in the class. First of all,
the setuptools machinery really wants the options to be defined
in a class attribute - otherwise, the help command doesn't work
so we need a class attribute. However, we are extending an
existing command and do not want to "monkey patch" over it so
we need to define a *new* class attribute with the same name
that contains a copy of the base class value. This could be
accomplished using some magic in ``__new__`` but I would much
rather set the class attribute externally... it's just cleaner.
"""
CleanCommand.user_options = _CleanCommand.user_options[:]
CleanCommand.user_options.extend([
('dist', 'd', 'remove distribution directory'),
('eggs', None, 'remove egg and egg-info directories'),
('environment', 'E', 'remove virtual environment directory'),
('pycache', 'p', 'remove __pycache__ directories'),
('egg-base=', 'e',
'directory containing .egg-info directories '
'(default: top of the source tree)'),
('virtualenv-dir=', None,
'root directory for the virtual directory '
'(default: value of VIRTUAL_ENV environment variable)'),
])
CleanCommand.boolean_options = _CleanCommand.boolean_options[:]
CleanCommand.boolean_options.extend(
['dist', 'eggs', 'environment', 'pycache']) | Set the options for CleanCommand.
There are a number of reasons that this has to be done in an
external function instead of inline in the class. First of all,
the setuptools machinery really wants the options to be defined
in a class attribute - otherwise, the help command doesn't work
so we need a class attribute. However, we are extending an
existing command and do not want to "monkey patch" over it so
we need to define a *new* class attribute with the same name
that contains a copy of the base class value. This could be
accomplished using some magic in ``__new__`` but I would much
rather set the class attribute externally... it's just cleaner. | entailment |
def activate_output(self, universe: int) -> None:
"""
Activates a universe that's then starting to sending every second.
See http://tsp.esta.org/tsp/documents/docs/E1-31-2016.pdf for more information
:param universe: the universe to activate
"""
check_universe(universe)
# check, if the universe already exists in the list:
if universe in self._outputs:
return
# add new sending:
new_output = Output(DataPacket(cid=self.__CID, sourceName=self.source_name, universe=universe))
self._outputs[universe] = new_output | Activates a universe that's then starting to sending every second.
See http://tsp.esta.org/tsp/documents/docs/E1-31-2016.pdf for more information
:param universe: the universe to activate | entailment |
def deactivate_output(self, universe: int) -> None:
"""
Deactivates an existing sending. Every data from the existing sending output will be lost.
(TTL, Multicast, DMX data, ..)
:param universe: the universe to deactivate. If the universe was not activated before, no error is raised
"""
check_universe(universe)
try: # try to send out three messages with stream_termination bit set to 1
self._outputs[universe]._packet.option_StreamTerminated = True
for i in range(0, 3):
self._output_thread.send_out(self._outputs[universe])
except:
pass
try:
del self._outputs[universe]
except:
pass | Deactivates an existing sending. Every data from the existing sending output will be lost.
(TTL, Multicast, DMX data, ..)
:param universe: the universe to deactivate. If the universe was not activated before, no error is raised | entailment |
def move_universe(self, universe_from: int, universe_to: int) -> None:
"""
Moves an sending from one universe to another. All settings are being restored and only the universe changes
:param universe_from: the universe that should be moved
:param universe_to: the target universe. An existing universe will be overwritten
"""
check_universe(universe_from)
check_universe(universe_to)
# store the sending object and change the universe in the packet of the sending
tmp_output = self._outputs[universe_from]
tmp_output._packet.universe = universe_to
# deactivate sending
self.deactivate_output(universe_from)
# activate new sending with the new universe
self._outputs[universe_to] = tmp_output | Moves an sending from one universe to another. All settings are being restored and only the universe changes
:param universe_from: the universe that should be moved
:param universe_to: the target universe. An existing universe will be overwritten | entailment |
def start(self, bind_address=None, bind_port: int = None, fps: int = None) -> None:
"""
Starts or restarts a new Thread with the parameters given in the constructor or
the parameters given in this function.
The parameters in this function do not override the class specific values!
:param bind_address: the IP-Address to bind to
:param bind_port: the port to bind to
:param fps: the fps to use. Note: this is not precisely hold, use for load balance in the network
"""
if bind_address is None:
bind_address = self.bindAddress
if fps is None:
fps = self._fps
if bind_port is None:
bind_port = self.bind_port
self.stop()
self._output_thread = OutputThread(cid=self.__CID, source_name=self.source_name,
outputs=self._outputs, bind_address=bind_address,
bind_port=bind_port, fps=fps, universe_discovery=self._universeDiscovery)
self._output_thread.start() | Starts or restarts a new Thread with the parameters given in the constructor or
the parameters given in this function.
The parameters in this function do not override the class specific values!
:param bind_address: the IP-Address to bind to
:param bind_port: the port to bind to
:param fps: the fps to use. Note: this is not precisely hold, use for load balance in the network | entailment |
def openReadWrite(filename):
"""
Return a 2-tuple of: (whether the file existed before, open file object)
"""
try:
os.makedirs(os.path.dirname(filename))
except OSError:
pass
try:
return file(filename, 'rb+')
except IOError:
return file(filename, 'wb+') | Return a 2-tuple of: (whether the file existed before, open file object) | entailment |
def openMaskFile(filename):
"""
Open the bitmask file sitting next to a file in the filesystem.
"""
dirname, basename = os.path.split(filename)
newbasename = '_%s_.sbm' % (basename,)
maskfname = os.path.join(dirname, newbasename)
maskfile = openReadWrite(maskfname)
return maskfile | Open the bitmask file sitting next to a file in the filesystem. | entailment |
def data(self, name, chunk, body):
"""
Issue a DATA command
return None
Sends a chunk of data to a peer.
"""
self.callRemote(Data, name=name, chunk=chunk, body=body) | Issue a DATA command
return None
Sends a chunk of data to a peer. | entailment |
def get(self, name, mask=None):
"""
Issue a GET command
Return a Deferred which fires with the size of the name being requested
"""
mypeer = self.transport.getQ2QPeer()
tl = self.nexus.transloads[name]
peerz = tl.peers
if mypeer in peerz:
peerk = peerz[mypeer]
else:
# all turned on initially; we aren't going to send them anything.
peerk = PeerKnowledge(bits.BitArray(size=len(tl.mask), default=1))
peerz[mypeer] = peerk
peerk.sentGet = True
return self.callRemote(
Get, name=name, mask=mask).addCallback(lambda r: r['size']) | Issue a GET command
Return a Deferred which fires with the size of the name being requested | entailment |
def connectionLost(self, reason):
"""
Inform the associated L{conncache.ConnectionCache} that this
protocol has been disconnected.
"""
self.nexus.conns.connectionLostForKey((endpoint.Q2QEndpoint(
self.nexus.svc,
self.nexus.addr,
self.transport.getQ2QPeer(),
PROTOCOL_NAME), None))
AMP.connectionLost(self, reason) | Inform the associated L{conncache.ConnectionCache} that this
protocol has been disconnected. | entailment |
def sendSomeData(self, howMany):
"""
Send some DATA commands to my peer(s) to relay some data.
@param howMany: an int, the number of chunks to send out.
"""
# print 'sending some data', howMany
if self.transport is None:
return
peer = self.transport.getQ2QPeer()
while howMany > 0:
# sort transloads so that the least-frequently-serviced ones will
# come first
tloads = [
(findin(tl.name, self.sentTransloads),
tl) for tl in self.nexus.transloadsForPeer(peer)]
tloads.sort()
tloads = [tl for (idx, tl) in tloads if tl.peerNeedsData(peer)]
if not tloads:
break
wasHowMany = howMany
for myTransload in tloads:
# move this transload to the end so it will be sorted last next
# time.
name = myTransload.name
if name in self.sentTransloads:
self.sentTransloads.remove(name)
self.sentTransloads.append(name)
knowledge = myTransload.peers[peer]
chunkNumber, chunkData = myTransload.selectOptimalChunk(peer)
if chunkNumber is None:
continue
peerToIntroduce = knowledge.selectPeerToIntroduce(
myTransload.peers.keys())
if peerToIntroduce is not None:
self.introduce(myTransload.name, peerToIntroduce)
self.data(name, chunkNumber, chunkData)
# Don't re-send that chunk again unless they explicitly tell us
# they need it for some reason
knowledge.mask[chunkNumber] = 1
howMany -= 1
if howMany <= 0:
break
if wasHowMany == howMany:
# couldn't find anything to send.
break | Send some DATA commands to my peer(s) to relay some data.
@param howMany: an int, the number of chunks to send out. | entailment |
def selectPeerToIntroduce(self, otherPeers):
"""
Choose a peer to introduce. Return a q2q address or None, if there are
no suitable peers to introduce at this time.
"""
for peer in otherPeers:
if peer not in self.otherPeers:
self.otherPeers.append(peer)
return peer | Choose a peer to introduce. Return a q2q address or None, if there are
no suitable peers to introduce at this time. | entailment |
def chunkReceived(self, who, chunkNumber, chunkData):
"""
A chunk was received from the peer.
"""
def verifyError(error):
error.trap(VerifyError)
self.nexus.decreaseScore(who, self.authorities)
return self.nexus.verifyChunk(self.name,
who,
chunkNumber,
sha.new(chunkData).digest(),
self.authorities).addCallbacks(
lambda whatever: self.chunkVerified(who, chunkNumber, chunkData),
verifyError) | A chunk was received from the peer. | entailment |
def chunkVerified(self, who, chunkNumber, chunkData):
"""A chunk (#chunkNumber) containing the data C{chunkData} was verified, sent
to us by the Q2QAddress C{who}.
"""
if self.mask[chunkNumber]:
# already received that chunk.
return
self.file.seek(chunkNumber * CHUNK_SIZE)
self.file.write(chunkData)
self.file.flush()
self.sha1sums[chunkNumber] = sha.new(chunkData).digest()
if not self.mask[chunkNumber]:
self.nexus.increaseScore(who)
self.mask[chunkNumber] = 1
self.writeMaskFile()
self.changes += 1
if self.changes > self.maximumChangeCountBeforeMaskUpdate:
self.call.cancel()
self.sendMaskUpdate()
self.call = self.nexus.callLater(
self.maximumChangeCountBeforeMaskUpdate,
self.maybeUpdateMask)
if not self.seed and not self.mask.countbits(0):
# we're done, let's let other people get at that file.
self.file.close()
os.rename(self.incompletePath.path,
self.fullPath.path)
self.file = self.fullPath.open()
self.maskfile.close()
os.unlink(self.maskfile.name)
self.ui.updateHostMask(self.mask) | A chunk (#chunkNumber) containing the data C{chunkData} was verified, sent
to us by the Q2QAddress C{who}. | entailment |
def selectOptimalChunk(self, peer):
"""
select an optimal chunk to send to a peer.
@return: int(chunkNumber), str(chunkData) if there is data to be sent,
otherwise None, None
"""
# stuff I have
have = sets.Set(self.mask.positions(1))
# stuff that this peer wants
want = sets.Set(self.peers[peer].mask.positions(0))
exchangeable = have.intersection(want)
finalSet = dict.fromkeys(exchangeable, 0)
# taking a page from bittorrent, rarest-first
for chunkNumber in exchangeable:
for otherPeer in self.peers.itervalues():
finalSet[chunkNumber] += not otherPeer.mask[chunkNumber]
rarityList = [(rarity, random.random(), chunkNumber)
for (chunkNumber, rarity)
in finalSet.iteritems()]
if not rarityList:
return None, None
rarityList.sort()
chunkNumber = rarityList[-1][-1] # sorted in ascending order of rarity
# sanity check
assert self.mask[chunkNumber], "I wanted to send a chunk I didn't have"
self.file.seek(chunkNumber * CHUNK_SIZE)
chunkData = self.file.read(CHUNK_SIZE)
self.sha1sums[chunkNumber] = sha.new(chunkData).digest()
return chunkNumber, chunkData | select an optimal chunk to send to a peer.
@return: int(chunkNumber), str(chunkData) if there is data to be sent,
otherwise None, None | entailment |
def allocateFile(self, sharename, peer):
"""
return a 2-tuple of incompletePath, fullPath
"""
peerDir = self.basepath.child(str(peer))
if not peerDir.isdir():
peerDir.makedirs()
return (peerDir.child(sharename+'.incomplete'),
peerDir.child(sharename)) | return a 2-tuple of incompletePath, fullPath | entailment |
def transloadsForPeer(self, peer):
"""
Returns an iterator of transloads that apply to a particular peer.
"""
for tl in self.transloads.itervalues():
if peer in tl.peers:
yield tl | Returns an iterator of transloads that apply to a particular peer. | entailment |
def seed(self, path, name):
"""Create a transload from an existing file that is complete.
"""
t = self.transloads[name] = Transload(self.addr, self, name,
None, path,
self.ui.startTransload(name,
self.addr),
seed=True)
return t | Create a transload from an existing file that is complete. | entailment |
def connectPeer(self, peer):
"""Establish a SIGMA connection to the given peer.
@param peer: a Q2QAddress of a peer which has a file that I want
@return: a Deferred which fires a SigmaProtocol.
"""
return self.conns.connectCached(endpoint.Q2QEndpoint(self.svc,
self.addr,
peer,
PROTOCOL_NAME),
self.clientFactory) | Establish a SIGMA connection to the given peer.
@param peer: a Q2QAddress of a peer which has a file that I want
@return: a Deferred which fires a SigmaProtocol. | entailment |
def increaseScore(self, participant):
"""
The participant successfully transferred a chunk to me.
"""
if participant not in self.scores:
self.scores[participant] = 0
self.scores[participant] += 1 | The participant successfully transferred a chunk to me. | entailment |
def listen_on(self, trigger: str, **kwargs) -> callable:
"""
This is a simple decorator for registering a callback for an event. You can also use 'register_listener'.
A list with all possible options is available via LISTEN_ON_OPTIONS.
:param trigger: Currently supported options: 'universe availability change', 'universe'
"""
def decorator(f):
self.register_listener(trigger, f, **kwargs)
return f
return decorator | This is a simple decorator for registering a callback for an event. You can also use 'register_listener'.
A list with all possible options is available via LISTEN_ON_OPTIONS.
:param trigger: Currently supported options: 'universe availability change', 'universe' | entailment |
def register_listener(self, trigger: str, func: callable, **kwargs) -> None:
"""
Register a listener for the given trigger. Raises an TypeError when the trigger is not a valid one.
To get a list with all valid triggers, use LISTEN_ON_OPTIONS.
:param trigger: the trigger on which the given callback should be used.
Currently supported: 'universe availability change', 'universe'
:param func: the callback. The parameters depend on the trigger. See README for more information
"""
if trigger in LISTEN_ON_OPTIONS:
if trigger == LISTEN_ON_OPTIONS[1]: # if the trigger is universe, use the universe from args as key
try:
self._callbacks[kwargs[LISTEN_ON_OPTIONS[1]]].append(func)
except:
self._callbacks[kwargs[LISTEN_ON_OPTIONS[1]]] = [func]
try:
self._callbacks[trigger].append(func)
except:
self._callbacks[trigger] = [func]
else:
raise TypeError(f'The given trigger "{trigger}" is not a valid one!') | Register a listener for the given trigger. Raises an TypeError when the trigger is not a valid one.
To get a list with all valid triggers, use LISTEN_ON_OPTIONS.
:param trigger: the trigger on which the given callback should be used.
Currently supported: 'universe availability change', 'universe'
:param func: the callback. The parameters depend on the trigger. See README for more information | entailment |
def join_multicast(self, universe: int) -> None:
"""
Joins the multicast address that is used for the given universe. Note: If you are on Windows you must have given
a bind IP-Address for this feature to function properly. On the other hand you are not allowed to set a bind
address if you are on any other OS.
:param universe: the universe to join the multicast group.
The network hardware has to support the multicast feature!
"""
self.sock.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP,
socket.inet_aton(calculate_multicast_addr(universe)) +
socket.inet_aton(self._bindAddress)) | Joins the multicast address that is used for the given universe. Note: If you are on Windows you must have given
a bind IP-Address for this feature to function properly. On the other hand you are not allowed to set a bind
address if you are on any other OS.
:param universe: the universe to join the multicast group.
The network hardware has to support the multicast feature! | entailment |
def leave_multicast(self, universe: int) -> None:
"""
Try to leave the multicast group with the specified universe. This does not throw any exception if the group
could not be leaved.
:param universe: the universe to leave the multicast group.
The network hardware has to support the multicast feature!
"""
try:
self.sock.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP,
socket.inet_aton(calculate_multicast_addr(universe)) +
socket.inet_aton(self._bindAddress))
except: # try to leave the multicast group for the universe
pass | Try to leave the multicast group with the specified universe. This does not throw any exception if the group
could not be leaved.
:param universe: the universe to leave the multicast group.
The network hardware has to support the multicast feature! | entailment |
def start(self) -> None:
"""
Starts a new thread that handles the input. If a thread is already running, the thread will be restarted.
"""
self.stop() # stop an existing thread
self._thread = receiverThread(socket=self.sock, callbacks=self._callbacks)
self._thread.start() | Starts a new thread that handles the input. If a thread is already running, the thread will be restarted. | entailment |
def predicate(self, name, func=None):
"""Define a new predicate (directly, or as a decorator).
E.g.::
@authz.predicate('ROOT')
def is_root(user, **ctx):
# return True of user is in group "wheel".
"""
if func is None:
return functools.partial(self.predicate, name)
self.predicates[name] = func
return func | Define a new predicate (directly, or as a decorator).
E.g.::
@authz.predicate('ROOT')
def is_root(user, **ctx):
# return True of user is in group "wheel". | entailment |
def permission_set(self, name, func=None):
"""Define a new permission set (directly, or as a decorator).
E.g.::
@authz.permission_set('HTTP')
def is_http_perm(perm):
return perm.startswith('http.')
"""
if func is None:
return functools.partial(self.predicate, name)
self.permission_sets[name] = func
return func | Define a new permission set (directly, or as a decorator).
E.g.::
@authz.permission_set('HTTP')
def is_http_perm(perm):
return perm.startswith('http.') | entailment |
def route_acl(self, *acl, **options):
"""Decorator to attach an ACL to a route.
E.g::
@app.route('/url/to/view')
@authz.route_acl('''
ALLOW WHEEL ALL
DENY ANY ALL
''')
def my_admin_function():
pass
"""
def _route_acl(func):
func.__acl__ = acl
@functools.wraps(func)
def wrapped(*args, **kwargs):
permission = 'http.' + request.method.lower()
local_opts = options.copy()
local_opts.setdefault('default', current_app.config['ACL_ROUTE_DEFAULT_STATE'])
self.assert_can(permission, func, **local_opts)
return func(*args, **kwargs)
return wrapped
return _route_acl | Decorator to attach an ACL to a route.
E.g::
@app.route('/url/to/view')
@authz.route_acl('''
ALLOW WHEEL ALL
DENY ANY ALL
''')
def my_admin_function():
pass | entailment |
def can(self, permission, obj, **kwargs):
"""Check if we can do something with an object.
:param permission: The permission to look for.
:param obj: The object to check the ACL of.
:param **kwargs: The context to pass to predicates.
>>> auth.can('read', some_object)
>>> auth.can('write', another_object, group=some_group)
"""
context = {'user': current_user}
for func in self.context_processors:
context.update(func())
context.update(get_object_context(obj))
context.update(kwargs)
return check(permission, iter_object_acl(obj), **context) | Check if we can do something with an object.
:param permission: The permission to look for.
:param obj: The object to check the ACL of.
:param **kwargs: The context to pass to predicates.
>>> auth.can('read', some_object)
>>> auth.can('write', another_object, group=some_group) | entailment |
def assert_can(self, permission, obj, **kwargs):
"""Make sure we have a permission, or abort the request.
:param permission: The permission to look for.
:param obj: The object to check the ACL of.
:param flash: The message to flask if denied (keyword only).
:param stealth: Abort with a 404? (keyword only).
:param **kwargs: The context to pass to predicates.
"""
flash_message = kwargs.pop('flash', None)
stealth = kwargs.pop('stealth', False)
default = kwargs.pop('default', None)
res = self.can(permission, obj, **kwargs)
res = default if res is None else res
if not res:
if flash_message and not stealth:
flask.flash(flash_message, 'danger')
if current_user.is_authenticated():
if flash_message is not False:
flask.flash(flash_message or 'You are not permitted to "%s" this resource' % permission)
flask.abort(403)
elif not stealth and self.login_view:
if flash_message is not False:
flask.flash(flash_message or 'Please login for access.')
raise _Redirect(flask.url_for(self.login_view) + '?' + urlencode(dict(next=
flask.request.script_root + flask.request.path
)))
else:
flask.abort(404) | Make sure we have a permission, or abort the request.
:param permission: The permission to look for.
:param obj: The object to check the ACL of.
:param flash: The message to flask if denied (keyword only).
:param stealth: Abort with a 404? (keyword only).
:param **kwargs: The context to pass to predicates. | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.