sentence1: string (lengths 52 to 3.87M)
sentence2: string (lengths 1 to 47.2k)
label: string (1 class)
def update(self, request, key):
    """Set an email address as primary address."""
    request.UPDATE = http.QueryDict(request.body)
    email_addr = request.UPDATE.get('email')
    user_id = request.UPDATE.get('user')

    if not email_addr:
        return http.HttpResponseBadRequest()

    try:
        email = EmailAddress.objects.get(address=email_addr, user_id=user_id)
    except EmailAddress.DoesNotExist:
        raise http.Http404

    email.user.email = email_addr
    email.user.save()
    return http.HttpResponse(status=204)
Set an email address as primary address.
entailment
def is_logged(self, user):
    """Check if a logged-in user is trying to access the register page.
    If so, redirect them to their profile."""
    response = None
    if user.is_authenticated():
        if not user.needs_update:
            response = redirect('user_profile', username=user.username)
    return response
Check if a logged-in user is trying to access the register page. If so, redirect them to their profile.
entailment
def get_env_setting(setting):
    """Get the environment setting or raise an exception"""
    try:
        return os.environ[setting]
    except KeyError:
        error_msg = "Set the %s env variable" % setting
        raise ImproperlyConfigured(error_msg)
Get the environment setting or raise an exception
entailment
def parse_json(json_string, object_type, mappers):
    """
    Use the custom JsonDecoder and the conventions.mappers to recreate your
    custom object from the parsed JSON string. Just call this method with the
    json_string, your complete object_type, and your mappers dict.
    The mappers dict must contain the object_type (e.g. User) as the key; the
    value is a method that receives key, value (the key is the name of the
    object property to parse, the value holds the properties of the object).
    """
    obj = json.loads(json_string, cls=JsonDecoder,
                     object_mapper=mappers.get(object_type, None))
    if obj is not None:
        try:
            obj = object_type(**obj)
        except TypeError:
            initialize_dict, set_needed = Utils.make_initialize_dict(obj, object_type.__init__)
            o = object_type(**initialize_dict)
            if set_needed:
                for key, value in obj.items():
                    setattr(o, key, value)
            obj = o
    return obj
Use the custom JsonDecoder and the conventions.mappers to recreate your custom object from the parsed JSON string. Just call this method with the json_string, your complete object_type, and your mappers dict. The mappers dict must contain the object_type (e.g. User) as the key; the value is a method that receives key, value (the key is the name of the object property to parse, the value holds the properties of the object).
entailment
def validate_social_account(account, url):
    """Verifies if a social account is valid.

    Examples:

    >>> validate_social_account('seocam', 'http://twitter.com')
    True

    >>> validate_social_account('seocam-fake-should-fail', 'http://twitter.com')
    False
    """
    request = urllib2.Request(urlparse.urljoin(url, account))
    request.get_method = lambda: 'HEAD'

    try:
        response = urllib2.urlopen(request)
    except urllib2.HTTPError:
        return False

    return response.code == 200
Verifies if a social account is valid.

Examples:

>>> validate_social_account('seocam', 'http://twitter.com')
True

>>> validate_social_account('seocam-fake-should-fail', 'http://twitter.com')
False
entailment
def fitting_rmsd(w_fit, C_fit, r_fit, Xs):
    '''Calculate the RMSD of fitting.'''
    return np.sqrt(sum((geometry.point_line_distance(p, C_fit, w_fit) - r_fit) ** 2
                       for p in Xs) / len(Xs))
Calculate the RMSD of fitting.
entailment
def basic_parse(response, buf_size=ijson.backend.BUFSIZE):
    """
    Iterator yielding unprefixed events.

    Parameters:
    - response: a stream response from requests
    """
    lexer = iter(IncrementalJsonParser.lexer(response, buf_size))
    for value in ijson.backend.parse_value(lexer):
        yield value
    try:
        next(lexer)
    except StopIteration:
        pass
    else:
        raise ijson.common.JSONError('Additional data')
Iterator yielding unprefixed events. Parameters: - response: a stream response from requests
entailment
def connect_to_kafka(self, bootstrap_servers='127.0.0.1:9092',
                     auto_offset_reset='latest', client_id='Robot', **kwargs):
    """Connect to kafka

    - ``bootstrap_servers``: default 127.0.0.1:9092
    - ``client_id``: default: Robot
    """
    self.connect_consumer(
        bootstrap_servers=bootstrap_servers,
        auto_offset_reset=auto_offset_reset,
        client_id=client_id,
        **kwargs
    )
    self.connect_producer(bootstrap_servers=bootstrap_servers, client_id=client_id)
Connect to kafka - ``bootstrap_servers``: default 127.0.0.1:9092 - ``client_id``: default: Robot
entailment
def drop_connection(self, name, database=None):
    """
    Force the server to close the current client subscription connection.

    @param str name: The name of the subscription
    @param str database: The name of the database
    """
    request_executor = self._store.get_request_executor(database)
    command = DropSubscriptionConnectionCommand(name)
    request_executor.execute(command)
Force the server to close the current client subscription connection.
@param str name: The name of the subscription
@param str database: The name of the database
entailment
def execute_from_command_line(argv=None):
    """A simple method that runs a ManagementUtility."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "colab.settings")

    from django.conf import settings

    if not hasattr(settings, 'SECRET_KEY') and 'initconfig' in sys.argv:
        command = initconfig.Command()
        command.handle()
    else:
        utility = ManagementUtility(argv)
        utility.execute()
A simple method that runs a ManagementUtility.
entailment
def dashboard(request):
    """Dashboard page"""
    user = None
    if request.user.is_authenticated():
        user = User.objects.get(username=request.user)

    latest_results, count_types = get_collaboration_data(user)
    latest_results.sort(key=lambda elem: elem.modified, reverse=True)

    context = {
        'type_count': count_types,
        'latest_results': latest_results[:6],
    }
    return render(request, 'home.html', context)
Dashboard page
entailment
def normalize(v):
    '''Normalize a vector based on its 2 norm.'''
    if 0 == np.linalg.norm(v):
        return v
    return v / np.linalg.norm(v)
Normalize a vector based on its 2 norm.
entailment
def rotation_matrix_from_axis_and_angle(u, theta):
    '''Calculate a rotation matrix from an axis and an angle.'''
    x = u[0]
    y = u[1]
    z = u[2]
    s = np.sin(theta)
    c = np.cos(theta)

    return np.array([[c + x**2 * (1 - c), x * y * (1 - c) - z * s, x * z * (1 - c) + y * s],
                     [y * x * (1 - c) + z * s, c + y**2 * (1 - c), y * z * (1 - c) - x * s],
                     [z * x * (1 - c) - y * s, z * y * (1 - c) + x * s, c + z**2 * (1 - c)]])
Calculate a rotation matrix from an axis and an angle.
entailment
def point_line_distance(p, l_p, l_v):
    '''Calculate the distance between a point and a line defined
    by a point and a direction vector.
    '''
    l_v = normalize(l_v)
    u = p - l_p
    return np.linalg.norm(u - np.dot(u, l_v) * l_v)
Calculate the distance between a point and a line defined by a point and a direction vector.
entailment
def raw_query(self, query, query_parameters=None):
    """
    Get all the documents that match the query.

    @param str query: The RQL query
    @param dict query_parameters: Add query parameters to the query {key: value}
    """
    self.assert_no_raw_query()
    if (len(self._where_tokens) != 0 or len(self._select_tokens) != 0
            or len(self._order_by_tokens) != 0 or len(self._group_by_tokens) != 0):
        raise InvalidOperationException(
            "You can only use raw_query on a new query, without applying any operations "
            "(such as where, select, order_by, group_by, etc)")
    if query_parameters:
        self.query_parameters = query_parameters
    self._query = query
    return self
Get all the documents that match the query.
@param str query: The RQL query
@param dict query_parameters: Add query parameters to the query {key: value}
entailment
def where_equals(self, field_name, value, exact=False):
    """
    Get all the documents where the given field_name equals the value.

    @param str field_name: The field name in the index you want to query.
    @param value: The field value you want to query for
    @param bool exact: If True, get an exact match for the query
    """
    if field_name is None:
        raise ValueError("None field_name is invalid")
    field_name = Query.escape_if_needed(field_name)
    self._add_operator_if_needed()

    token = "equals"
    if self.negate:
        self.negate = False
        token = "not_equals"
    self.last_equality = {field_name: value}
    token = _Token(field_name=field_name, value=self.add_query_parameter(value),
                   token=token, exact=exact)
    token.write = self.rql_where_write(token)
    self._where_tokens.append(token)
    return self
Get all the documents where the given field_name equals the value.
@param str field_name: The field name in the index you want to query.
@param value: The field value you want to query for
@param bool exact: If True, get an exact match for the query
entailment
def where(self, exact=False, **kwargs):
    """
    Get all the documents where, for each kwargs key, the field equals the given value.

    @param bool exact: If True, get an exact match for the query
    @param kwargs: the keys of the kwargs are the field names in the index you
        want to query; the values are the field values you want to query for
        (if kwargs[field_name] is a list, this behaves like the where_in method)
    """
    for field_name in kwargs:
        if isinstance(kwargs[field_name], list):
            self.where_in(field_name, kwargs[field_name], exact)
        else:
            self.where_equals(field_name, kwargs[field_name], exact)
    return self
Get all the documents where, for each kwargs key, the field equals the given value.
@param bool exact: If True, get an exact match for the query
@param kwargs: the keys of the kwargs are the field names in the index you want to query; the values are the field values you want to query for (if kwargs[field_name] is a list, this behaves like the where_in method)
entailment
def search(self, field_name, search_terms, operator=QueryOperator.OR):
    """
    For more complex text searching.

    @param str field_name: The field name in the index you want to query.
    @param str search_terms: The terms you want to query for
    @param QueryOperator operator: OR or AND
    """
    if field_name is None:
        raise ValueError("None field_name is invalid")
    field_name = Query.escape_if_needed(field_name)
    self._add_operator_if_needed()
    self.negate_if_needed(field_name)
    self.last_equality = {
        field_name: "(" + search_terms + ")" if ' ' in search_terms else search_terms
    }
    token = _Token(field_name=field_name, token="search",
                   value=self.add_query_parameter(search_terms),
                   search_operator=operator)
    token.write = self.rql_where_write(token)
    self._where_tokens.append(token)
    return self
For more complex text searching.
@param str field_name: The field name in the index you want to query.
@param str search_terms: The terms you want to query for
@param QueryOperator operator: OR or AND
entailment
def where_ends_with(self, field_name, value):
    """
    Get all the documents where the given field_name ends with the value.

    @param str field_name: The field name in the index you want to query.
    @param str value: The field value you want to query for
    """
    if field_name is None:
        raise ValueError("None field_name is invalid")
    field_name = Query.escape_if_needed(field_name)
    self._add_operator_if_needed()
    self.negate_if_needed(field_name)
    self.last_equality = {field_name: value}
    token = _Token(field_name=field_name, token="endsWith",
                   value=self.add_query_parameter(value))
    token.write = self.rql_where_write(token)
    self._where_tokens.append(token)
    return self
Get all the documents where the given field_name ends with the value.
@param str field_name: The field name in the index you want to query.
@param str value: The field value you want to query for
entailment
def where_in(self, field_name, values, exact=False):
    """
    Check that the field has one of the specified values.

    @param str field_name: Name of the field
    @param str values: The values we wish to query
    @param bool exact: Getting the exact query (ex. case sensitive)
    """
    field_name = Query.escape_if_needed(field_name)
    self._add_operator_if_needed()
    self.negate_if_needed(field_name)
    token = _Token(field_name=field_name,
                   value=self.add_query_parameter(list(Utils.unpack_iterable(values))),
                   token="in", exact=exact)
    token.write = self.rql_where_write(token)
    self._where_tokens.append(token)
    return self
Check that the field has one of the specified values @param str field_name: Name of the field @param str values: The values we wish to query @param bool exact: Getting the exact query (ex. case sensitive)
entailment
def to_facets(self, facets, start=0, page_size=None):
    """
    Query the facets results for this query using the specified list of facets
    with the given start and pageSize.

    @param List[Facet] facets: List of facets
    @param int start: Start index for paging
    @param page_size: Paging PageSize. If set, overrides Facet.max_result
    """
    if len(facets) == 0:
        raise ValueError("Facets must contain at least one entry", "facets")
    str_query = self.__str__()
    facet_query = FacetQuery(str_query, None, facets, start, page_size,
                             query_parameters=self.query_parameters,
                             wait_for_non_stale_results=self.wait_for_non_stale_results,
                             wait_for_non_stale_results_timeout=self.timeout,
                             cutoff_etag=self.cutoff_etag)
    command = GetFacetsCommand(query=facet_query)
    return self.session.requests_executor.execute(command)
Query the facets results for this query using the specified list of facets with the given start and pageSize @param List[Facet] facets: List of facets @param int start: Start index for paging @param page_size: Paging PageSize. If set, overrides Facet.max_result
entailment
def show_G_distribution(data):
    '''Show the distribution of the G function.'''
    Xs, t = fitting.preprocess_data(data)

    Theta, Phi = np.meshgrid(np.linspace(0, np.pi, 50),
                             np.linspace(0, 2 * np.pi, 50))
    G = []
    for i in range(len(Theta)):
        G.append([])
        for j in range(len(Theta[i])):
            w = fitting.direction(Theta[i][j], Phi[i][j])
            G[-1].append(fitting.G(w, Xs))

    plt.imshow(G, extent=[0, np.pi, 0, 2 * np.pi], origin='lower')
    plt.show()
Show the distribution of the G function.
entailment
def show_fit(w_fit, C_fit, r_fit, Xs):
    '''Plot the fitting given the fitted axis direction, the fitted center,
    the fitted radius and the data points.
    '''
    fig = plt.figure()
    ax = fig.gca(projection='3d')

    # Plot the data points
    ax.scatter([X[0] for X in Xs], [X[1] for X in Xs], [X[2] for X in Xs])

    # Get the transformation matrix
    theta = np.arccos(np.dot(w_fit, np.array([0, 0, 1])))
    phi = np.arctan2(w_fit[1], w_fit[0])
    M = np.dot(rotation_matrix_from_axis_and_angle(np.array([0, 0, 1]), phi),
               rotation_matrix_from_axis_and_angle(np.array([0, 1, 0]), theta))

    # Plot the cylinder surface
    delta = np.linspace(-np.pi, np.pi, 20)
    z = np.linspace(-10, 10, 20)
    Delta, Z = np.meshgrid(delta, z)
    X = r_fit * np.cos(Delta)
    Y = r_fit * np.sin(Delta)
    for i in range(len(X)):
        for j in range(len(X[i])):
            p = np.dot(M, np.array([X[i][j], Y[i][j], Z[i][j]])) + C_fit
            X[i][j] = p[0]
            Y[i][j] = p[1]
            Z[i][j] = p[2]
    ax.plot_surface(X, Y, Z, alpha=0.2)

    # Plot the center and direction
    ax.quiver(C_fit[0], C_fit[1], C_fit[2],
              r_fit * w_fit[0], r_fit * w_fit[1], r_fit * w_fit[2], color='red')

    plt.show()
Plot the fitting given the fitted axis direction, the fitted center, the fitted radius and the data points.
entailment
def find_window(self, highlight_locations):
    """Get the HIGHLIGHT_NUM_CHARS_BEFORE_MATCH setting to find how many
    characters before the first word found should be removed from the window
    """
    if len(self.text_block) <= self.max_length:
        return (0, self.max_length)

    num_chars_before = getattr(
        settings,
        'HIGHLIGHT_NUM_CHARS_BEFORE_MATCH',
        0
    )

    best_start, best_end = super(ColabHighlighter, self).find_window(
        highlight_locations
    )
    if best_start <= num_chars_before:
        best_end -= best_start
        best_start = 0
    else:
        best_start -= num_chars_before
        best_end -= num_chars_before

    return (best_start, best_end)
Get the HIGHLIGHT_NUM_CHARS_BEFORE_MATCH setting to find how many characters before the first word found should be removed from the window
entailment
def __set(self, key, real_value, coded_value):
    """Private method for setting a cookie's value"""
    morse_set = self.get(key, StringMorsel())
    morse_set.set(key, real_value, coded_value)
    dict.__setitem__(self, key, morse_set)
Private method for setting a cookie's value
entailment
def login(self):
    """
    Try to login and set the internal session id.

    Please note:
    - Any failed login resets all existing session ids, even of other users.
    - SIDs expire after some time
    """
    response = self.session.get(self.base_url + '/login_sid.lua', timeout=10)
    xml = ET.fromstring(response.text)
    if xml.find('SID').text == "0000000000000000":
        challenge = xml.find('Challenge').text
        url = self.base_url + "/login_sid.lua"
        response = self.session.get(url, params={
            "username": self.username,
            "response": self.calculate_response(challenge, self.password),
        }, timeout=10)
        xml = ET.fromstring(response.text)
        sid = xml.find('SID').text
        if xml.find('SID').text == "0000000000000000":
            blocktime = int(xml.find('BlockTime').text)
            exc = Exception("Login failed, please wait {} seconds".format(
                blocktime
            ))
            exc.blocktime = blocktime
            raise exc
        self.sid = sid
        return sid
Try to login and set the internal session id. Please note: - Any failed login resets all existing session ids, even of other users. - SIDs expire after some time
entailment
def calculate_response(self, challenge, password):
    """Calculate response for the challenge-response authentication"""
    to_hash = (challenge + "-" + password).encode("UTF-16LE")
    hashed = hashlib.md5(to_hash).hexdigest()
    return "{0}-{1}".format(challenge, hashed)
Calculate response for the challenge-response authentication
entailment
def get_actors(self):
    """
    Returns a list of Actor objects for querying SmartHome devices.

    This is currently the only working method for getting temperature data.
    """
    devices = self.homeautoswitch("getdevicelistinfos")
    xml = ET.fromstring(devices)

    actors = []
    for device in xml.findall('device'):
        actors.append(Actor(fritzbox=self, device=device))

    return actors
Returns a list of Actor objects for querying SmartHome devices. This is currently the only working method for getting temperature data.
entailment
def get_actor_by_ain(self, ain):
    """
    Return an actor identified by its AIN, or None.
    """
    for actor in self.get_actors():
        if actor.actor_id == ain:
            return actor
Return an actor identified by its AIN, or None.
entailment
def homeautoswitch(self, cmd, ain=None, param=None):
    """
    Call a switch method.

    Should only be used by internal library functions.
    """
    assert self.sid, "Not logged in"
    params = {
        'switchcmd': cmd,
        'sid': self.sid,
    }
    if param is not None:
        params['param'] = param
    if ain:
        params['ain'] = ain

    url = self.base_url + '/webservices/homeautoswitch.lua'
    response = self.session.get(url, params=params, timeout=10)
    response.raise_for_status()
    return response.text.strip().encode('utf-8')
Call a switch method. Should only be used by internal library functions.
entailment
def get_switch_actors(self):
    """
    Get information about all actors

    This needs 1+(5n) requests where n = number of actors registered

    Deprecated, use get_actors instead.

    Returns a dict:
    [ain] = {
        'name': Name of actor,
        'state': Powerstate (boolean)
        'present': Connected to server? (boolean)
        'power': Current power consumption in mW
        'energy': Used energy in Wh since last energy reset
        'temperature': Current environment temperature in celsius
    }
    """
    actors = {}
    for ain in self.homeautoswitch("getswitchlist").split(','):
        actors[ain] = {
            'name': self.homeautoswitch("getswitchname", ain),
            'state': bool(self.homeautoswitch("getswitchstate", ain)),
            'present': bool(self.homeautoswitch("getswitchpresent", ain)),
            'power': self.homeautoswitch("getswitchpower", ain),
            'energy': self.homeautoswitch("getswitchenergy", ain),
            'temperature': self.homeautoswitch("getswitchtemperature", ain),
        }
    return actors
Get information about all actors This needs 1+(5n) requests where n = number of actors registered Deprecated, use get_actors instead. Returns a dict: [ain] = { 'name': Name of actor, 'state': Powerstate (boolean) 'present': Connected to server? (boolean) 'power': Current power consumption in mW 'energy': Used energy in Wh since last energy reset 'temperature': Current environment temperature in celsius }
entailment
def get_devices(self):
    """
    Return a list of devices.

    Deprecated, use get_actors instead.
    """
    url = self.base_url + '/net/home_auto_query.lua'
    response = self.session.get(url, params={
        'sid': self.sid,
        'command': 'AllOutletStates',
        'xhr': 0,
    }, timeout=15)
    response.raise_for_status()
    data = response.json()

    count = int(data["Outlet_count"])
    devices = []
    for i in range(1, count + 1):
        device = Device(
            int(data["DeviceID_{0}".format(i)]),
            int(data["DeviceConnectState_{0}".format(i)]),
            int(data["DeviceSwitchState_{0}".format(i)])
        )
        devices.append(device)
    return devices
Return a list of devices. Deprecated, use get_actors instead.
entailment
def get_consumption(self, deviceid, timerange="10"):
    """
    Return all available energy consumption data for the device.

    You need to divide watt_values by 100 and volt_values by 1000
    to get the "real" values.

    :return: dict
    """
    tranges = ("10", "24h", "month", "year")
    if timerange not in tranges:
        raise ValueError(
            "Unknown timerange. Possible values are: {0}".format(tranges)
        )

    url = self.base_url + "/net/home_auto_query.lua"
    response = self.session.get(url, params={
        'sid': self.sid,
        'command': 'EnergyStats_{0}'.format(timerange),
        'id': deviceid,
        'xhr': 0,
    }, timeout=15)
    response.raise_for_status()
    data = response.json()

    result = {}

    # Single result values
    values_map = {
        'MM_Value_Amp': 'mm_value_amp',
        'MM_Value_Power': 'mm_value_power',
        'MM_Value_Volt': 'mm_value_volt',
        'EnStats_average_value': 'enstats_average_value',
        'EnStats_max_value': 'enstats_max_value',
        'EnStats_min_value': 'enstats_min_value',
        'EnStats_timer_type': 'enstats_timer_type',
        'sum_Day': 'sum_day',
        'sum_Month': 'sum_month',
        'sum_Year': 'sum_year',
    }
    for avm_key, py_key in values_map.items():
        result[py_key] = int(data[avm_key])

    # Stats counts
    count = int(data["EnStats_count"])
    watt_values = [None for i in range(count)]
    volt_values = [None for i in range(count)]
    for i in range(1, count + 1):
        watt_values[i - 1] = int(data["EnStats_watt_value_{}".format(i)])
        volt_values[i - 1] = int(data["EnStats_volt_value_{}".format(i)])
    result['watt_values'] = watt_values
    result['volt_values'] = volt_values

    return result
Return all available energy consumption data for the device. You need to divide watt_values by 100 and volt_values by 1000 to get the "real" values. :return: dict
entailment
def get_logs(self):
    """
    Return the system logs since the last reboot.
    """
    assert BeautifulSoup, "Please install bs4 to use this method"

    url = self.base_url + "/system/syslog.lua"
    response = self.session.get(url, params={
        'sid': self.sid,
        'stylemode': 'print',
    }, timeout=15)
    response.raise_for_status()

    entries = []
    tree = BeautifulSoup(response.text)
    rows = tree.find('table').find_all('tr')
    for row in rows:
        columns = row.find_all("td")
        date = columns[0].string
        time = columns[1].string
        message = columns[2].find("a").string
        merged = "{} {} {}".format(date, time, message.encode("UTF-8"))
        msg_hash = hashlib.md5(merged).hexdigest()
        entries.append(LogEntry(date, time, message, msg_hash))
    return entries
Return the system logs since the last reboot.
entailment
def seen_nonce(id, nonce, timestamp):
    """
    Returns True if the Hawk nonce has been seen already.
    """
    key = '{id}:{n}:{ts}'.format(id=id, n=nonce, ts=timestamp)
    if cache.get(key):
        log.warning('replay attack? already processed nonce {k}'
                    .format(k=key))
        return True
    else:
        log.debug('caching nonce {k}'.format(k=key))
        cache.set(key, True,
                  # We only need the nonce until the message itself expires.
                  # This also adds a little bit of padding.
                  timeout=getattr(settings, 'HAWK_MESSAGE_EXPIRATION',
                                  default_message_expiration) + 5)
        return False
Returns True if the Hawk nonce has been seen already.
entailment
def cli(context, host, username, password):
    """
    FritzBox SmartHome Tool

    \b
    Provides the following functions:
    - An easy-to-use library for querying SmartHome actors
    - This CLI tool for testing
    - A carbon client for piping data into graphite
    """
    context.obj = FritzBox(host, username, password)
FritzBox SmartHome Tool

\b
Provides the following functions:
- An easy-to-use library for querying SmartHome actors
- This CLI tool for testing
- A carbon client for piping data into graphite
entailment
def actors(context):
    """Display a list of actors"""
    fritz = context.obj
    fritz.login()

    for actor in fritz.get_actors():
        click.echo("{} ({} {}; AIN {} )".format(
            actor.name,
            actor.manufacturer,
            actor.productname,
            actor.actor_id,
        ))
        if actor.has_temperature:
            click.echo("  Temp: act {} target {}; battery (low): {}".format(
                actor.temperature,
                actor.target_temperature,
                actor.battery_low,
            ))
            click.echo("  Temp (via get): act {} target {}".format(
                actor.get_temperature(),
                actor.get_target_temperature(),
            ))
Display a list of actors
entailment
def energy(context, features):
    """Display energy stats of all actors"""
    fritz = context.obj
    fritz.login()

    for actor in fritz.get_actors():
        if actor.temperature is not None:
            click.echo("{} ({}): {:.2f} Watt current, {:.3f} wH total, {:.2f} °C".format(
                actor.name.encode('utf-8'),
                actor.actor_id,
                (actor.get_power() or 0.0) / 1000,
                (actor.get_energy() or 0.0) / 100,
                actor.temperature
            ))
        else:
            click.echo("{} ({}): {:.2f} Watt current, {:.3f} wH total, offline".format(
                actor.name.encode('utf-8'),
                actor.actor_id,
                (actor.get_power() or 0.0) / 1000,
                (actor.get_energy() or 0.0) / 100
            ))
        if features:
            click.echo(" Features: PowerMeter: {}, Temperatur: {}, Switch: {}".format(
                actor.has_powermeter, actor.has_temperature, actor.has_switch
            ))
Display energy stats of all actors
entailment
def graphite(context, server, port, interval, prefix):
    """Display energy stats of all actors"""
    fritz = context.obj
    fritz.login()
    sid_ttl = time.time() + 600

    # Find actors and create carbon keys
    click.echo(" * Requesting actors list")
    simple_chars = re.compile('[^A-Za-z0-9]+')
    actors = fritz.get_actors()
    keys = {}
    for actor in actors:
        keys[actor.name] = "{}.{}".format(
            prefix, simple_chars.sub('_', actor.name)
        )

    # Connect to carbon
    click.echo(" * Trying to connect to carbon")
    timeout = 2
    sock = socket.socket()
    sock.settimeout(timeout)
    try:
        sock.connect((server, port))
    except socket.timeout:
        raise Exception("Took over {} second(s) to connect to {}".format(
            timeout, server
        ))
    except Exception as error:
        raise Exception("unknown exception while connecting to {} - {}".format(
            server, error
        ))

    def send(key, value):
        """Send a key-value-pair to carbon"""
        now = int(time.time())
        payload = "{} {} {}\n".format(key, value, now)
        sock.sendall(payload)

    while True:
        if time.time() > sid_ttl:
            click.echo(" * Requesting new SID")
            fritz.login()
            sid_ttl = time.time() + 600

        click.echo(" * Requesting statistics")
        for actor in actors:
            power = actor.get_power()
            total = actor.get_energy()
            click.echo(" -> {}: {:.2f} Watt current, {:.3f} wH total".format(
                actor.name, power / 1000, total / 100
            ))
            send(keys[actor.name] + '.current', power)
            send(keys[actor.name] + '.total', total)
        time.sleep(interval)
Display energy stats of all actors
entailment
def switch_on(context, ain):
    """Switch an actor's power to ON"""
    context.obj.login()
    actor = context.obj.get_actor_by_ain(ain)
    if actor:
        click.echo("Switching {} on".format(actor.name))
        actor.switch_on()
    else:
        click.echo("Actor not found: {}".format(ain))
Switch an actor's power to ON
entailment
def switch_state(context, ain):
    """Get an actor's power state"""
    context.obj.login()
    actor = context.obj.get_actor_by_ain(ain)
    if actor:
        click.echo("State for {} is: {}".format(ain, 'ON' if actor.get_state() else 'OFF'))
    else:
        click.echo("Actor not found: {}".format(ain))
Get an actor's power state
entailment
def switch_toggle(context, ain):
    """Toggle an actor's power state"""
    context.obj.login()
    actor = context.obj.get_actor_by_ain(ain)
    if actor:
        if actor.get_state():
            actor.switch_off()
            click.echo("State for {} is now OFF".format(ain))
        else:
            actor.switch_on()
            click.echo("State for {} is now ON".format(ain))
    else:
        click.echo("Actor not found: {}".format(ain))
Toggle an actor's power state
entailment
def logs(context, format):
    """Show system logs since last reboot"""
    fritz = context.obj
    fritz.login()

    messages = fritz.get_logs()

    if format == "plain":
        for msg in messages:
            merged = "{} {} {}".format(msg.date, msg.time, msg.message.encode("UTF-8"))
            click.echo(merged)

    if format == "json":
        entries = [msg._asdict() for msg in messages]
        click.echo(json.dumps({
            "entries": entries,
        }))
Show system logs since last reboot
entailment
def get_power(self):
    """
    Returns the current power usage in milliWatts.

    Attention: Returns None if the value can't be queried or is unknown.
    """
    value = self.box.homeautoswitch("getswitchpower", self.actor_id)
    return int(value) if value.isdigit() else None
Returns the current power usage in milliWatts. Attention: Returns None if the value can't be queried or is unknown.
entailment
def get_energy(self):
    """
    Returns the consumed energy since the start of the statistics in Wh.

    Attention: Returns None if the value can't be queried or is unknown.
    """
    value = self.box.homeautoswitch("getswitchenergy", self.actor_id)
    return int(value) if value.isdigit() else None
Returns the consumed energy since the start of the statistics in Wh. Attention: Returns None if the value can't be queried or is unknown.
entailment
def get_temperature(self):
    """
    Returns the current environment temperature.

    Attention: Returns None if the value can't be queried or is unknown.
    """
    # raise NotImplementedError("This should work according to the AVM docs, but don't...")
    value = self.box.homeautoswitch("gettemperature", self.actor_id)
    if value.isdigit():
        self.temperature = float(value) / 10
    else:
        self.temperature = None
    return self.temperature
Returns the current environment temperature. Attention: Returns None if the value can't be queried or is unknown.
entailment
def get_target_temperature(self):
    """
    Returns the actual target temperature.

    Attention: Returns None if the value can't be queried or is unknown.
    """
    value = self.box.homeautoswitch("gethkrtsoll", self.actor_id)
    self.target_temperature = self.__get_temp(value)
    return self.target_temperature
Returns the actual target temperature. Attention: Returns None if the value can't be queried or is unknown.
entailment
def set_temperature(self, temp):
    """
    Sets the temperature in celsius
    """
    # Temperature is sent to the fritz.box in a slightly odd encoding
    param = 16 + ((temp - 8) * 2)

    if param < 16:
        param = 253
        logger.info("Actor " + self.name + ": Temperature control set to off")
    elif param >= 56:
        param = 254
        logger.info("Actor " + self.name + ": Temperature control set to on")
    else:
        logger.info("Actor " + self.name + ": Temperature control set to " + str(temp))

    return self.box.homeautoswitch("sethkrtsoll", self.actor_id, param)
Sets the temperature in celsius
entailment
def get_openshift_base_uri(self):
    """
    https://<host>[:<port>]/

    :return: str
    """
    deprecated_key = "openshift_uri"
    key = "openshift_url"
    val = self._get_value(deprecated_key, self.conf_section, deprecated_key)
    if val is not None:
        warnings.warn("%r is deprecated, use %r instead" % (deprecated_key, key))
        return val

    return self._get_value(key, self.conf_section, key)
https://<host>[:<port>]/ :return: str
entailment
def get_builder_openshift_url(self):
    """url of OpenShift where builder will connect"""
    key = "builder_openshift_url"
    url = self._get_deprecated(key, self.conf_section, key)
    if url is None:
        logger.warning("%r not found, falling back to get_openshift_base_uri()", key)
        url = self.get_openshift_base_uri()
    return url
url of OpenShift where builder will connect
entailment
def generate_nodeselector_dict(self, nodeselector_str):
    """
    helper method for generating nodeselector dict

    :param nodeselector_str:
    :return: dict
    """
    nodeselector = {}
    if nodeselector_str and nodeselector_str != 'none':
        constraints = [x.strip() for x in nodeselector_str.split(',')]
        raw_nodeselector = dict([constraint.split('=', 1) for constraint in constraints])
        nodeselector = dict([k.strip(), v.strip()] for (k, v) in raw_nodeselector.items())
    return nodeselector
helper method for generating nodeselector dict :param nodeselector_str: :return: dict
entailment
def get_platform_node_selector(self, platform):
    """
    search the configuration for entries of the form node_selector.platform

    :param platform: str, platform to search for, can be null
    :return: dict
    """
    nodeselector = {}
    if platform:
        nodeselector_str = self._get_value("node_selector." + platform,
                                           self.conf_section,
                                           "node_selector." + platform)
        nodeselector = self.generate_nodeselector_dict(nodeselector_str)
    return nodeselector
search the configuration for entries of the form node_selector.platform :param platform: str, platform to search for, can be null :return dict
entailment
def load(self):
    """
    Extract tabular data as |TableData| instances from a CSV file.
    |load_source_desc_file|

    :return:
        Loaded table data.
        |load_table_name_desc|

        ===================  ========================================
        Format specifier     Value after the replacement
        ===================  ========================================
        ``%(filename)s``     |filename_desc|
        ``%(format_name)s``  ``"csv"``
        ``%(format_id)s``    |format_id_desc|
        ``%(global_id)s``    |global_id|
        ===================  ========================================
    :rtype: |TableData| iterator
    :raises pytablereader.DataError:
        If the CSV data is invalid.

    .. seealso:: :py:func:`csv.reader`
    """
    self._validate()
    self._logger.logging_load()

    self.encoding = get_file_encoding(self.source, self.encoding)

    if six.PY3:
        self._csv_reader = csv.reader(
            io.open(self.source, "r", encoding=self.encoding),
            delimiter=self.delimiter,
            quotechar=self.quotechar,
            strict=True,
            skipinitialspace=True,
        )
    else:
        self._csv_reader = csv.reader(
            _utf_8_encoder(io.open(self.source, "r", encoding=self.encoding)),
            delimiter=self.delimiter,
            quotechar=self.quotechar,
            strict=True,
            skipinitialspace=True,
        )

    formatter = CsvTableFormatter(self._to_data_matrix())
    formatter.accept(self)

    return formatter.to_table_data()
Extract tabular data as |TableData| instances from a CSV file.
|load_source_desc_file|

:return:
    Loaded table data.
    |load_table_name_desc|

    ===================  ========================================
    Format specifier     Value after the replacement
    ===================  ========================================
    ``%(filename)s``     |filename_desc|
    ``%(format_name)s``  ``"csv"``
    ``%(format_id)s``    |format_id_desc|
    ``%(global_id)s``    |global_id|
    ===================  ========================================
:rtype: |TableData| iterator
:raises pytablereader.DataError:
    If the CSV data is invalid.

.. seealso:: :py:func:`csv.reader`
entailment
def load(self):
    """
    Extract tabular data as |TableData| instances from a CSV text object.
    |load_source_desc_text|

    :return:
        Loaded table data.
        |load_table_name_desc|

        ===================  ========================================
        Format specifier     Value after the replacement
        ===================  ========================================
        ``%(filename)s``     ``""``
        ``%(format_name)s``  ``"csv"``
        ``%(format_id)s``    |format_id_desc|
        ``%(global_id)s``    |global_id|
        ===================  ========================================
    :rtype: |TableData| iterator
    :raises pytablereader.DataError:
        If the CSV data is invalid.

    .. seealso:: :py:func:`csv.reader`
    """
    self._validate()
    self._logger.logging_load()

    self._csv_reader = csv.reader(
        six.StringIO(self.source.strip()),
        delimiter=self.delimiter,
        quotechar=self.quotechar,
        strict=True,
        skipinitialspace=True,
    )

    formatter = CsvTableFormatter(self._to_data_matrix())
    formatter.accept(self)

    return formatter.to_table_data()
Extract tabular data as |TableData| instances from a CSV text object.
|load_source_desc_text|

:return:
    Loaded table data.
    |load_table_name_desc|

    ===================  ========================================
    Format specifier     Value after the replacement
    ===================  ========================================
    ``%(filename)s``     ``""``
    ``%(format_name)s``  ``"csv"``
    ``%(format_id)s``    |format_id_desc|
    ``%(global_id)s``    |global_id|
    ===================  ========================================
:rtype: |TableData| iterator
:raises pytablereader.DataError:
    If the CSV data is invalid.

.. seealso:: :py:func:`csv.reader`
entailment
def set_params(self, **kwargs):
    """
    set parameters according to specification

    these parameters are accepted:

    :param pulp_secret: str, resource name of pulp secret
    :param koji_target: str, koji tag with packages used to build the image
    :param kojiroot: str, URL from which koji packages are fetched
    :param kojihub: str, URL of the koji hub
    :param koji_certs_secret: str, resource name of secret that holds the koji certificates
    :param koji_task_id: int, Koji Task that created this build config
    :param flatpak: if we should build a Flatpak OCI Image
    :param filesystem_koji_task_id: int, Koji Task that created the base filesystem
    :param pulp_registry: str, name of pulp registry in dockpulp.conf
    :param sources_command: str, command used to fetch dist-git sources
    :param architecture: str, architecture we are building for
    :param vendor: str, vendor name
    :param build_host: str, host the build will run on or None for auto
    :param authoritative_registry: str, the docker registry authoritative for this image
    :param distribution_scope: str, distribution scope for this image
                               (private, authoritative-source-only, restricted, public)
    :param use_auth: bool, use auth from atomic-reactor?
    :param platform_node_selector: dict, a nodeselector for a specific platform
    :param platform_descriptors: dict, platforms and their architectures and enable_v1 settings
    :param scratch_build_node_selector: dict, a nodeselector for scratch builds
    :param explicit_build_node_selector: dict, a nodeselector for explicit builds
    :param auto_build_node_selector: dict, a nodeselector for auto builds
    :param isolated_build_node_selector: dict, a nodeselector for isolated builds
    :param is_auto: bool, indicates if build is auto build
    :param parent_images_digests: dict, mapping image names with tags to platform-specific
        digests, example:
        {'registry.fedorahosted.org/fedora:29': {
            'x86_64': 'registry.fedorahosted.org/fedora@sha256:....'}
        }
    """
    # Here we cater to the koji "scratch" build type, this will disable
    # all plugins that might cause importing of data to koji
    self.scratch = kwargs.pop('scratch', False)
    # When true, it indicates build was automatically started by
    # OpenShift via a trigger, for instance ImageChangeTrigger
    self.is_auto = kwargs.pop('is_auto', False)
    # An isolated build is meant to patch a certain release and not
    # update transient tags in container registry
    self.isolated = kwargs.pop('isolated', False)

    self.validate_build_variation()

    self.base_image = kwargs.get('base_image')
    self.platform_node_selector = kwargs.get('platform_node_selector', {})
    self.platform_descriptors = kwargs.get('platform_descriptors', {})
    self.scratch_build_node_selector = kwargs.get('scratch_build_node_selector', {})
    self.explicit_build_node_selector = kwargs.get('explicit_build_node_selector', {})
    self.auto_build_node_selector = kwargs.get('auto_build_node_selector', {})
    self.isolated_build_node_selector = kwargs.get('isolated_build_node_selector', {})

    logger.debug("setting params '%s' for %s", kwargs, self.spec)
    self.spec.set_params(**kwargs)
    self.osbs_api = kwargs.pop('osbs_api')
set parameters according to specification these parameters are accepted: :param pulp_secret: str, resource name of pulp secret :param koji_target: str, koji tag with packages used to build the image :param kojiroot: str, URL from which koji packages are fetched :param kojihub: str, URL of the koji hub :param koji_certs_secret: str, resource name of secret that holds the koji certificates :param koji_task_id: int, Koji Task that created this build config :param flatpak: if we should build a Flatpak OCI Image :param filesystem_koji_task_id: int, Koji Task that created the base filesystem :param pulp_registry: str, name of pulp registry in dockpulp.conf :param sources_command: str, command used to fetch dist-git sources :param architecture: str, architecture we are building for :param vendor: str, vendor name :param build_host: str, host the build will run on or None for auto :param authoritative_registry: str, the docker registry authoritative for this image :param distribution_scope: str, distribution scope for this image (private, authoritative-source-only, restricted, public) :param use_auth: bool, use auth from atomic-reactor? :param platform_node_selector: dict, a nodeselector for a specific platform :param platform_descriptors: dict, platforms and their archiectures and enable_v1 settings :param scratch_build_node_selector: dict, a nodeselector for scratch builds :param explicit_build_node_selector: dict, a nodeselector for explicit builds :param auto_build_node_selector: dict, a nodeselector for auto builds :param isolated_build_node_selector: dict, a nodeselector for isolated builds :param is_auto: bool, indicates if build is auto build :param parent_images_digests: dict, mapping image names with tags to platform specific digests, example: {'registry.fedorahosted.org/fedora:29': { x86_64': 'registry.fedorahosted.org/fedora@sha256:....'} }
entailment
def has_ist_trigger(self):
    """Return True if this BuildConfig has ImageStreamTag trigger."""
    triggers = self.template['spec'].get('triggers', [])
    if not triggers:
        return False
    for trigger in triggers:
        if trigger['type'] == 'ImageChange' and \
                trigger['imageChange']['from']['kind'] == 'ImageStreamTag':
            return True
    return False
Return True if this BuildConfig has ImageStreamTag trigger.
entailment
def set_secret_for_plugin(self, secret, plugin=None, mount_path=None):
    """
    Sets secret for plugin, if no plugin specified it will also set general secret

    :param secret: str, secret name
    :param plugin: tuple, (plugin type, plugin name, argument name)
    :param mount_path: str, mount path of secret
    """
    has_plugin_conf = False
    if plugin is not None:
        has_plugin_conf = self.dj.dock_json_has_plugin_conf(plugin[0], plugin[1])

    if 'secrets' in self.template['spec']['strategy']['customStrategy']:
        if not plugin or has_plugin_conf:
            custom = self.template['spec']['strategy']['customStrategy']
            if mount_path:
                secret_path = mount_path
            else:
                secret_path = os.path.join(SECRETS_PATH, secret)

            logger.info("Configuring %s secret at %s", secret, secret_path)
            existing = [secret_mount for secret_mount in custom['secrets']
                        if secret_mount['secretSource']['name'] == secret]
            if existing:
                logger.debug("secret %s already set", secret)
            else:
                custom['secrets'].append({
                    'secretSource': {
                        'name': secret,
                    },
                    'mountPath': secret_path,
                })

            # there's no need to set args if no plugin secret specified
            # this is used in tag_and_push plugin, as it sets secret path
            # for each registry separately
            if plugin and plugin[2] is not None:
                self.dj.dock_json_set_arg(*(plugin + (secret_path,)))
        else:
            logger.debug("not setting secret for unused plugin %s", plugin[1])
Sets secret for plugin, if no plugin specified it will also set general secret :param secret: str, secret name :param plugin: tuple, (plugin type, plugin name, argument name) :param mount_path: str, mount path of secret
entailment
def set_secrets(self, secrets):
    """
    :param secrets: dict, {(plugin type, plugin name, argument name): secret name}
        for example {('exit_plugins', 'koji_promote', 'koji_ssl_certs'): 'koji_ssl_certs', ...}
    """
    secret_set = False
    for (plugin, secret) in secrets.items():
        if not isinstance(plugin, tuple) or len(plugin) != 3:
            raise ValueError('got "%s" as secrets key, need 3-tuple' % plugin)
        if secret is not None:
            if isinstance(secret, list):
                for secret_item in secret:
                    self.set_secret_for_plugin(secret_item, plugin=plugin)
            else:
                self.set_secret_for_plugin(secret, plugin=plugin)
            secret_set = True
    if not secret_set:
        # remove references to secret if no secret was set
        if 'secrets' in self.template['spec']['strategy']['customStrategy']:
            del self.template['spec']['strategy']['customStrategy']['secrets']
:param secrets: dict, {(plugin type, plugin name, argument name): secret name} for example {('exit_plugins', 'koji_promote', 'koji_ssl_certs'): 'koji_ssl_certs', ...}
entailment
def remove_tag_and_push_registries(tag_and_push_registries, version):
    """
    Remove matching entries from tag_and_push_registries (in-place)

    :param tag_and_push_registries: dict, uri -> dict
    :param version: str, 'version' to match against
    """
    registries = [uri for uri, regdict in tag_and_push_registries.items()
                  if regdict['version'] == version]
    for registry in registries:
        logger.info("removing %s registry: %s", version, registry)
        del tag_and_push_registries[registry]
Remove matching entries from tag_and_push_registries (in-place) :param tag_and_push_registries: dict, uri -> dict :param version: str, 'version' to match against
entailment
def adjust_for_registry_api_versions(self):
    """
    Enable/disable plugins depending on supported registry API versions
    """
    versions = self.spec.registry_api_versions.value
    if 'v2' not in versions:
        raise OsbsValidationException('v1-only docker registry API is not supported')

    try:
        push_conf = self.dj.dock_json_get_plugin_conf('postbuild_plugins', 'tag_and_push')
        tag_and_push_registries = push_conf['args']['registries']
    except (KeyError, IndexError):
        tag_and_push_registries = {}

    if 'v1' not in versions:
        # Remove v1-only plugins
        for phase, name in [('postbuild_plugins', 'pulp_push')]:
            logger.info("removing v1-only plugin: %s", name)
            self.dj.remove_plugin(phase, name)

        # remove extra tag_and_push config
        self.remove_tag_and_push_registries(tag_and_push_registries, 'v1')

    # Remove 'version' from tag_and_push plugin config as it's no
    # longer needed
    for regdict in tag_and_push_registries.values():
        if 'version' in regdict:
            del regdict['version']
Enable/disable plugins depending on supported registry API versions
entailment
def adjust_for_triggers(self):
    """Remove trigger-related plugins when needed

    If there are no triggers defined, it's assumed the feature is disabled
    and all trigger-related plugins are removed.

    If there are triggers defined, and this is a custom base image,
    some trigger-related plugins do not apply.

    Additionally, this method ensures that custom base images never have
    triggers since triggering a base image rebuild is not a valid scenario.
    """
    triggers = self.template['spec'].get('triggers', [])

    remove_plugins = [
        ("prebuild_plugins", "check_and_set_rebuild"),
        ("prebuild_plugins", "stop_autorebuild_if_disabled"),
    ]

    should_remove = False
    if triggers and (self.is_custom_base_image() or self.is_from_scratch_image()):
        if self.is_custom_base_image():
            msg = "removing %s from request because custom base image"
        elif self.is_from_scratch_image():
            msg = 'removing %s from request because FROM scratch image'

        del self.template['spec']['triggers']
        should_remove = True
    elif not triggers:
        msg = "removing %s from request because there are no triggers"
        should_remove = True

    if should_remove:
        for when, which in remove_plugins:
            logger.info(msg, which)
            self.dj.remove_plugin(when, which)
Remove trigger-related plugins when needed If there are no triggers defined, it's assumed the feature is disabled and all trigger-related plugins are removed. If there are triggers defined, and this is a custom base image, some trigger-related plugins do not apply. Additionally, this method ensures that custom base images never have triggers since triggering a base image rebuild is not a valid scenario.
entailment
def adjust_for_scratch(self):
    """
    Remove certain plugins in order to handle the "scratch build" scenario.

    Scratch builds must not affect subsequent builds,
    and should not be imported into Koji.
    """
    if self.scratch:
        self.template['spec'].pop('triggers', None)

        remove_plugins = [
            ("prebuild_plugins", "koji_parent"),
            ("postbuild_plugins", "compress"),  # required only to make an archive for Koji
            ("postbuild_plugins", "pulp_pull"),  # required only to make an archive for Koji
            ("postbuild_plugins", "koji_upload"),
            ("postbuild_plugins", "fetch_worker_metadata"),
            ("postbuild_plugins", "compare_components"),
            ("postbuild_plugins", "import_image"),
            ("exit_plugins", "koji_promote"),
            ("exit_plugins", "koji_import"),
            ("exit_plugins", "koji_tag_build"),
            ("exit_plugins", "remove_worker_metadata"),
            ("exit_plugins", "import_image"),
        ]

        if not self.has_tag_suffixes_placeholder():
            remove_plugins.append(("postbuild_plugins", "tag_from_config"))

        for when, which in remove_plugins:
            logger.info("removing %s from scratch build request", which)
            self.dj.remove_plugin(when, which)

        if self.dj.dock_json_has_plugin_conf('postbuild_plugins', 'tag_by_labels'):
            self.dj.dock_json_set_arg('postbuild_plugins', 'tag_by_labels',
                                      'unique_tag_only', True)

        self.set_label('scratch', 'true')
Remove certain plugins in order to handle the "scratch build" scenario. Scratch builds must not affect subsequent builds, and should not be imported into Koji.
entailment
def adjust_for_custom_base_image(self):
    """
    Disable plugins to handle builds depending on whether
    or not this is a build from a custom base image.
    """
    plugins = []
    if self.is_custom_base_image():
        # Plugins irrelevant to building base images.
        plugins.append(("prebuild_plugins", "pull_base_image"))
        plugins.append(("prebuild_plugins", "koji_parent"))
        plugins.append(("prebuild_plugins", "inject_parent_image"))
        msg = "removing %s from custom image build request"
    else:
        # Plugins not needed for building non base images.
        plugins.append(("prebuild_plugins", "add_filesystem"))
        msg = "removing %s from non custom image build request"

    for when, which in plugins:
        logger.info(msg, which)
        self.dj.remove_plugin(when, which)
Disable plugins to handle builds depending on whether or not this is a build from a custom base image.
entailment
def render_koji(self):
    """
    if there is yum repo specified, don't pick stuff from koji
    """
    phase = 'prebuild_plugins'
    plugin = 'koji'
    if not self.dj.dock_json_has_plugin_conf(phase, plugin):
        return

    if self.spec.yum_repourls.value:
        logger.info("removing koji from request "
                    "because there is yum repo specified")
        self.dj.remove_plugin(phase, plugin)
    elif not (self.spec.koji_target.value and
              self.spec.kojiroot.value and
              self.spec.kojihub.value):
        logger.info("removing koji from request as not specified")
        self.dj.remove_plugin(phase, plugin)
    else:
        self.dj.dock_json_set_arg(phase, plugin, "target", self.spec.koji_target.value)
        self.dj.dock_json_set_arg(phase, plugin, "root", self.spec.kojiroot.value)
        self.dj.dock_json_set_arg(phase, plugin, "hub", self.spec.kojihub.value)
        if self.spec.proxy.value:
            self.dj.dock_json_set_arg(phase, plugin, "proxy", self.spec.proxy.value)
if there is yum repo specified, don't pick stuff from koji
entailment
def render_bump_release(self):
    """
    If the bump_release plugin is present, configure it
    """
    phase = 'prebuild_plugins'
    plugin = 'bump_release'
    if not self.dj.dock_json_has_plugin_conf(phase, plugin):
        return

    if self.spec.release.value:
        logger.info('removing %s from request as release already specified', plugin)
        self.dj.remove_plugin(phase, plugin)
        return

    hub = self.spec.kojihub.value
    if not hub:
        logger.info('removing %s from request as koji hub not specified', plugin)
        self.dj.remove_plugin(phase, plugin)
        return

    self.dj.dock_json_set_arg(phase, plugin, 'hub', hub)

    # For flatpak, we want a name-version-release of
    # <name>-<stream>-<module_build_version>.<n>, where the .<n> makes
    # sure that the build is unique in Koji
    if self.spec.flatpak.value:
        self.dj.dock_json_set_arg(phase, plugin, 'append', True)
If the bump_release plugin is present, configure it
entailment
def render_sendmail(self):
    """
    if we have smtp_host and smtp_from, configure sendmail plugin,
    else remove it
    """
    phase = 'exit_plugins'
    plugin = 'sendmail'
    if not self.dj.dock_json_has_plugin_conf(phase, plugin):
        return

    if self.spec.smtp_host.value and self.spec.smtp_from.value:
        self.dj.dock_json_set_arg(phase, plugin, 'url',
                                  self.spec.builder_openshift_url.value)
        self.dj.dock_json_set_arg(phase, plugin, 'smtp_host',
                                  self.spec.smtp_host.value)
        self.dj.dock_json_set_arg(phase, plugin, 'from_address',
                                  self.spec.smtp_from.value)
    else:
        logger.info("removing sendmail from request, "
                    "requires smtp_host and smtp_from")
        self.dj.remove_plugin(phase, plugin)
        return

    if self.spec.kojihub.value and self.spec.kojiroot.value:
        self.dj.dock_json_set_arg(phase, plugin, 'koji_hub', self.spec.kojihub.value)
        self.dj.dock_json_set_arg(phase, plugin, "koji_root", self.spec.kojiroot.value)

    if self.spec.smtp_to_submitter.value:
        self.dj.dock_json_set_arg(phase, plugin, 'to_koji_submitter',
                                  self.spec.smtp_to_submitter.value)
    if self.spec.smtp_to_pkgowner.value:
        self.dj.dock_json_set_arg(phase, plugin, 'to_koji_pkgowner',
                                  self.spec.smtp_to_pkgowner.value)
    if self.spec.smtp_additional_addresses.value:
        self.dj.dock_json_set_arg(phase, plugin, 'additional_addresses',
                                  self.spec.smtp_additional_addresses.value)
    if self.spec.smtp_error_addresses.value:
        self.dj.dock_json_set_arg(phase, plugin, 'error_addresses',
                                  self.spec.smtp_error_addresses.value)
    if self.spec.smtp_email_domain.value:
        self.dj.dock_json_set_arg(phase, plugin, 'email_domain',
                                  self.spec.smtp_email_domain.value)
if we have smtp_host and smtp_from, configure sendmail plugin, else remove it
entailment
def render_fetch_maven_artifacts(self):
    """Configure fetch_maven_artifacts plugin"""
    phase = 'prebuild_plugins'
    plugin = 'fetch_maven_artifacts'
    if not self.dj.dock_json_has_plugin_conf(phase, plugin):
        return

    koji_hub = self.spec.kojihub.value
    koji_root = self.spec.kojiroot.value
    if not koji_hub and not koji_root:
        logger.info('Removing %s because kojihub and kojiroot were not specified', plugin)
        self.dj.remove_plugin(phase, plugin)
        return

    self.dj.dock_json_set_arg(phase, plugin, 'koji_hub', koji_hub)
    self.dj.dock_json_set_arg(phase, plugin, "koji_root", koji_root)

    if self.spec.artifacts_allowed_domains.value:
        self.dj.dock_json_set_arg(phase, plugin, 'allowed_domains',
                                  self.spec.artifacts_allowed_domains.value)
Configure fetch_maven_artifacts plugin
entailment
def render_tag_from_config(self):
    """Configure tag_from_config plugin"""
    phase = 'postbuild_plugins'
    plugin = 'tag_from_config'
    if not self.has_tag_suffixes_placeholder():
        return

    unique_tag = self.spec.image_tag.value.split(':')[-1]
    tag_suffixes = {'unique': [unique_tag], 'primary': []}

    if self.spec.build_type.value == BUILD_TYPE_ORCHESTRATOR:
        if self.scratch:
            pass
        elif self.isolated:
            tag_suffixes['primary'].extend(['{version}-{release}'])
        elif self._repo_info.additional_tags.from_container_yaml:
            tag_suffixes['primary'].extend(['{version}-{release}'])
            tag_suffixes['primary'].extend(self._repo_info.additional_tags.tags)
        else:
            tag_suffixes['primary'].extend(['latest', '{version}', '{version}-{release}'])
            tag_suffixes['primary'].extend(self._repo_info.additional_tags.tags)

    self.dj.dock_json_set_arg(phase, plugin, 'tag_suffixes', tag_suffixes)
Configure tag_from_config plugin
entailment
def render_pulp_pull(self):
    """
    If a pulp registry is specified, use pulp_pull plugin
    """
    # pulp_pull is a multi-phase plugin
    phases = ('postbuild_plugins', 'exit_plugins')
    plugin = 'pulp_pull'
    for phase in phases:
        if not self.dj.dock_json_has_plugin_conf(phase, plugin):
            continue

        pulp_registry = self.spec.pulp_registry.value
        if not pulp_registry:
            logger.info("removing %s from request, requires pulp_registry", plugin)
            self.dj.remove_plugin(phase, plugin)
            continue

        if not self.spec.kojihub.value:
            logger.info('Removing %s because no kojihub was specified', plugin)
            self.dj.remove_plugin(phase, plugin)
            continue

        if self.spec.prefer_schema1_digest.value is not None:
            self.dj.dock_json_set_arg(phase, 'pulp_pull', 'expect_v2schema2',
                                      not self.spec.prefer_schema1_digest.value)
If a pulp registry is specified, use pulp_pull plugin
entailment
def render_pulp_sync(self):
    """
    If a pulp registry is specified, use the pulp plugin as well as the
    delete_from_registry to delete the image after sync
    """
    if not self.dj.dock_json_has_plugin_conf('postbuild_plugins', 'pulp_sync'):
        return

    pulp_registry = self.spec.pulp_registry.value

    # Find which registry to use
    docker_registry = None
    registry_secret = None
    registries = zip_longest(self.spec.registry_uris.value,
                             self.spec.registry_secrets.value)
    for registry, secret in registries:
        if registry.version == 'v2':
            # First specified v2 registry is the one we'll tell pulp
            # to sync from. Keep the http prefix -- pulp wants it.
            docker_registry = registry.uri
            registry_secret = secret
            logger.info("using docker v2 registry %s for pulp_sync", docker_registry)
            break

    if pulp_registry and docker_registry:
        self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_sync',
                                  'pulp_registry_name', pulp_registry)
        self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_sync',
                                  'docker_registry', docker_registry)
        if registry_secret:
            self.set_secret_for_plugin(registry_secret,
                                       plugin=('postbuild_plugins', 'pulp_sync',
                                               'registry_secret_path'))

        # Verify we have a pulp secret
        if self.spec.pulp_secret.value is None:
            raise OsbsValidationException("Pulp registry specified "
                                          "but no auth config")

        source_registry = self.spec.source_registry_uri.value
        perform_delete = (source_registry is None or
                          source_registry.docker_uri != registry.docker_uri)
        if perform_delete:
            push_conf = self.dj.dock_json_get_plugin_conf('exit_plugins',
                                                          'delete_from_registry')
            args = push_conf.setdefault('args', {})
            delete_registries = args.setdefault('registries', {})
            placeholder = '{{REGISTRY_URI}}'

            # use passed in params like 'insecure' if available
            if placeholder in delete_registries:
                regdict = delete_registries[placeholder].copy()
                del delete_registries[placeholder]
            else:
                regdict = {}

            if registry_secret:
                regdict['secret'] = \
                    os.path.join(SECRETS_PATH, registry_secret)
            # tag_and_push configured the registry secret, no need to set it again

            delete_registries[docker_registry] = regdict

            self.dj.dock_json_set_arg('exit_plugins', 'delete_from_registry',
                                      'registries', delete_registries)
        else:
            logger.info("removing delete_from_registry from request, "
                        "source and target registry are identical")
            self.dj.remove_plugin("exit_plugins", "delete_from_registry")
    else:
        # If no pulp registry is specified, don't run the pulp plugin
        logger.info("removing pulp_sync+delete_from_registry from request, "
                    "requires pulp_registry and a v2 registry")
        self.dj.remove_plugin("postbuild_plugins", "pulp_sync")
        self.dj.remove_plugin("exit_plugins", "delete_from_registry")
If a pulp registry is specified, use the pulp plugin as well as the delete_from_registry plugin to delete the image after sync
entailment
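To make the '{{REGISTRY_URI}}' placeholder handling above concrete, a minimal standalone sketch of the substitution performed for delete_from_registry; the SECRETS_PATH value, the registry URI and the secret name are assumptions for illustration.

import os

SECRETS_PATH = '/var/run/secrets/atomic-reactor'  # assumed value of the constant
docker_registry = 'https://registry.example.com:8443'  # hypothetical v2 registry
registry_secret = 'v2-registry-dockercfg'  # hypothetical secret name

delete_registries = {'{{REGISTRY_URI}}': {'insecure': True}}
placeholder = '{{REGISTRY_URI}}'

# carry over params like 'insecure' from the placeholder entry
if placeholder in delete_registries:
    regdict = delete_registries[placeholder].copy()
    del delete_registries[placeholder]
else:
    regdict = {}

if registry_secret:
    regdict['secret'] = os.path.join(SECRETS_PATH, registry_secret)

delete_registries[docker_registry] = regdict
print(delete_registries)
# {'https://registry.example.com:8443': {'insecure': True,
#  'secret': '/var/run/secrets/atomic-reactor/v2-registry-dockercfg'}}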
def render_pulp_tag(self): """ Configure the pulp_tag plugin. """ if not self.dj.dock_json_has_plugin_conf('postbuild_plugins', 'pulp_tag'): return pulp_registry = self.spec.pulp_registry.value if pulp_registry: self.dj.dock_json_set_arg('postbuild_plugins', 'pulp_tag', 'pulp_registry_name', pulp_registry) # Verify we have either a secret or username/password if self.spec.pulp_secret.value is None: conf = self.dj.dock_json_get_plugin_conf('postbuild_plugins', 'pulp_tag') args = conf.get('args', {}) if 'username' not in args: raise OsbsValidationException("Pulp registry specified " "but no auth config") else: # If no pulp registry is specified, don't run the pulp plugin logger.info("removing pulp_tag from request, " "requires pulp_registry") self.dj.remove_plugin("postbuild_plugins", "pulp_tag")
Configure the pulp_tag plugin.
entailment
def render_group_manifests(self): """ Configure the group_manifests plugin. Group is always set to false for now. """ if not self.dj.dock_json_has_plugin_conf('postbuild_plugins', 'group_manifests'): return push_conf = self.dj.dock_json_get_plugin_conf('postbuild_plugins', 'group_manifests') args = push_conf.setdefault('args', {}) # modify registries in place registries = args.setdefault('registries', {}) placeholder = '{{REGISTRY_URI}}' if placeholder in registries: for registry, secret in zip_longest(self.spec.registry_uris.value, self.spec.registry_secrets.value): if not registry.uri: continue regdict = registries[placeholder].copy() regdict['version'] = registry.version if secret: regdict['secret'] = os.path.join(SECRETS_PATH, secret) registries[registry.docker_uri] = regdict del registries[placeholder] self.dj.dock_json_set_arg('postbuild_plugins', 'group_manifests', 'group', self.spec.group_manifests.value) goarch = {} for platform, architecture in self.platform_descriptors.items(): goarch[platform] = architecture['architecture'] self.dj.dock_json_set_arg('postbuild_plugins', 'group_manifests', 'goarch', goarch)
Configure the group_manifests plugin. Group is always set to false for now.
entailment
def render_import_image(self, use_auth=None):
        """
        Configure the import_image plugin
        """
        # import_image is a multi-phase plugin
        phases = ('postbuild_plugins', 'exit_plugins')
        plugin = 'import_image'
        for phase in phases:
            if self.spec.imagestream_name.value is None or self.spec.imagestream_url.value is None:
                logger.info("removing %s from request, "
                            "imagestream name or url is not defined", plugin)
                self.dj.remove_plugin(phase, plugin)
                continue

            if self.dj.dock_json_has_plugin_conf(phase, plugin):
                self.dj.dock_json_set_arg(phase, plugin,
                                          'imagestream', self.spec.imagestream_name.value)
                self.dj.dock_json_set_arg(phase, plugin,
                                          'docker_image_repo', self.spec.imagestream_url.value)
                self.dj.dock_json_set_arg(phase, plugin,
                                          'url', self.spec.builder_openshift_url.value)
                self.dj.dock_json_set_arg(phase, plugin,
                                          'build_json_dir', self.spec.builder_build_json_dir.value)

                use_auth = self.spec.use_auth.value
                if use_auth is not None:
                    self.dj.dock_json_set_arg(phase, plugin, 'use_auth', use_auth)

                if self.spec.imagestream_insecure_registry.value:
                    self.dj.dock_json_set_arg(phase, plugin, 'insecure_registry', True)
Configure the import_image plugin
entailment
def render_customizations(self): """ Customize prod_inner for site specific customizations """ disable_plugins = self.customize_conf.get('disable_plugins', []) if not disable_plugins: logger.debug("No site-specific plugins to disable") else: for plugin_dict in disable_plugins: try: self.dj.remove_plugin( plugin_dict['plugin_type'], plugin_dict['plugin_name'] ) logger.debug( "site-specific plugin disabled -> Type:{} Name:{}".format( plugin_dict['plugin_type'], plugin_dict['plugin_name'] ) ) except KeyError: # Malformed config logger.debug("Invalid custom configuration found for disable_plugins") enable_plugins = self.customize_conf.get('enable_plugins', []) if not enable_plugins: logger.debug("No site-specific plugins to enable") else: for plugin_dict in enable_plugins: try: self.dj.add_plugin( plugin_dict['plugin_type'], plugin_dict['plugin_name'], plugin_dict['plugin_args'] ) logger.debug( "site-specific plugin enabled -> Type:{} Name:{} Args: {}".format( plugin_dict['plugin_type'], plugin_dict['plugin_name'], plugin_dict['plugin_args'] ) ) except KeyError: # Malformed config logger.debug("Invalid custom configuration found for enable_plugins")
Customize prod_inner for site specific customizations
entailment
def render_name(self, name, image_tag, platform): """Sets the Build/BuildConfig object name""" if self.scratch or self.isolated: name = image_tag # Platform name may contain characters not allowed by OpenShift. if platform: platform_suffix = '-{}'.format(platform) if name.endswith(platform_suffix): name = name[:-len(platform_suffix)] _, salt, timestamp = name.rsplit('-', 2) if self.scratch: name = 'scratch-{}-{}'.format(salt, timestamp) elif self.isolated: name = 'isolated-{}-{}'.format(salt, timestamp) # !IMPORTANT! can't be too long: https://github.com/openshift/origin/issues/733 self.template['metadata']['name'] = name
Sets the Build/BuildConfig object name
entailment
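A standalone sketch of the renaming performed above for a scratch build; the input name is a hypothetical example following the <name>-<salt>-<timestamp>-<platform> shape the code assumes.

name = 'my-component-4f1d8a-20180601223656-x86_64'  # hypothetical
platform = 'x86_64'

platform_suffix = '-{}'.format(platform)
if name.endswith(platform_suffix):
    name = name[:-len(platform_suffix)]

_, salt, timestamp = name.rsplit('-', 2)
print('scratch-{}-{}'.format(salt, timestamp))
# scratch-4f1d8a-20180601223656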
def setup_json_capture(osbs, os_conf, capture_dir): """ Only used for setting up the testing framework. """ try: os.mkdir(capture_dir) except OSError: pass finally: osbs.os._con.request = ResponseSaver(capture_dir, os_conf.get_openshift_api_uri(), os_conf.get_k8s_api_uri(), osbs.os._con.request).request
Only used for setting up the testing framework.
entailment
def get_terminal_size(): """ get size of console: rows x columns :return: tuple, (int, int) """ try: rows, columns = subprocess.check_output(['stty', 'size']).split() except subprocess.CalledProcessError: # not attached to terminal logger.info("not attached to terminal") return 0, 0 logger.debug("console size is %s %s", rows, columns) return int(rows), int(columns)
get size of console: rows x columns :return: tuple, (int, int)
entailment
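On Python 3.3+ the same information is available from the standard library without spawning stty; a sketch of an equivalent helper, with the caveat that shutil also honours the COLUMNS/LINES environment variables, so the behaviour is close but not identical.

import shutil

def get_terminal_size_stdlib():
    # fallback=(0, 0) mirrors the "not attached to terminal" result above;
    # shutil returns (columns, lines), so swap to keep rows first
    size = shutil.get_terminal_size(fallback=(0, 0))
    return size.lines, size.columns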
def _longest_val_in_column(self, col):
        """
        get size of longest value in specific column

        :param col: str, column name
        :return: int
        """
        try:
            # +2 is for implicit separator
            return max([len(x[col]) for x in self.table if x[col]]) + 2
        except KeyError:
            logger.error("there is no column %r", col)
            raise
get size of longest value in specific column :param col: str, column name :return: int
entailment
def _init(self):
        """
        initialize all values based on provided input

        :return: None
        """
        self.col_count = len(self.col_list)
        # list of lengths of longest entries in columns
        self.col_longest = self.get_all_longest_col_lengths()
        self.data_length = sum(self.col_longest.values())

        if self.terminal_width > 0:
            # free space is space which should be equally distributed for all columns
            # self.terminal_width -- terminal is our canvas
            # - self.data_length  -- subtract length of content (the actual data)
            # - self.col_count + 1 -- table lines are not part of free space, their width is
            #                         (number of columns - 1)
            self.total_free_space = (self.terminal_width - self.data_length) - self.col_count + 1
            if self.total_free_space <= 0:
                self.total_free_space = None
            else:
                self.default_column_space = self.total_free_space // self.col_count
                self.default_column_space_remainder = self.total_free_space % self.col_count
                logger.debug("total free space: %d, column space: %d, remainder: %d, columns: %d",
                             self.total_free_space, self.default_column_space,
                             self.default_column_space_remainder, self.col_count)
        else:
            self.total_free_space = None
initialize all values based on provided input :return: None
entailment
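A worked example of the free-space arithmetic above, with made-up numbers:

terminal_width = 80
data_length = 50   # sum of the longest column values (incl. the +2 padding)
col_count = 4

# (number of columns - 1) vertical table lines are not free space
total_free_space = (terminal_width - data_length) - col_count + 1
default_column_space = total_free_space // col_count           # 6
default_column_space_remainder = total_free_space % col_count  # 3
assert total_free_space == 27
assert default_column_space * col_count + default_column_space_remainder == 27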
def _count_sizes(self): """ count all values needed to display whole table <><---terminal-width-----------><> <> HEADER | HEADER2 | HEADER3 <> <>---------+----------+---------<> kudos to PostgreSQL developers :return: None """ format_list = [] header_sepa_format_list = [] # actual widths of columns self.col_widths = {} for col in self.col_list: col_length = self.col_longest[col] col_width = col_length + self._separate() # -2 is for implicit separator -- spaces around format_list.append(" {%s:%d} " % (col, col_width - 2)) header_sepa_format_list.append("{%s:%d}" % (col, col_width)) self.col_widths[col] = col_width logger.debug("column widths %s", self.col_widths) self.format_str = "|".join(format_list) self.header_format_str = "+".join(header_sepa_format_list) self.header_data = {} for k in self.col_widths: self.header_data[k] = "-" * self.col_widths[k]
count all values needed to display whole table <><---terminal-width-----------><> <> HEADER | HEADER2 | HEADER3 <> <>---------+----------+---------<> kudos to PostgreSQL developers :return: None
entailment
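For illustration, the format strings the method above would build for two columns named NAME and STATUS with (hypothetical) widths 10 and 8:

format_str = " {NAME:8} | {STATUS:6} "          # -2 for the surrounding spaces
header_format_str = "{NAME:10}+{STATUS:8}"
header_data = {'NAME': '-' * 10, 'STATUS': '-' * 8}

print(format_str.format(NAME='NAME', STATUS='STATUS'))
print(header_format_str.format(**header_data))
print(format_str.format(NAME='osbs', STATUS='ok'))
#  NAME     | STATUS 
# ----------+--------
#  osbs     | ok     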
def get_all_longest_col_lengths(self): """ iterate over all columns and get their longest values :return: dict, {"column_name": 132} """ response = {} for col in self.col_list: response[col] = self._longest_val_in_column(col) return response
iterate over all columns and get their longest values :return: dict, {"column_name": 132}
entailment
def _separate(self): """ get a width of separator for current column :return: int """ if self.total_free_space is None: return 0 else: sepa = self.default_column_space # we need to distribute remainders if self.default_column_space_remainder > 0: sepa += 1 self.default_column_space_remainder -= 1 logger.debug("remainder: %d, separator: %d", self.default_column_space_remainder, sepa) return sepa
get a width of separator for current column :return: int
entailment
def render(self): """ print provided table :return: None """ print(self.format_str.format(**self.header), file=sys.stderr) print(self.header_format_str.format(**self.header_data), file=sys.stderr) for row in self.data: print(self.format_str.format(**row))
print provided table :return: None
entailment
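Note the design choice in render(): the header and separator go to stderr, so piping stdout yields only the data rows. A self-contained demonstration using the hypothetical format strings from the previous sketch:

import sys

format_str = " {NAME:8} | {STATUS:6} "
header_format_str = "{NAME:10}+{STATUS:8}"

print(format_str.format(NAME='NAME', STATUS='STATUS'), file=sys.stderr)
print(header_format_str.format(NAME='-' * 10, STATUS='-' * 8), file=sys.stderr)
for row in [{'NAME': 'build-1', 'STATUS': 'done'}]:
    print(format_str.format(**row))   # only this reaches stdout when piped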
def _validate_source_data(self): """ :raises ValidationError: """ try: jsonschema.validate(self._buffer, self._schema) except jsonschema.ValidationError as e: raise ValidationError(e)
:raises ValidationError:
entailment
def to_table_data(self): """ :raises ValueError: :raises pytablereader.error.ValidationError: """ self._validate_source_data() self._loader.inc_table_count() yield TableData( self._make_table_name(), ["key", "value"], [record for record in self._buffer.items()], dp_extractor=self._loader.dp_extractor, type_hints=self._extract_type_hints(), )
:raises ValueError: :raises pytablereader.error.ValidationError:
entailment
def to_table_data(self): """ :raises ValueError: :raises pytablereader.error.ValidationError: """ self._validate_source_data() for table_key, json_records in six.iteritems(self._buffer): attr_name_set = set() for json_record in json_records: attr_name_set = attr_name_set.union(six.viewkeys(json_record)) headers = sorted(attr_name_set) self._loader.inc_table_count() self._table_key = table_key yield TableData( self._make_table_name(), headers, json_records, dp_extractor=self._loader.dp_extractor, type_hints=self._extract_type_hints(headers), )
:raises ValueError: :raises pytablereader.error.ValidationError:
entailment
def to_table_data(self): """ :raises ValueError: :raises pytablereader.error.ValidationError: """ self._validate_source_data() for table_key, json_records in six.iteritems(self._buffer): headers = sorted(six.viewkeys(json_records)) self._loader.inc_table_count() self._table_key = table_key yield TableData( self._make_table_name(), headers, zip(*[json_records.get(header) for header in headers]), dp_extractor=self._loader.dp_extractor, type_hints=self._extract_type_hints(headers), )
:raises ValueError: :raises pytablereader.error.ValidationError:
entailment
def to_table_data(self): """ :raises ValueError: :raises pytablereader.error.ValidationError: """ self._validate_source_data() for table_key, json_records in six.iteritems(self._buffer): self._loader.inc_table_count() self._table_key = table_key yield TableData( self._make_table_name(), ["key", "value"], [record for record in json_records.items()], dp_extractor=self._loader.dp_extractor, type_hints=self._extract_type_hints(), )
:raises ValueError: :raises pytablereader.error.ValidationError:
entailment
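The four to_table_data() variants above differ only in the JSON layout they accept. Illustrative (made-up) buffers for each, in the order the variants appear:

# flat key/value object -> a two-column "key"/"value" table
buffer_kv = {"name": "pytablereader", "version": "1.2.3"}

# table name -> list of record objects; headers are the union of keys
buffer_records = {
    "builds": [{"id": 1, "status": "done"}, {"id": 2, "status": "failed"}],
}

# table name -> column name -> list of values; zip(*...) transposes to rows
buffer_columns = {
    "builds": {"id": [1, 2], "status": ["done", "failed"]},
}

# table name -> flat key/value object -> per-table "key"/"value" tables
buffer_nested_kv = {
    "metadata": {"name": "pytablereader", "version": "1.2.3"},
}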
def set_params(self, **kwargs):
        """
        set parameters in the user parameters

        these parameters are accepted:
        :param git_uri: str, uri of the git repository for the source
        :param git_ref: str, commit ID of the branch to be pulled
        :param git_branch: str, branch name of the branch to be pulled
        :param base_image: str, name of the parent image
        :param name_label: str, label of the parent image
        :param user: str, name of the user requesting the build
        :param component: str, name of the component
        :param release: str,
        :param build_image: str,
        :param build_imagestream: str,
        :param build_from: str,
        :param build_type: str, orchestrator or worker
        :param platforms: list of str, platforms to build on
        :param platform: str, platform
        :param koji_target: str, koji tag with packages used to build the image
        :param koji_task_id: str, koji ID
        :param koji_parent_build: str,
        :param koji_upload_dir: str, koji directory where the completed image will be uploaded
        :param flatpak: bool, if we should build a Flatpak OCI Image
        :param flatpak_base_image: str, name of the Flatpak OCI Image
        :param reactor_config_map: str, name of the config map containing the reactor environment
        :param reactor_config_override: dict, data structure for reactor config to be injected as
                                        an environment variable into a worker build;
                                        when used, reactor_config_map is ignored.
        :param yum_repourls: list of str, uris of the yum repos to pull from
        :param signing_intent: str, signing intent of the ODCS composes
        :param compose_ids: list of int, ODCS composes to use instead of generating new ones
        :param filesystem_koji_task_id: int, Koji Task that created the base filesystem
        :param platform_node_selector: dict, a nodeselector for a specific platform
        :param scratch_build_node_selector: dict, a nodeselector for scratch builds
        :param explicit_build_node_selector: dict, a nodeselector for explicit builds
        :param auto_build_node_selector: dict, a nodeselector for auto builds
        :param isolated_build_node_selector: dict, a nodeselector for isolated builds
        :param operator_manifests_extract_platform: str, indicates which platform should upload
                                                    operator manifests to koji
        :param parent_images_digests: dict, mapping image digests to names and platforms
        """
        # Here we cater to the koji "scratch" build type, this will disable
        # all plugins that might cause importing of data to koji
        self.scratch = kwargs.get('scratch')
        # When true, it indicates build was automatically started by
        # OpenShift via a trigger, for instance ImageChangeTrigger
        self.is_auto = kwargs.pop('is_auto', False)
        # An isolated build is meant to patch a certain release and not
        # update transient tags in container registry
        self.isolated = kwargs.get('isolated')

        self.osbs_api = kwargs.pop('osbs_api', None)
        self.validate_build_variation()
        self.base_image = kwargs.get('base_image')
        self.platform_node_selector = kwargs.get('platform_node_selector', {})
        self.scratch_build_node_selector = kwargs.get('scratch_build_node_selector', {})
        self.explicit_build_node_selector = kwargs.get('explicit_build_node_selector', {})
        self.auto_build_node_selector = kwargs.get('auto_build_node_selector', {})
        self.isolated_build_node_selector = kwargs.get('isolated_build_node_selector', {})

        logger.debug("now setting params '%s' for user_params", kwargs)
        self.user_params.set_params(**kwargs)

        self.source_registry = None
        self.organization = None
set parameters in the user parameters

these parameters are accepted:
:param git_uri: str, uri of the git repository for the source
:param git_ref: str, commit ID of the branch to be pulled
:param git_branch: str, branch name of the branch to be pulled
:param base_image: str, name of the parent image
:param name_label: str, label of the parent image
:param user: str, name of the user requesting the build
:param component: str, name of the component
:param release: str,
:param build_image: str,
:param build_imagestream: str,
:param build_from: str,
:param build_type: str, orchestrator or worker
:param platforms: list of str, platforms to build on
:param platform: str, platform
:param koji_target: str, koji tag with packages used to build the image
:param koji_task_id: str, koji ID
:param koji_parent_build: str,
:param koji_upload_dir: str, koji directory where the completed image will be uploaded
:param flatpak: bool, if we should build a Flatpak OCI Image
:param flatpak_base_image: str, name of the Flatpak OCI Image
:param reactor_config_map: str, name of the config map containing the reactor environment
:param reactor_config_override: dict, data structure for reactor config to be injected as
                                an environment variable into a worker build;
                                when used, reactor_config_map is ignored.
:param yum_repourls: list of str, uris of the yum repos to pull from
:param signing_intent: str, signing intent of the ODCS composes
:param compose_ids: list of int, ODCS composes to use instead of generating new ones
:param filesystem_koji_task_id: int, Koji Task that created the base filesystem
:param platform_node_selector: dict, a nodeselector for a specific platform
:param scratch_build_node_selector: dict, a nodeselector for scratch builds
:param explicit_build_node_selector: dict, a nodeselector for explicit builds
:param auto_build_node_selector: dict, a nodeselector for auto builds
:param isolated_build_node_selector: dict, a nodeselector for isolated builds
:param operator_manifests_extract_platform: str, indicates which platform should upload
                                            operator manifests to koji
:param parent_images_digests: dict, mapping image digests to names and platforms
entailment
def set_data_from_reactor_config(self):
        """
        Sets data from reactor config
        """
        reactor_config_override = self.user_params.reactor_config_override.value
        reactor_config_map = self.user_params.reactor_config_map.value
        data = None

        if reactor_config_override:
            data = reactor_config_override
        elif reactor_config_map:
            config_map = self.osbs_api.get_config_map(reactor_config_map)
            data = config_map.get_data_by_key('config.yaml')

        if not data:
            if self.user_params.flatpak.value:
                raise OsbsValidationException("flatpak_base_image must be provided")
            else:
                return

        source_registry_key = 'source_registry'
        registry_organization_key = 'registries_organization'
        req_secrets_key = 'required_secrets'
        token_secrets_key = 'worker_token_secrets'
        flatpak_key = 'flatpak'
        flatpak_base_image_key = 'base_image'

        if source_registry_key in data:
            self.source_registry = data[source_registry_key]
        if registry_organization_key in data:
            self.organization = data[registry_organization_key]

        if self.user_params.flatpak.value:
            flatpak_base_image = data.get(flatpak_key, {}).get(flatpak_base_image_key, None)
            if flatpak_base_image:
                self.base_image = flatpak_base_image
                self.user_params.base_image.value = flatpak_base_image
            else:
                raise OsbsValidationException("flatpak_base_image must be provided")

        required_secrets = data.get(req_secrets_key, [])
        token_secrets = data.get(token_secrets_key, [])
        self._set_required_secrets(required_secrets, token_secrets)
Sets data from reactor config
entailment
def _set_required_secrets(self, required_secrets, token_secrets): """ Sets required secrets """ if self.user_params.build_type.value == BUILD_TYPE_ORCHESTRATOR: required_secrets += token_secrets if not required_secrets: return secrets = self.template['spec']['strategy']['customStrategy'].setdefault('secrets', []) existing = set(secret_mount['secretSource']['name'] for secret_mount in secrets) required_secrets = set(required_secrets) already_set = required_secrets.intersection(existing) if already_set: logger.debug("secrets %s are already set", already_set) for secret in required_secrets - existing: secret_path = os.path.join(SECRETS_PATH, secret) logger.info("Configuring %s secret at %s", secret, secret_path) secrets.append({ 'secretSource': { 'name': secret, }, 'mountPath': secret_path, })
Sets required secrets
entailment
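For reference, the shape of the mount entries appended to customStrategy['secrets'] above; the secret name and the SECRETS_PATH value are assumptions for illustration.

import os

SECRETS_PATH = '/var/run/secrets/atomic-reactor'  # assumed value of the constant
secret = 'kojisecret'  # hypothetical secret name

mount = {
    'secretSource': {'name': secret},
    'mountPath': os.path.join(SECRETS_PATH, secret),
}
print(mount)
# {'secretSource': {'name': 'kojisecret'},
#  'mountPath': '/var/run/secrets/atomic-reactor/kojisecret'}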
def remove_plugin(self, phase, name, reason=None): """ if config contains plugin, remove it """ for p in self.template[phase]: if p.get('name') == name: self.template[phase].remove(p) if reason: logger.info('Removing {}:{}, {}'.format(phase, name, reason)) break
if config contains plugin, remove it
entailment
def add_plugin(self, phase, name, args, reason=None): """ if config has plugin, override it, else add it """ plugin_modified = False for plugin in self.template[phase]: if plugin['name'] == name: plugin['args'] = args plugin_modified = True if not plugin_modified: self.template[phase].append({"name": name, "args": args}) if reason: logger.info('{}:{} with args {}, {}'.format(phase, name, args, reason))
if config has plugin, override it, else add it
entailment
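A hypothetical template and call sequence showing the override-versus-append behaviour of add_plugin() above; this is a standalone mirror of the method operating on a plain dict, with illustrative plugin names and args.

def add_plugin(template, phase, name, args):
    # mirror of the method above: override a matching plugin, else append
    for plugin in template[phase]:
        if plugin['name'] == name:
            plugin['args'] = args
            return
    template[phase].append({'name': name, 'args': args})

template = {'prebuild_plugins': [{'name': 'koji', 'args': {'target': 'f28'}}]}
add_plugin(template, 'prebuild_plugins', 'koji', {'target': 'f29'})   # overrides
add_plugin(template, 'prebuild_plugins', 'example_plugin', {})        # appends
print(template['prebuild_plugins'])
# [{'name': 'koji', 'args': {'target': 'f29'}},
#  {'name': 'example_plugin', 'args': {}}]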
def get_plugin_conf(self, phase, name): """ Return the configuration for a plugin. Raises KeyError if there are no plugins of that type. Raises IndexError if the named plugin is not listed. """ match = [x for x in self.template[phase] if x.get('name') == name] return match[0]
Return the configuration for a plugin. Raises KeyError if there are no plugins of that type. Raises IndexError if the named plugin is not listed.
entailment
def has_plugin_conf(self, phase, name): """ Check whether a plugin is configured. """ try: self.get_plugin_conf(phase, name) return True except (KeyError, IndexError): return False
Check whether a plugin is configured.
entailment
def adjust_for_scratch(self): """ Remove certain plugins in order to handle the "scratch build" scenario. Scratch builds must not affect subsequent builds, and should not be imported into Koji. """ if self.user_params.scratch.value: remove_plugins = [ ("prebuild_plugins", "koji_parent"), ("postbuild_plugins", "compress"), # required only to make an archive for Koji ("postbuild_plugins", "pulp_pull"), # required only to make an archive for Koji ("postbuild_plugins", "compare_components"), ("postbuild_plugins", "import_image"), ("exit_plugins", "koji_promote"), ("exit_plugins", "koji_tag_build"), ("exit_plugins", "import_image"), ("prebuild_plugins", "check_and_set_rebuild"), ("prebuild_plugins", "stop_autorebuild_if_disabled") ] if not self.has_tag_suffixes_placeholder(): remove_plugins.append(("postbuild_plugins", "tag_from_config")) for when, which in remove_plugins: self.pt.remove_plugin(when, which, 'removed from scratch build request')
Remove certain plugins in order to handle the "scratch build" scenario. Scratch builds must not affect subsequent builds, and should not be imported into Koji.
entailment
def adjust_for_isolated(self): """ Remove certain plugins in order to handle the "isolated build" scenario. """ if self.user_params.isolated.value: remove_plugins = [ ("prebuild_plugins", "check_and_set_rebuild"), ("prebuild_plugins", "stop_autorebuild_if_disabled") ] for when, which in remove_plugins: self.pt.remove_plugin(when, which, 'removed from isolated build request')
Remove certain plugins in order to handle the "isolated build" scenario.
entailment
def adjust_for_flatpak(self): """ Remove plugins that don't work when building Flatpaks """ if self.user_params.flatpak.value: remove_plugins = [ ("prebuild_plugins", "resolve_composes"), # We'll extract the filesystem anyways for a Flatpak instead of exporting # the docker image directly, so squash just slows things down. ("prepublish_plugins", "squash"), # Pulp can't currently handle Flatpaks, which are OCI images ("postbuild_plugins", "pulp_push"), ("postbuild_plugins", "pulp_tag"), ("postbuild_plugins", "pulp_sync"), ("exit_plugins", "pulp_publish"), ("exit_plugins", "pulp_pull"), # delete_from_registry is used for deleting builds from the temporary registry # that pulp_sync mirrors from. ("exit_plugins", "delete_from_registry"), ] for when, which in remove_plugins: self.pt.remove_plugin(when, which, 'not needed for flatpak build')
Remove plugins that don't work when building Flatpaks
entailment
def render_customizations(self):
        """
        Customize template for site user specified customizations
        """
        disable_plugins = self.pt.customize_conf.get('disable_plugins', [])
        if not disable_plugins:
            logger.debug('No site-user specified plugins to disable')
        else:
            for plugin in disable_plugins:
                try:
                    self.pt.remove_plugin(plugin['plugin_type'], plugin['plugin_name'],
                                          'disabled at user request')
                except KeyError:
                    # Malformed config
                    logger.info('Invalid custom configuration found for disable_plugins')

        enable_plugins = self.pt.customize_conf.get('enable_plugins', [])
        if not enable_plugins:
            logger.debug('No site-user specified plugins to enable')
        else:
            for plugin in enable_plugins:
                try:
                    msg = 'enabled at user request'
                    self.pt.add_plugin(plugin['plugin_type'], plugin['plugin_name'],
                                       plugin['plugin_args'], msg)
                except KeyError:
                    # Malformed config
                    logger.info('Invalid custom configuration found for enable_plugins')
Customize template for site user specified customizations
entailment
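Illustrative shape of the customize_conf consumed above; the keys come from the code, while the plugin names and args are hypothetical:

customize_conf = {
    "disable_plugins": [
        {"plugin_type": "exit_plugins", "plugin_name": "koji_promote"},
    ],
    "enable_plugins": [
        {"plugin_type": "prebuild_plugins",
         "plugin_name": "example_site_plugin",   # hypothetical
         "plugin_args": {"example_arg": "value"}},
    ],
}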
def render_koji(self): """ if there is yum repo in user params, don't pick stuff from koji """ phase = 'prebuild_plugins' plugin = 'koji' if not self.pt.has_plugin_conf(phase, plugin): return if self.user_params.yum_repourls.value: self.pt.remove_plugin(phase, plugin, 'there is a yum repo user parameter') elif not self.pt.set_plugin_arg_valid(phase, plugin, "target", self.user_params.koji_target.value): self.pt.remove_plugin(phase, plugin, 'no koji target supplied in user parameters')
if there is yum repo in user params, don't pick stuff from koji
entailment