blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 257 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d706cbc2c581af29582c417ee42d30c6d487eef0 | ad715f9713dc5c6c570a5ac51a18b11932edf548 | /tensorflow/lite/testing/op_tests/scatter_nd.py | 8a365ae5b96365937c5c2c28468aa81e1870ed84 | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0",
"BSD-2-Clause"
] | permissive | rockzhuang/tensorflow | f1f31bc8edfa402b748c500efb97473c001bac95 | cb40c060b36c6a75edfefbc4e5fc7ee720273e13 | refs/heads/master | 2022-11-08T20:41:36.735747 | 2022-10-21T01:45:52 | 2022-10-21T01:45:52 | 161,580,587 | 27 | 11 | Apache-2.0 | 2019-01-23T11:00:44 | 2018-12-13T03:47:28 | C++ | UTF-8 | Python | false | false | 2,856 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test configs for scatter_nd."""
import numpy as np
import tensorflow.compat.v1 as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function
@register_make_test_function()
def make_scatter_nd_tests(options):
  """Make a set of tests to do scatter_nd."""
  # Two configurations: 1-D scatter into a vector, and 2-D scatter of
  # row-slices into a rank-3 tensor. Updates are tested across four dtypes.
  test_parameters = [
      {
          "indices_dtype": [tf.int32],
          "indices_shape": [[4, 1]],
          "indices_value": [[[4], [3], [1], [7]]],
          "updates_dtype": [tf.int32, tf.int64, tf.float32, tf.bool],
          "updates_shape": [[4]],
          "shape_dtype": [tf.int32],
          "shape_shape": [[1]],
          "shape_value": [[8]],
      },
      {
          "indices_dtype": [tf.int32],
          "indices_shape": [[4, 2]],
          "indices_value": [[[0, 0], [1, 0], [0, 2], [1, 2]]],
          "updates_dtype": [tf.int32, tf.int64, tf.float32, tf.bool],
          "updates_shape": [[4, 5]],
          "shape_dtype": [tf.int32],
          "shape_shape": [[3]],
          "shape_value": [[2, 3, 5]],
      },
  ]

  def build_graph(parameters):
    """Build the scatter_nd op testing graph."""

    def _make_placeholder(key):
      # All three graph inputs follow the "<key>_dtype" / "<key>_shape"
      # naming scheme in the parameter dicts above.
      return tf.compat.v1.placeholder(
          dtype=parameters[key + "_dtype"],
          name=key,
          shape=parameters[key + "_shape"])

    indices = _make_placeholder("indices")
    updates = _make_placeholder("updates")
    shape = _make_placeholder("shape")
    out = tf.scatter_nd(indices, updates, shape)
    return [indices, updates, shape], [out]

  def build_inputs(parameters, sess, inputs, outputs):
    """Feed concrete values for the placeholders and run the graph."""
    feed_values = [
        np.array(parameters["indices_value"]),
        create_tensor_data(parameters["updates_dtype"],
                           parameters["updates_shape"]),
        np.array(parameters["shape_value"]),
    ]
    return feed_values, sess.run(
        outputs, feed_dict=dict(zip(inputs, feed_values)))

  make_zip_of_tests(options, test_parameters, build_graph, build_inputs)
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
b347b4988d4ba504e2cb28606864befe47bcb34d | 7ba6fc111bdbce41a640e17c3ed1d0b2cbd0f433 | /app/views/main.py | c430018c84a6375912a86244b89a9444558ad344 | [] | no_license | arush15june/circl-cve | 9a2a1680bdf353241afdf278a690a89c9cbec06b | 49b354d0a0a0d4d259f7458ea7c0e56e48301552 | refs/heads/master | 2020-07-28T19:12:22.189142 | 2019-08-07T11:50:21 | 2019-08-07T11:50:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,556 | py | # -*- coding: utf-8 -*-
import syslog
from flask import Blueprint, render_template, flash, redirect, url_for, request, jsonify, escape
from flask_login import login_required, current_user
from sqlalchemy import desc
import os
import form_class
import models
from utils import admin_required, gpg
main_blueprint = Blueprint('main', __name__)
@main_blueprint.route('/', methods=['GET', 'POST'])
def index():
    """Render the public landing page (no authentication required)."""
    return render_template("index.html")
@main_blueprint.route('/admin', methods=['GET', 'POST'])
@login_required
@admin_required
def admin_home():
    """Admin dashboard: create, update and delete user accounts.

    Handles three mutually exclusive form submissions (delete, create,
    update). Each successful action is logged to syslog and redirects back
    to this view; a plain GET renders the dashboard with empty forms.
    """
    newform = form_class.RegistrationForm()
    updateform = form_class.UpdateUserForm()
    deleteform = form_class.DeleteUserForm()
    if deleteform.validate_on_submit():
        if deleteform.id.data != '1':
            user = models.User.query.filter_by(id=deleteform.id.data).first()
            models.db.session.delete(user)
            models.db.session.commit()
            flash('User successfully deleted', 'success')
            syslog.syslog(syslog.LOG_CRIT, "Admin: " + current_user.email + " deleted User: " + user.email)
        else:
            # User id 1 is the built-in admin account and must never be removed.
            flash("Sorry but you just can't delete this admin.", 'danger')
            syslog.syslog(syslog.LOG_ALERT, "User wanted to delete admin" + current_user.email)
        return redirect(url_for('main.admin_home'))
    elif newform.validate_on_submit():
        # Import the submitted PGP key; fall back to a sentinel fingerprint
        # when the key block is invalid.
        ki = gpg.import_keys(newform.pgp.data)
        if not ki.fingerprints:
            fingerp = "--- NO VALID PGP ---"
        else:
            fingerp = ki.fingerprints[0]
        user = models.User(email=escape(newform.email.data),
                           name=escape(newform.name.data),
                           affiliation=escape(newform.affiliation.data),
                           pgp=newform.pgp.data,
                           password=newform.password.data,
                           fingerprint=fingerp,
                           confirmed=True)
        models.db.session.add(user)
        models.db.session.commit()
        syslog.syslog(syslog.LOG_WARNING, "Admin: " + current_user.email + " created User: " + user.email)
        flash('User successfully created.', 'success')
        return redirect(url_for('main.admin_home'))
    elif updateform.validate_on_submit():
        ki = gpg.import_keys(updateform.pgp.data)
        if not ki.fingerprints:
            fingerp = "--- NO VALID PGP ---"
        else:
            fingerp = ki.fingerprints[0]
        user = models.User.query.filter_by(id=updateform.id.data).first()
        user.name = escape(updateform.name.data)
        user.affiliation = escape(updateform.affiliation.data)
        user.fingerprint = fingerp
        user.pgp = updateform.pgp.data
        if updateform.id.data != '1':
            # BUG FIX: the original wrote "for user in models.User.query.all()",
            # which rebound `user` to the last row of the table, so the
            # email/confirmed/role updates below (and session.add) landed on
            # the wrong account. Use a distinct loop variable instead.
            listemail = [existing.email for existing in models.User.query.all()]
            if updateform.email.data not in listemail or updateform.email.data == models.User.query.filter_by(
                    id=updateform.id.data).first().email:
                user.email = escape(updateform.email.data)
                user.confirmed = updateform.confirmed.data
                user.role = models.Role.query.get(updateform.role.data)
            else:
                syslog.syslog(syslog.LOG_ERR,
                              "Admin: " + current_user.email + " Tried to assign existing email to user: " + user.email)
                flash('Email already existing', 'warning')
                return redirect(url_for('main.admin_home'))
        else:
            # User id 1 is pinned to the admin role and the configured
            # PORTAL_ADMIN email; attempts to change it are only logged.
            user.role = models.Role.query.get('1')
            user.email = os.getenv('PORTAL_ADMIN')
            user.confirmed = True
            syslog.syslog(syslog.LOG_ALERT,
                          "Admin: " + current_user.email + " Tried to remove right of Admin: " + user.email)
        models.db.session.add(user)
        models.db.session.commit()
        syslog.syslog(syslog.LOG_WARNING, "Admin: " + current_user.email + " updated User: " + user.email)
        flash('User successfully updated', 'success')
        return redirect(url_for('main.admin_home'))
    return render_template("admin_home.html",
                           newform=newform,
                           updateform=updateform,
                           deleteform=deleteform)
@main_blueprint.route('/userjson', methods=['GET', 'POST'])
@login_required
@admin_required
def userjson():
    """Return users as JSON for the admin table widget.

    Honors ``limit``, ``offset``, ``sort`` and ``order`` query parameters and
    returns ``{"total": <count>, "rows": [...]}``.
    """
    jusers = []
    dic = {}
    limit = request.args.get('limit')
    offset = request.args.get('offset')
    order = request.args.get('order')
    # Whitelist the requested sort column against the actual model columns so
    # a crafted `sort` value can never reach order_by() unchecked.
    # BUG FIX: the original tested "'users.' + sort" against
    # User.__table__.columns — the collection is keyed by bare column names,
    # so the prefixed lookup never matched (sorting always fell back to
    # 'name') and a missing `sort` parameter raised a TypeError on the
    # string concatenation.
    sort_arg = request.args.get('sort')
    if sort_arg is not None and sort_arg in models.User.__table__.columns:
        sort = sort_arg
    else:
        sort = 'name'
    if order == 'desc':
        user_list = models.User.query.order_by(desc(sort)).limit(limit).offset(offset).all()
    else:
        user_list = models.User.query.order_by(sort).limit(limit).offset(offset).all()
    num = models.User.query.count()
    for user in user_list:
        dusers = {'id': user.id,
                  'name': user.name,
                  'email': user.email,
                  'affiliation': user.affiliation,
                  'pgp': user.pgp,
                  'fingerprint': user.fingerprint,
                  'confirmed': user.confirmed,
                  'role_id2': user.role_id,
                  'role_id': (models.Role.query.filter_by(id=user.role_id).first()).name
                  }
        jusers.append(dusers)
    dic['total'] = num
    dic['rows'] = jusers
    return jsonify(dic)
| [
"swapnilansh182@gmail.com"
] | swapnilansh182@gmail.com |
ee1b7226a561379a914fa3fd752b3bd10cec0b11 | 4b5ef8e75387ffdc2e3e310938aa1ccad5fb3a45 | /drop_and_grab_algorithm.py | 759b12c43e99e2fe571faf383ef7ee68155a14f3 | [] | no_license | JeDeveloper/qscout | baa35c884a69880ffbcea93a14d22e382f4c6bc7 | 93fa90d82eec22fde09940dccfd2a1ab93b7f825 | refs/heads/master | 2023-02-21T18:34:51.542873 | 2021-01-11T21:34:41 | 2021-01-11T21:34:41 | 320,684,236 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,693 | py | from qgis.core import (QgsProcessingParameterFile)
from qgis import processing
from .qscout_pin_algorithm import *
from .pin_dropper_algorithm import *
from .grid_aggregator_algorithm import *
from .value_grabber_algorithm import QScoutValueGrabberAlgorithm, band_field
class DropAndGrabAlgoithm(QgsProcessingAlgorithm):
    """Composite QGIS processing algorithm chaining three QScout steps.

    Runs "QScout:droppins" (drop pins on a repeating raster pattern inside a
    bounding polygon), feeds the dropped pins to "QScout:valuegrab" (sample
    raster values at each point), then aggregates the sampled points into a
    polygon grid with "QScout:gridaggregator". Produces two layers: the
    value-annotated points and the aggregate grid.

    NOTE(review): the class name is misspelled ("Algoithm"); renaming it
    would change the public interface, so it is left as-is.
    """

    # Output parameter keys for the two layers this algorithm produces.
    DROP_AND_GRAB_GRID_OUT = 'DROP_AND_GRAB_GRID_OUT'
    DROP_AND_GRAB_POINTS_OUT = 'DROP_AND_GRAB_POINTS_OUT'

    def initAlgorithm(self, config):
        """Declare the union of all parameters consumed by the three
        chained sub-algorithms (pin dropper, value grabber, aggregator)."""
        # QSCOUT PIN ALGORITHM PARAMS
        # raster layer. repeating pattern in the raster will be used to drop pins
        self.addParameter(
            QgsProcessingParameterRasterLayer(
                QScoutPinAlgorithm.TARGETING_RASTER_INPUT,
                self.tr('Targeting Raster'),
                [QgsProcessing.TypeRaster],
                optional=True
            )
        )
        # bounding box
        self.addParameter(
            QgsProcessingParameterFeatureSource(
                QScoutPinAlgorithm.BOUND_POLYGON_INPUT,
                self.tr('Bounding Box'),
                [QgsProcessing.TypeVectorPolygon]
            )
        )
        # direction vector for rows
        self.addParameter(
            QgsProcessingParameterFeatureSource(
                QScoutPinAlgorithm.ROW_VECTOR_INPUT,
                self.tr('Row Vector'),
                [QgsProcessing.TypeVectorLine],
            )
        )
        # rating function
        param = QgsProcessingParameterEnum(
            QScoutPinAlgorithm.RATE_OFFSET_MATCH_FUNCTION_INPUT,
            self.tr("Rate Offset Match Function"),
            options=MATCH_FUNCTIONS,
            defaultValue=0  # nothing I write here makes any difference
        )
        param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(param)
        # whether to compare from root
        param = QgsProcessingParameterBoolean(
            QScoutPinAlgorithm.COMPARE_FROM_ROOT_INPUT,
            self.tr("Compare from Root"),
            defaultValue=False
        )
        param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(param)
        # row height
        self.addParameter(
            QgsProcessingParameterDistance(
                QScoutPinAlgorithm.ROW_SPACING_INPUT,
                self.tr('Row Spacing'),
                parentParameterName=QScoutPinAlgorithm.BOUND_POLYGON_INPUT,
                minValue=0
            )
        )
        # point interval
        self.addParameter(
            QgsProcessingParameterDistance(
                QScoutPinAlgorithm.POINT_INTERVAL_INPUT,
                self.tr('Point Interval'),
                parentParameterName=QScoutPinAlgorithm.BOUND_POLYGON_INPUT,
                minValue=0
            )
        )
        # overlay box radius
        param = QgsProcessingParameterNumber(
            QScoutPinAlgorithm.OVERLAY_BOX_RADIUS_INPUT,
            self.tr('Overlay Box Radius'),
            type=QgsProcessingParameterNumber.Integer,
            minValue=0,
            defaultValue=2
        )
        param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(param)
        # match threshold
        self.addParameter(
            QgsProcessingParameterNumber(
                QScoutPinAlgorithm.OVERLAY_MATCH_THRESHOLD_INPUT,
                self.tr("Match Threshold"),
                type=QgsProcessingParameterNumber.Double,
                minValue=0,
                maxValue=1,
                defaultValue=.85,  # this number has absolutely no scientific or mathematical basis
            )
        )
        # corner of the bounding box where pin dropping starts
        self.addParameter(
            QgsProcessingParameterEnum(
                QScoutPinAlgorithm.START_CORNER_INPUT,
                self.tr("Start Corner"),
                options=START_CORNERS,
                defaultValue=0
            )
        )
        # patch size
        param = QgsProcessingParameterNumber(
            QScoutPinAlgorithm.PATCH_SIZE_INPUT,
            self.tr('Maximum Patch Size'),
            type=QgsProcessingParameterNumber.Integer,
            minValue=0,
            defaultValue=2
        )
        param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(param)
        # optional parameters
        param = QgsProcessingParameterNumber(
            QScoutPinAlgorithm.ROW_SPACING_STDEV_INPUT,
            self.tr('Row Spacing Stdev'),
            type=QgsProcessingParameterNumber.Double,
            minValue=0,
            optional=True
        )
        param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(param)
        # point interval standard deviation
        param = QgsProcessingParameterNumber(
            QScoutPinAlgorithm.POINT_INTERVAL_STDEV_INPUT,
            self.tr('Point Interval Stdev'),
            type=QgsProcessingParameterNumber.Double,
            minValue=0,
            optional=True
        )
        param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(param)
        # search iteration size
        param = QgsProcessingParameterNumber(
            QScoutPinAlgorithm.SEARCH_ITERATION_SIZE_INPUT,
            self.tr("Search Iteration Size"),
            type=QgsProcessingParameterNumber.Integer,
            minValue=2,
            defaultValue=5
        )
        param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(param)
        # number of search iterations
        param = QgsProcessingParameterNumber(
            QScoutPinAlgorithm.SEARCH_NUM_ITERATIONS_INPUT,
            self.tr("Number of Search Iterations"),
            type=QgsProcessingParameterNumber.Integer,
            minValue=1,
            defaultValue=2
        )
        param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(param)
        # precision bias coefficient
        param = QgsProcessingParameterNumber(
            QScoutPinAlgorithm.PRECISION_BIAS_COEFFICIENT_INPUT,
            self.tr("Precision Bias Coefficient"),
            type=QgsProcessingParameterNumber.Double,
            minValue=0,
            defaultValue=0
        )
        param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(param)
        # PIN DROPPER PARAMS
        # fields to use
        param = QgsProcessingParameterString(
            QScoutPinDropperAlgorithm.DATA_SOURCE_FIELDS_TO_USE,
            self.tr("Fields to Use"),
            optional=True
        )
        param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(param)
        # panel size
        param = QgsProcessingParameterNumber(
            QScoutPinDropperAlgorithm.PANEL_SIZE_INPUT,
            self.tr("Panel Size"),
            minValue=0,
            defaultValue=0
        )
        param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(param)
        # drop data-less points
        self.addParameter(
            QgsProcessingParameterBoolean(
                QScoutPinDropperAlgorithm.DROP_DATALESS_POINTS_INPUT,
                self.tr("Drop Data-less Points"),
                defaultValue=False  # should maybe change to false in production version
            )
        )
        # input data
        self.addParameter(
            QgsProcessingParameterFile(
                QScoutPinDropperAlgorithm.DATA_SOURCE_INPUT,
                self.tr("Input Data"),
                optional=True
            )
        )
        # VALUE GRABBER PARAMS
        # have to use QgsProcessingParameterFile to account for rasters too large to load in qgis
        self.addParameter(
            QgsProcessingParameterFile(
                QScoutValueGrabberAlgorithm.RASTER_INPUT,
                self.tr("Raster File Input")
            )
        )
        # GRID AGGREGATOR PARAMS
        self.addParameter(
            QgsProcessingParameterDistance(
                QScoutGridAggregatorAlgorithm.GRID_CELL_W_INPUT,
                self.tr("Grid Cell Width"),
                parentParameterName=QScoutPinAlgorithm.BOUND_POLYGON_INPUT,
                minValue=0
            )
        )
        self.addParameter(
            QgsProcessingParameterDistance(
                QScoutGridAggregatorAlgorithm.GRID_CELL_H_INPUT,
                self.tr("Grid Cell Height"),
                parentParameterName=QScoutPinAlgorithm.BOUND_POLYGON_INPUT,
                minValue=0
            )
        )
        self.addParameter(
            QgsProcessingParameterEnum(
                QScoutGridAggregatorAlgorithm.AGGREGATION_FUNCTION_INPUT,
                self.tr("Aggregation Function"),
                options=AGGREGATION_FUNCTIONS,
                defaultValue=0
            )
        )
        # outputs: value-annotated points and the aggregate polygon grid
        self.addParameter(
            QgsProcessingParameterFeatureSink(
                self.DROP_AND_GRAB_POINTS_OUT,
                self.tr("Points Output")
            )
        )
        self.addParameter(
            QgsProcessingParameterFeatureSink(
                self.DROP_AND_GRAB_GRID_OUT,
                self.tr("Aggregate Grid")
            )
        )

    def flags(self):
        """Force single-threaded execution (this algorithm invokes child
        algorithms and loads their results into the project)."""
        return super(DropAndGrabAlgoithm, self).flags() | QgsProcessingAlgorithm.FlagNoThreading

    def processAlgorithm(self, parameters, context, feedback):
        """Run droppins -> valuegrab -> gridaggregator and return both layer ids."""
        # QSCOUT PARAMETERS
        # required parameters
        target_raster = self.parameterAsRasterLayer(parameters, QScoutPinAlgorithm.TARGETING_RASTER_INPUT, context)
        bound_box_layer = self.parameterAsVectorLayer(parameters, QScoutPinAlgorithm.BOUND_POLYGON_INPUT, context)
        overlay_box_radius = self.parameterAsDouble(parameters, QScoutPinAlgorithm.OVERLAY_BOX_RADIUS_INPUT, context)
        col_w = self.parameterAsDouble(parameters, QScoutPinAlgorithm.POINT_INTERVAL_INPUT, context)
        row_h = self.parameterAsDouble(parameters, QScoutPinAlgorithm.ROW_SPACING_INPUT, context)
        row_vector_layer = self.parameterAsVectorLayer(parameters, QScoutPinAlgorithm.ROW_VECTOR_INPUT, context)
        # optional parameters
        row_h_stdev = self.parameterAsDouble(parameters, QScoutPinAlgorithm.ROW_SPACING_STDEV_INPUT, context)
        point_interval_stdev = self.parameterAsDouble(parameters, QScoutPinAlgorithm.POINT_INTERVAL_STDEV_INPUT, context)
        overlay_match_min_threshold = self.parameterAsDouble(parameters, QScoutPinAlgorithm.OVERLAY_MATCH_THRESHOLD_INPUT,
                                                             context)
        search_iter_count = self.parameterAsInt(parameters, QScoutPinAlgorithm.SEARCH_NUM_ITERATIONS_INPUT, context)
        search_iter_size = self.parameterAsInt(parameters, QScoutPinAlgorithm.SEARCH_ITERATION_SIZE_INPUT, context)
        patch_size = self.parameterAsInt(parameters, QScoutPinAlgorithm.PATCH_SIZE_INPUT, context)
        offset_func_idx = self.parameterAsEnum(parameters, QScoutPinAlgorithm.RATE_OFFSET_MATCH_FUNCTION_INPUT, context)
        compare_from_root = self.parameterAsBool(parameters, QScoutPinAlgorithm.COMPARE_FROM_ROOT_INPUT, context)
        precision_bias_coeff = self.parameterAsDouble(parameters, QScoutPinAlgorithm.PRECISION_BIAS_COEFFICIENT_INPUT, context)
        start_corner = self.parameterAsEnum(parameters, QScoutPinAlgorithm.START_CORNER_INPUT, context)
        # PIN DROPPER PARAMS
        data_source = self.parameterAsFile(parameters, QScoutPinDropperAlgorithm.DATA_SOURCE_INPUT, context)
        drop_dataless_points = self.parameterAsBool(parameters, QScoutPinDropperAlgorithm.DROP_DATALESS_POINTS_INPUT, context)
        fields_to_use = self.parameterAsString(parameters, QScoutPinDropperAlgorithm.DATA_SOURCE_FIELDS_TO_USE, context)
        panel_size = self.parameterAsInt(parameters, QScoutPinDropperAlgorithm.PANEL_SIZE_INPUT, context)
        pin_dropper_alg_params = {
            QScoutPinAlgorithm.TARGETING_RASTER_INPUT: target_raster,
            QScoutPinAlgorithm.BOUND_POLYGON_INPUT: bound_box_layer,
            QScoutPinAlgorithm.OVERLAY_BOX_RADIUS_INPUT: overlay_box_radius,
            QScoutPinAlgorithm.POINT_INTERVAL_INPUT: col_w,
            QScoutPinAlgorithm.ROW_SPACING_INPUT: row_h,
            QScoutPinAlgorithm.ROW_VECTOR_INPUT: row_vector_layer,
            QScoutPinAlgorithm.ROW_SPACING_STDEV_INPUT: row_h_stdev,
            QScoutPinAlgorithm.POINT_INTERVAL_STDEV_INPUT: point_interval_stdev,
            QScoutPinAlgorithm.OVERLAY_MATCH_THRESHOLD_INPUT: overlay_match_min_threshold,
            QScoutPinAlgorithm.SEARCH_NUM_ITERATIONS_INPUT: search_iter_count,
            QScoutPinAlgorithm.SEARCH_ITERATION_SIZE_INPUT: search_iter_size,
            QScoutPinAlgorithm.PATCH_SIZE_INPUT: patch_size,
            QScoutPinAlgorithm.RATE_OFFSET_MATCH_FUNCTION_INPUT: offset_func_idx,
            QScoutPinAlgorithm.COMPARE_FROM_ROOT_INPUT: compare_from_root,
            QScoutPinAlgorithm.PRECISION_BIAS_COEFFICIENT_INPUT: precision_bias_coeff,
            QScoutPinAlgorithm.START_CORNER_INPUT: start_corner,
            QScoutPinDropperAlgorithm.DATA_SOURCE_INPUT: data_source,
            QScoutPinDropperAlgorithm.DROP_DATALESS_POINTS_INPUT: drop_dataless_points,
            QScoutPinDropperAlgorithm.DATA_SOURCE_FIELDS_TO_USE: fields_to_use,
            QScoutPinDropperAlgorithm.PANEL_SIZE_INPUT: panel_size,
            QScoutPinDropperAlgorithm.DROPPED_PINS_OUTPUT: "memory:"  # "memory:" asks for a temporary in-memory sink
        }
        # this processing algorithm produces a vector layer of pin geometry type
        pin_drop_out = processing.run("QScout:droppins", pin_dropper_alg_params,
                                      context=context, feedback=feedback, is_child_algorithm=True)
        pin_drop_out = pin_drop_out[QScoutPinDropperAlgorithm.DROPPED_PINS_OUTPUT]
        vals_raster = self.parameterAsFile(parameters, QScoutValueGrabberAlgorithm.RASTER_INPUT, context)
        # VALUE GRABBER PARAMS
        grab_alg_params = {
            QScoutValueGrabberAlgorithm.RASTER_INPUT: vals_raster,
            QScoutValueGrabberAlgorithm.POINTS_INPUT: pin_drop_out,
            QScoutValueGrabberAlgorithm.POINTS_WITH_VALUES_OUTPUT: parameters[self.DROP_AND_GRAB_POINTS_OUT]
        }
        # this processing algorithm produces a vector layer of pin geometry type
        points_layer_id = processing.runAndLoadResults("QScout:valuegrab", grab_alg_params,
                                                       context=context, feedback=feedback)
        points_layer_id = points_layer_id[QScoutValueGrabberAlgorithm.POINTS_WITH_VALUES_OUTPUT]
        points_layer = QgsProject.instance().mapLayer(points_layer_id)
        # GRID AGGREGATOR PARAMS
        grid_w = self.parameterAsDouble(parameters, QScoutGridAggregatorAlgorithm.GRID_CELL_W_INPUT, context)
        grid_h = self.parameterAsDouble(parameters, QScoutGridAggregatorAlgorithm.GRID_CELL_H_INPUT, context)
        ag_idx = self.parameterAsEnum(parameters, QScoutGridAggregatorAlgorithm.AGGREGATION_FUNCTION_INPUT, context)
        # this is a bit wonky
        # NOTE(review): fields_to_use_list is a map() iterator; the membership
        # test inside the filter below consumes it, so after the first field
        # is checked the iterator is exhausted and later fields can never
        # match. Consider materializing with list() — TODO confirm intent.
        fields_to_use_list = map(lambda f: f.strip(), fields_to_use.split(","))
        ag_fields_list = points_layer.fields()
        regexes = "|".join(map(lambda r: "(%s)" % r, [ROW_REGEX, COL_REGEX, VINE_REGEX, PANEL_REGEX]))
        # keep only numeric fields that aren't row/col/vine/panel bookkeeping
        ag_fields_list = filter(lambda f:
                                (not fields_to_use or f.name() in fields_to_use_list)
                                and not re.match(regexes, f.name())
                                and (f.type() == QVariant.Int or f.type() == QVariant.Double),
                                ag_fields_list)
        ag_fields = ";".join(map(lambda f: f.name(), ag_fields_list))
        # for field in ag_fields_list:
        #     ag_fields.append(field)
        grid_ag_alg_params = {
            QScoutValueGrabberAlgorithm.POINTS_INPUT: points_layer,
            QScoutGridAggregatorAlgorithm.GRID_CELL_W_INPUT: grid_w,
            QScoutGridAggregatorAlgorithm.GRID_CELL_H_INPUT: grid_h,
            QScoutGridAggregatorAlgorithm.AGGREGATION_FUNCTION_INPUT: ag_idx,
            QScoutGridAggregatorAlgorithm.FIELDS_TO_USE_INPUT: ag_fields,
            QScoutGridAggregatorAlgorithm.AGGREGATE_GRID_OUTPUT: parameters[self.DROP_AND_GRAB_GRID_OUT]
        }
        # this plugin produces a vector layer of polygon geometry type
        grid_alg_out = processing.runAndLoadResults("QScout:gridaggregator", grid_ag_alg_params,
                                                    context=context, feedback=feedback)
        ag_layer_id = grid_alg_out[QScoutGridAggregatorAlgorithm.AGGREGATE_GRID_OUTPUT]
        return {self.DROP_AND_GRAB_POINTS_OUT: points_layer_id, self.DROP_AND_GRAB_GRID_OUT: ag_layer_id}
    #
    # def processAlgorithm(self, parameters, context, feedback):
    #     fields_to_use = self.parameterAsString(parameters, QScoutPinDropperAlgorithm.DATA_SOURCE_FIELDS_TO_USE, context)
    #
    #     # QScoutPinDropperAlgorithm should find input layer correctly
    #     # set up empty list to buffer output points from QScoutPinDropperAlgorithm
    #     self.output_sink = []
    #     self.extra_sink = None
    #     QScoutPinDropperAlgorithm.processAlgorithm(self, parameters, context, feedback)
    #     self.input_buffer = self.output_sink  # set output buffer as input for QScoutValueGrabberAlgorithm
    #     self.output_sink = None  # create a layer this time
    #     self.extra_sink = []  # to be read by aggregator
    #     points_out_id = QScoutValueGrabberAlgorithm.processAlgorithm(self, parameters, context, feedback)[self.POINTS_WITH_VALUES_OUTPUT]
    #
    #     # this is a bit wonky
    #     # at this point, self.output_sink is the QgsFeatureSink created by QScoutValueGrabberAlgorthm.processAlgorithm
    #     ag_fields = self.output_sink.fields()
    #     regexes = "|".join(map(lambda r: "(%s)" % r, [ROW_REGEX, COL_REGEX, VINE_REGEX, PANEL_REGEX]))
    #     ag_fields = filter(lambda f:
    #                        (not fields_to_use or f.name() in fields_to_use_list)
    #                        and not re.match(regexes, f.name())
    #                        and (f.type() == QVariant.Int or f.type() == QVariant.Double),
    #                        ag_fields)
    #     ag_fields = map(lambda f: f.name(), ag_fields)
    #     parameters[QScoutGridAggregatorAlgorithm.FIELDS_TO_USE_INPUT] = ag_fields  # how will grid aggregator actually handle this? we will find out.
    #
    #     self.input_buffer = self.extra_sink
    #     self.output_sink = None
    #     self.extra_sink = None  # don't waste memory
    #     grid_out_id = QScoutGridAggregatorAlgorithm.processAlgorithm(self, parameters, context, feedback)[self.AGGREGATE_GRID_OUTPUT]
    #
    #     return {self.DROP_AND_GRAB_POINTS_OUT: points_out_id, self.DROP_AND_GRAB_GRID_OUT: grid_out_id}

    def name(self):
        """Unique algorithm id used by the processing framework."""
        return "dropandgrab"

    def displayName(self):
        """
        Returns the translated algorithm name, which should be used for any
        user-visible display of the algorithm name.
        """
        return self.tr("Drop Pins and Grid Grab")

    def group(self):
        """
        Returns the name of the group this algorithm belongs to. This string
        should be localised.
        """
        return self.tr("QScout")

    def groupId(self):
        """
        Returns the unique ID of the group this algorithm belongs to. This
        string should be fixed for the algorithm, and must not be localised.
        The group id should be unique within each provider. Group id should
        contain lowercase alphanumeric characters only and no spaces or other
        formatting characters.
        """
        return 'qscout'

    def tr(self, string):
        """Translate *string* through Qt's 'Processing' translation context."""
        return QCoreApplication.translate('Processing', string)

    def createInstance(self):
        """Return a fresh instance for the processing framework."""
        return DropAndGrabAlgoithm()

    # NOTE(review): the three members below reference self.input_buffer /
    # self.output_sink / self.extra_sink, which are only assigned in the
    # commented-out processAlgorithm above — they appear to be leftovers from
    # an earlier mixin design. The @abstractmethod decorators have no effect
    # here because the class does not use ABCMeta. TODO confirm before removal.
    @abstractmethod
    def feature_input(self):
        """
        should return an iterable, generally either a QgsFeatureIterator or list
        """
        return self.input_buffer

    @abstractmethod
    def feature_output(self):
        """
        should return an instance of either QgsFeatureSink or list
        """
        return self.output_sink

    def append_to_feature_output(self, feat, count=0):
        """Forward *feat* to the extra sink (when present) and to the parent
        implementation."""
        if self.extra_sink is not None:
            self.extra_sink.append(feat)
        return super().append_to_feature_output(feat, count)
"joshuaevanslowell@gmail.com"
] | joshuaevanslowell@gmail.com |
36f0c2503555c6c05554aba80e07ccabf726f6b2 | 7337c58669cba9e87adeb195a7bfe689eb75ff58 | /src/edinet/app/eagle/migrations/0001_initial.py | 4db83e60a3d07f24a6e1a2767ae8ab1f7ca2d554 | [
"Apache-2.0"
] | permissive | ryuichi1208/air-pipeline | 71d6542f6cced966d5877f1f56baa42639b4f4c2 | c98382ea95cbef6a7dc529281a474ee33bede7cb | refs/heads/master | 2023-05-28T15:00:35.312378 | 2022-11-16T17:00:21 | 2022-11-16T17:00:21 | 229,581,131 | 0 | 0 | Apache-2.0 | 2023-05-08T20:29:24 | 2019-12-22T14:33:50 | Python | UTF-8 | Python | false | false | 5,017 | py | # Generated by Django 2.2.2 on 2019-07-03 04:01
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the `eagle` app.

    Auto-generated by Django 2.2.2 (`makemigrations`). Creates the base
    Company / Document / Feature tables plus their EDINET-specific
    subclasses, which use Django multi-table inheritance (the `*_ptr`
    one-to-one parent links). Generated migrations should normally not be
    edited by hand.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        # Base company record (local and global display names).
        migrations.CreateModel(
            name='Company',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('local_name', models.TextField()),
                ('global_name', models.TextField()),
            ],
        ),
        # A filed document belonging to a company, covering an optional period.
        migrations.CreateModel(
            name='Document',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('period_start', models.DateField(null=True)),
                ('period_end', models.DateField(null=True)),
                ('submitted_date', models.DateTimeField()),
                ('lang', models.CharField(max_length=2)),
                ('path', models.TextField()),
                ('company', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='eagle.Company')),
            ],
        ),
        # A value extracted from a document (unit + textual grounding).
        migrations.CreateModel(
            name='Feature',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('unit', models.CharField(max_length=3)),
                ('ground', models.TextField()),
                ('document', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='eagle.Document')),
            ],
        ),
        # Company subclass carrying EDINET registry identifiers.
        migrations.CreateModel(
            name='EDINETCompany',
            fields=[
                ('company_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='eagle.Company')),
                ('edinet_code', models.CharField(max_length=6)),
                ('jcn', models.CharField(max_length=13, null=True)),
                ('sec_code', models.CharField(max_length=5, null=True)),
                ('fund_code', models.CharField(max_length=6, null=True)),
            ],
            bases=('eagle.company',),
        ),
        # Feature subclass holding a single integer value.
        migrations.CreateModel(
            name='NumberOfExecutives',
            fields=[
                ('feature_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='eagle.Feature')),
                ('value', models.IntegerField()),
            ],
            bases=('eagle.feature',),
        ),
        # Document subclass mirroring the EDINET document-list API fields,
        # including a self-referencing link to a parent document.
        migrations.CreateModel(
            name='EDINETDocument',
            fields=[
                ('document_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='eagle.Document')),
                ('xbrl_path', models.TextField()),
                ('pdf_path', models.TextField()),
                ('edinet_document_id', models.CharField(max_length=8)),
                ('edinet_document_type', models.CharField(max_length=3)),
                ('title', models.TextField()),
                ('ordinance_code', models.CharField(max_length=3)),
                ('form_code', models.CharField(max_length=6)),
                ('issuer_edinet_code', models.CharField(max_length=6, null=True)),
                ('subject_edinet_code', models.CharField(max_length=6, null=True)),
                ('subsidiary_edinet_code', models.CharField(max_length=6, null=True)),
                ('submit_reason', models.TextField(null=True)),
                ('operated_date', models.DateTimeField(null=True)),
                ('withdraw_status', models.CharField(max_length=1)),
                ('operation_status', models.CharField(max_length=1)),
                ('disclosure_status', models.CharField(max_length=1)),
                ('has_attachment', models.BooleanField()),
                ('has_xbrl', models.BooleanField()),
                ('has_pdf', models.BooleanField()),
                ('has_english_doc', models.BooleanField()),
                ('parent_document_id', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='eagle.EDINETDocument')),
            ],
            bases=('eagle.document',),
        ),
        # Monthly company snapshot; one row per (company, year_month).
        migrations.CreateModel(
            name='CompanyData',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('year_month', models.DateField()),
                ('company', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='eagle.Company')),
                ('number_of_executives', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='eagle.NumberOfExecutives')),
            ],
            options={
                'unique_together': {('company', 'year_month')},
            },
        ),
    ]
| [
"ryucrosskey@gmail.com"
] | ryucrosskey@gmail.com |
56e863d35664bee987c60e191072ea3ecebfd4c6 | 0ab0403303090c3ebaa6b55635c1247576eb1dbb | /models/quantize_affine.py | 8fea43ff96fb14aaacb4c3cf72634ca5ae07aaf4 | [] | no_license | silvrwolfboy/learning-compressible-subspaces | c8bc0d89c45f8894a318b2a74601a819deeb84a4 | 308f5216c7b0c15a0ebc88c7cec3e7d28c82b6c1 | refs/heads/master | 2023-08-28T06:32:33.669751 | 2021-10-27T16:22:28 | 2021-10-27T16:22:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,297 | py | #
# For licensing see accompanying LICENSE file.
# Copyright (C) 2021 Apple Inc. All Rights Reserved.
#
import collections
import numbers
from typing import Any
from typing import Optional
import numpy as np
import torch
from torch import autograd
from torch import nn
from .special_tensors import RepresentibleByQuantizeAffine
from .special_tensors import tag_with_metadata
# Affine quantization parameters: real_value ≈ scale * (q - zero_point).
# NOTE(review): mark_quantize_affine() below passes a numpy dtype as the
# `num_bits` field, so consumers apparently accept either a bit count or a
# dtype here — TODO confirm.
QuantizeAffineParams2 = collections.namedtuple(
    "QuantizeAffineParams", ["scale", "zero_point", "num_bits"]
)
# Any magnitude above this threshold is treated as divergence.
INFINITY = 1e10


def _validate_tensor(tensor: torch.Tensor) -> None:
    """Raise ValueError if *tensor* contains NaN or divergent values."""
    nan_found = torch.isnan(tensor).any()
    if nan_found:
        raise ValueError("Found NaN in the tensor.")
    peak_magnitude = tensor.abs().max()
    if peak_magnitude > INFINITY:
        raise ValueError(
            "Tensor seems to be diverging. Found a value > {}".format(INFINITY)
        )
def get_quantized_representation(
    tensor: torch.Tensor,
    quantize_params: QuantizeAffineParams2,
) -> torch.Tensor:
    """Quantize a float ``tensor`` under ``quantize_params``.

    The returned tensor holds the quantized values, and the quantization
    parameters are attached to it as special-tensor metadata.

    Raises:
        ValueError: if ``tensor`` contains NaN or divergent values, or if
            ``quantize_params`` has a zero scale with a non-zero zero point.
    """
    _validate_tensor(tensor)
    scale, zero_point, num_bits = quantize_params
    if scale == 0:
        # Degenerate mapping: every element quantizes to zero.
        if zero_point != 0:
            raise ValueError(
                "The given QuantizeAffineParams (={}) has a non-zero zero point"
                " with a scale of 0.".format(quantize_params)
            )
        quantized = torch.zeros_like(tensor)
    else:
        qmin, qmax = get_qmin_qmax(num_bits)
        inv_scale = 1 / scale
        quantized = ((tensor * inv_scale).round_() + zero_point).clamp_(
            qmin, qmax
        )
    tag_with_metadata(quantized, quantize_params)
    return quantized
def mark_quantize_affine(
    tensor: torch.Tensor,
    scale: float,
    zero_point: int,
    dtype: np.dtype = np.uint8,
) -> None:
    """Mark a tensor as quantized with affine.
    Arguments:
        tensor (torch.Tensor): The tensor to be marked as affine-quantizable
            Tensor.
        scale (float): the scale (from quantization parameters).
        zero_point (int): The zero_point (from quantization parameters).
        dtype (numpy.dtype): Type of tensor when quantized (this is usually
            numpy.uint8, which is used for Q8). A ValueError will be thrown if
            the input dtype is not one of the following:
            {numpy.uint8, numpy.int32}.
    """
    # NOTE(review): `dtype` is stored in the namedtuple slot named `num_bits`,
    # while the rest of this module stores a bit count there — confirm that
    # consumers of this tag actually expect a dtype in that slot.
    quant_params = QuantizeAffineParams2(scale, zero_point, dtype)
    tag_with_metadata(tensor, RepresentibleByQuantizeAffine(quant_params))
class QuantizeAffineFunction(autograd.Function):
    """Simulates affect of affine quantization during forward pass.
    This function simulates the affect of quantization and subsequent
    dequantization (in the forward pass only). Although the affine
    transformation results in a different basis (e.g. uint8), the output of this
    function will be a float Tensor representing that transformation (the
    dequantized Tensor).
    A ValueError will be raised if the input or resulting tensor contains NaN or
    divergent values.
    Arguments:
        input (Tensor): The input float Tensor to be quantized.
        quantize_params (quantize_affine_util.QuantizeAffineParams): The
            quantization parameter to quantize the input tensor by.
    """
    @staticmethod
    def forward(
        ctx: Any,
        input: torch.Tensor,
        quantize_params: QuantizeAffineParams2,
    ) -> torch.Tensor:
        # Round-trip through the quantized domain so the output carries real
        # quantization error, then tag it as affine-representable.
        quantized_tensor = get_quantized_representation(input, quantize_params)
        dequantized_tensor = dequantize(quantized_tensor, quantize_params)
        mark_quantize_affine(
            dequantized_tensor,
            quantize_params.scale,
            quantize_params.zero_point,
            quantize_params.num_bits,
        )
        return dequantized_tensor
    @staticmethod
    def backward(ctx: Any, grad_output: Any) -> Any:
        """We will approximate the gradient as the identity"""
        # Straight-through estimator: round/clamp are piecewise constant (zero
        # derivative almost everywhere), so the identity keeps gradients
        # flowing. The second None is for the non-differentiable
        # quantize_params argument.
        return grad_output, None
def quantize_affine_function_continuous(
    input: torch.Tensor,
    quantize_params: QuantizeAffineParams2,
) -> torch.Tensor:
    """Quantize-dequantize ``input`` with gradients through the real ops.
    Mirrors QuantizeAffineFunction.forward, but as a plain function: no custom
    backward is attached, so autograd differentiates the actual round/clamp
    operations instead of using the identity (straight-through) approximation.
    """
    quantized_tensor = get_quantized_representation(input, quantize_params)
    dequantized_tensor = dequantize(quantized_tensor, quantize_params)
    mark_quantize_affine(
        dequantized_tensor,
        quantize_params.scale,
        quantize_params.zero_point,
        quantize_params.num_bits,
    )
    return dequantized_tensor
def get_qmin_qmax(num_bits):
    """Return (qmin, qmax) representable by a signed `num_bits`-bit integer."""
    half_range = 1 << (num_bits - 1)
    return -half_range, half_range - 1
def get_quantization_params(
    rmin: float,
    rmax: float,
    num_bits: int = 8,
) -> QuantizeAffineParams2:
    """Returns QuantizeAffineParams for a data range [rmin, rmax].
    The range must include 0 otherwise that's a failure. The scale and
    zero_point are picked such that the quantization error is minimized.
    Arguments:
        rmin (float): The data minimum point. Numbers smaller than rmin would
            not be representible by the quantized schema.
        rmax (float): The data maximum point. Numbers bigger than rmax would
            not be representible by the quantized schema.
        num_bits (int): Number of bits in the signed quantized representation
            (defaults to 8).
    Raises:
        ValueError: if rmin > rmax, if the range does not include 0, or if 0
            is not exactly representable with the derived parameters.
    """
    if rmin > rmax:
        raise ValueError("Got rmin (={}) > rmax (={}).".format(rmin, rmax))
    if rmin > 0 or rmax < 0:
        raise ValueError(
            "The data range ([{}, {}]) must always include "
            "0.".format(rmin, rmax)
        )
    if rmin == rmax == 0.0:
        # Special case: all values are zero.
        return QuantizeAffineParams2(0, 0, num_bits)
    # Scale is floating point and is (rmax - rmin) / (qmax - qmin) to map the
    # length of the ranges. For zero_point, we solve the following equation:
    # rmin = (qmin - zero_point) * scale
    qmin, qmax = get_qmin_qmax(num_bits)
    scale = (rmax - rmin) / (qmax - qmin)
    zero_point = qmin - (rmin / scale)
    # Round to the nearest integer zero point, clamped to the representable
    # quantized range.
    zero_point = np.clip(round(zero_point), qmin, qmax).astype(np.int32)
    quantize_params = QuantizeAffineParams2(scale, zero_point, num_bits)
    # We must ensure that zero is exactly representable with these quantization
    # parameters. This is easy enough to add a self-check for.
    quantized_zero = quantize(np.array([0.0]), quantize_params)
    dequantized_zero = dequantize(quantized_zero, quantize_params)
    if dequantized_zero.item() != 0.0:
        raise ValueError(
            f"Quantization parameters are invalid: scale={scale}, zero={zero_point}. "
            f"Can't exactly represent zero."
        )
    return quantize_params
def quantize_affine_given_quant_params(
    input: torch.Tensor,
    quantize_params: QuantizeAffineParams2,
) -> torch.Tensor:
    """Get a quantizable approximation of a float tensor given quantize param.
    This function does not quantize the float tensor @input, but only adjusts it
    such that the returned float tensor has an exact quantized representation.
    This is a function that we want to use at training time to quantize biases
    and other parameters whose quantization schema is enforced by other
    parameters.
    In forward pass, this function is equivalent to
        dequantize(get_quantized_representation(input, quantize_param))
    However, in backward pass, this function operates as identity, making it
    ideal to be a part of the training forward pass.
    """
    return QuantizeAffineFunction.apply(input, quantize_params)
def quantize(
arr: np.ndarray, quantize_params: QuantizeAffineParams2
) -> np.ndarray:
"""Quantize a floating point array with respect to the quantization params.
Arguments:
arr (np.ndarray): The floating point data to quantize.
quantize_params (QuantizeAffineParams): The quantization parameters
under which the data should be quantized.
"""
scale = quantize_params.scale
zero_point = quantize_params.zero_point
num_bits = quantize_params.num_bits
if scale == 0:
# Special case, all elements are zeros.
if zero_point != 0:
raise ValueError(
"The given QuantizeAffineParams (={}) has a non-zero zero point"
" with a scale of 0.".format(quantize_params)
)
return np.zeros_like(arr, dtype=np.int32)
qmin, qmax = get_qmin_qmax(num_bits)
reciprocal = 1 / scale
quantized_values = (arr * reciprocal).round() + zero_point
quantized_values = quantized_values.clip(qmin, qmax)
return quantized_values
def dequantize(
q_arr: np.ndarray,
quantize_params: QuantizeAffineParams2,
) -> np.ndarray:
"""Dequantize a fixed point array with respect to the quantization params.
Arguments:
q_arr (np.ndarray): The quantized array to dequantize. It's dtype must
match quantize_params.
quantize_params (QuantizeAffineParams): The quantization parameters
under which the data should be dequantized.
"""
zero_point = quantize_params.zero_point
scale = quantize_params.scale
return (q_arr - zero_point) * scale
def quantize_affine(
    input: torch.Tensor,
    min_value: Optional[numbers.Real] = None,
    max_value: Optional[numbers.Real] = None,
    num_bits: Optional[int] = None,
) -> torch.Tensor:
    """Return a quantizable approximation of a float tensor @input.
    This function does not quantize the float tensor @input, but only adjusts it
    such that the returned float tensor has an exact quantized representation.
    This is a function that we want to use at training time to quantize weights
    and activations.
    Arguments:
        input (Tensor): The input float Tensor to be quantized.
        min_value (scalar): The running min value (possibly averaged). Derived
            from the tensor (forced to include 0) when omitted.
        max_value (scalar): The running max value (possibly averaged). Derived
            from the tensor (forced to include 0) when omitted.
        num_bits (int): The number of bits; required.
    Raises:
        ValueError: if num_bits is not supplied.
    """
    if num_bits is None:
        raise ValueError("num_bits must be supplied")
    if min_value is None:
        # Force include 0 in our calculation of min_value.
        min_value = min(input.min().item(), 0.0)
    if max_value is None:
        # Force include 0 in our calculation of max_value.
        max_value = max(input.max().item(), 0.0)
    quantize_params = get_quantization_params(min_value, max_value, num_bits)
    return QuantizeAffineFunction.apply(input, quantize_params)
class QuantizeAffine(nn.Module):
    """Pytorch quantize_affine layer for quantizing layer outputs.
    This layer will keep a running max and min, which is used to compute a scale
    and zero_point for the quantization. Note that it is not always desirable
    to start the quantization immediately while training.
    Arguments:
        momentum (scalar): The amount of averaging of min and max bounds.
            This value should be in the range [0.0, 1.0].
        iteration_delay (scalar): The number of batches to wait before starting
            to quantize.
        num_bits (int): Bit width of the simulated quantization (default 8).
        quantizer_freeze_min_max (bool): When True, the running min/max are no
            longer updated; quantization uses the frozen range.
    """
    def __init__(
        self,
        momentum=0.1,
        iteration_delay=0,
        num_bits=8,
        quantizer_freeze_min_max=False,
    ):
        super().__init__()
        self.momentum = momentum
        self.iteration_delay = iteration_delay
        # Only advanced in forward() when explicitly enabled by the trainer.
        self.increment_counter = False
        self.num_bits = num_bits
        # Buffers persist in state_dict but are not trainable parameters.
        self.register_buffer("running_min_value", torch.tensor(0.0))
        self.register_buffer("running_max_value", torch.tensor(0.0))
        self.register_buffer(
            "iteration_count", torch.zeros([1], dtype=torch.int32).squeeze()
        )
        self.quantizer_freeze_min_max = quantizer_freeze_min_max
    def __repr__(self):
        return (
            f"{self.__class__.__name__}(running_min="
            f"{self.running_min_value}, running_max="
            f"{self.running_max_value}, freeze_min_max="
            f"{self.quantizer_freeze_min_max}, num_bits={self.num_bits})"
        )
    def update_num_bits(self, num_bits):
        # Allows the bit width to be changed on the fly.
        self.num_bits = num_bits
    def forward(self, input, recomp_bn_stats=False, override_alpha=False):
        # NOTE(review): recomp_bn_stats and override_alpha are unused here;
        # presumably kept for call-signature compatibility — confirm.
        if (
            self.training
            and self.is_active()
            and not self.quantizer_freeze_min_max
        ):
            # Force include 0 in min_value and max_value calculation.
            min_value = min(input.min().item(), 0)
            max_value = max(input.max().item(), 0)
            if self.iteration_count == self.iteration_delay:
                # First active batch: seed the running range directly.
                new_running_min_value = min_value
                new_running_max_value = max_value
            else:
                # Exponential moving average; `momentum` weights the new batch.
                new_running_min_value = (
                    1.0 - self.momentum
                ) * self.running_min_value.item() + self.momentum * min_value
                new_running_max_value = (
                    1.0 - self.momentum
                ) * self.running_max_value.item() + self.momentum * max_value
            self.running_min_value.fill_(new_running_min_value)
            self.running_max_value.fill_(new_running_max_value)
        if self.is_active():
            # Simulated quantization round-trip over the tracked range.
            output = quantize_affine(
                input,
                self.running_min_value.item(),
                self.running_max_value.item(),
                self.num_bits,
            )
        else:
            output = input
        if self.training and self.increment_counter:
            self.iteration_count.fill_(self.iteration_count.item() + 1)
        return output
    def is_active(self):
        if self.training:
            return self.iteration_count >= self.iteration_delay
        # If evaluating, always run quantization:
        return True
| [
"mchorton@apple.com"
] | mchorton@apple.com |
6c708d71414961bfd27dd63946aaa70d181350d5 | 6dc463ce97fc275787cfdef563317f3f7e4f5fcf | /radio_table_widget_app/widgets.py | 557a613116b686330885748746143cf0bdc904d1 | [] | no_license | chapkovski/table_radio_widget | 7ea7506d801213cb24a832096fbf88ab7eb89c92 | 320a2b2f5462c6abe8bd0a355b1b4ac8defe3adf | refs/heads/master | 2020-03-22T09:29:23.298900 | 2018-07-06T17:24:23 | 2018-07-06T17:24:23 | 139,840,943 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 647 | py | from django.forms import RadioSelect
class TableRadio(RadioSelect):
    """RadioSelect widget rendered as a table, with optional extra rows.

    `top_row` / `bottom_row` are passed through to the template context so the
    templates can render header/footer cells around the radio inputs.
    """

    template_name = 'widgets/multiple_input.html'
    option_template_name = 'widgets/input_option.html'

    def __init__(self, top_row=None, bottom_row=None, attrs=None, choices=(), ):
        self.top_row = top_row
        self.bottom_row = bottom_row
        # __init__ must not return a value; call super() without `return`.
        super().__init__(attrs, choices)

    def get_context(self, name, value, attrs):
        context = super().get_context(name, value, attrs)
        context['top_row'] = self.top_row
        context['bottom_row'] = self.bottom_row
        # Spread columns evenly across the table width.
        # NOTE(review): raises ZeroDivisionError when `choices` is empty —
        # confirm the widget is always instantiated with at least one choice.
        context['col_width'] = 100 / len(self.choices)
        return context
| [
"chapkovski@gmail.com"
] | chapkovski@gmail.com |
4afd37c750cfc496c579ea48617490c7e9cfc5cf | 4f428027c235d4567d436efae6861200d8ae661c | /easypadel/templatetags/mathtools.py | 39d19eb72dabf777fe2cc6b47ad0676bdc21c50f | [] | no_license | jorgeron/mytfg | 058008225f73cfb72c4af307162f7d14e913e2f1 | 879571674d7845c46520a554ac27efe2f10fbedc | refs/heads/master | 2021-01-20T14:54:18.755250 | 2017-06-01T14:21:39 | 2017-06-01T14:21:39 | 82,786,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 157 | py | from django import template
register = template.Library()
@register.simple_tag()
def porcentaje(parte, total, *args, **kwargs):
return (parte*100)/total | [
"jorgeron1993@hotmail.com"
] | jorgeron1993@hotmail.com |
e3bf9ce6464b09b76421075ddc04b704216d9611 | 2625c81908b8ba4f9acd7287f19aa43897a01490 | /1. MNIST and softmax/1.MNIST_softmax.py | 3873f53606711e1f5c00128a4bde4ab1d42c5a78 | [] | no_license | George-wu509/Deep-learning-team-tutorial | 534c0a5662303e758351e768255a00b311bedd00 | b44a66301dd218e8743c4389c167304e4a75132a | refs/heads/master | 2020-12-24T21:45:13.265592 | 2016-05-05T03:18:34 | 2016-05-05T03:18:34 | 57,171,565 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,732 | py |
# ------ This is Deep learning team tutorial example1: MNIST and softmax regression -----
# Download MNIST data from server using input_data.py
# You can use the following link to get input_data.py code:
# https://github.com/tensorflow/tensorflow/blob/r0.8/tensorflow/examples/tutorials/mnist/input_data.py
# Use input_data.py to download MNIST dataset(you should have input_data.py in folder)
import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
# ------ example1: MNIST and softmax regression -----
import tensorflow as tf
# data x, weight W and bias b init values
x = tf.placeholder(tf.float32, [None, 784])
W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
# Model output y
y = tf.nn.softmax(tf.matmul(x, W) + b)
# Using cross entropy to evaluate and train the model
# Definre cross_entropy and use gradient descent to optimize(min) the parameters
y_ = tf.placeholder(tf.float32, [None, 10])
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
init = tf.initialize_all_variables()
# Define one session and initializes variables
sess = tf.Session()
sess.run(init)
# Train model 1000 times
for i in range(1000):
batch_xs, batch_ys = mnist.train.next_batch(100)
sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
# Evaluate the model
correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(y_,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print(sess.run(accuracy, feed_dict={x: mnist.test.images, y_: mnist.test.labels}))
# Reference: https://www.tensorflow.org/versions/r0.8/tutorials/mnist/beginners/index.html
| [
"wu509@purdue.edu"
] | wu509@purdue.edu |
29a18faaa50dc981225e58ae908f3d2125f8b8f8 | 0e65bf60d9ffc64bbb8e300b90fa87b27316eb9b | /app/view/dialog.py | 9697a0346dae4342898309a67634342b626f254a | [] | no_license | artu-hnrq/Ine5420 | ee2996f7c096982f375b658beb90bab7aa3bc786 | b32c8841dcfc02be4c2c7517a7f810899ae11a85 | refs/heads/master | 2022-09-07T20:30:34.349925 | 2020-03-09T02:52:41 | 2020-03-09T02:52:41 | 202,533,109 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 298 | py | from app.view import UserInterface
class Dialog(UserInterface):
    """Simple dialog window: a title, a text body, and an OK button that quits."""
    def __init__(self, title, text):
        super().__init__()
        self.window.set_title(title)
        # "text" and "ok_button" are widget ids in the builder layout —
        # presumably loaded by UserInterface.__init__; verify in the base class.
        self.builder.get_object("text").set_text(text)
        self.builder.get_object("ok_button").connect("clicked", self.quit)
"Arthur.Henrique.Della.Fraga@gmail.com"
] | Arthur.Henrique.Della.Fraga@gmail.com |
8e22d1ea23f7ca524327b2070d521659d9c3922e | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /3gziWsCxqGwGGZmr5_11.py | 6392e84ea4d1c88aaa1154379e9f4d945b640bab | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 763 | py | """
Given two integers as arguments, create a function that finds the largest
prime within the range of the two integers.
### Examples
fat_prime(2, 10) ➞ 7
# range [2, 3, 4, 5, 6, 7, 8, 9, 10] and the largest prime is 7.
fat_prime(10, 2) ➞ 7
# [10, 9, 8, 7, 6, 5, 4, 3, 2] and the largest prime is 7.
fat_prime(4, 24) ➞ 23
# range [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24] the largest prime is 23.
### Notes
All numbers will be positive integers.
"""
def fat_prime(a, b):
    """Return the largest prime in the inclusive range [min(a, b), max(a, b)].

    Returns None when the range contains no prime.
    """
    lo = min(a, b)
    hi = max(a, b)
    # Scan downward so the first prime found is the largest. The scan includes
    # `lo` itself: the original `range(hi, lo, -1)` silently skipped the lower
    # bound, so e.g. fat_prime(2, 2) wrongly returned None.
    for candidate in range(hi, lo - 1, -1):
        if is_prime(candidate):
            return candidate
    return None

def is_prime(n):
    """Return True if `n` is prime; values below 2 are not prime."""
    # The original accepted 0 and 1 as prime (empty trial-division loop).
    if n < 2:
        return False
    # Trial division up to sqrt(n) suffices: any factor pair (d, n // d) has
    # one member <= sqrt(n).
    for divisor in range(2, int(n ** 0.5) + 1):
        if n % divisor == 0:
            return False
    return True
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
218480d8de1b32721fa9093e05f3940ed09e8600 | bf9a1254cfb4af85c62b86eba21b9ad0495dd0a7 | /cal_SaD_SaM/read_NA_NV.py | c9706b0f494f82fa33e5bd5eabe03dece36611ba | [] | no_license | garyhsieh15/cal_eq_force | 2554aa51f475298241434bc9225cc9e41e26c2cf | 4f12fde4c6903484f98f999248f1e1ad83e83d0e | refs/heads/master | 2021-08-10T12:35:37.594490 | 2017-11-12T15:05:15 | 2017-11-12T15:05:15 | 110,441,743 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,196 | py | #!/usr/bin/env python3
import sys
''' ----------------------------------------------------------------------------
1. 輸入斷層的位置以及距離即可以得到NA and NV
2. NA and NV包含了, dNA, dNV, mNA and mNV.
3. 程式的想法是用if來先行判斷距離的多少, 再用斷層區域來做進一步的判定其因子為多少.
---------------------------------------------------------------------------- '''
print("\t1. 近車籠埔斷層調整因子 N A 與 NV\n \
2. 近獅潭與神卓山斷層調整因子 N A 與 NV\n \
3. 近屯子腳斷層調整因子 N A 與 NV\n \
4. 近梅山斷層調整因子 N A 與 NV\n \
5. 近新化斷層調整因子 N A 與 NV\n \
6. 近大尖山與觸口斷層調整因子 N kA 與 NV\n \
7. 近花東地區斷層(含米崙、玉里、池上與奇美斷層)調整因子 N A 與 NV")
fault_zone = input("input fault zone, 1 ~ 7:")
if int(fault_zone) > 7:
print("inputing item is out of range!!")
exit()
r_distance = input("input distance, ex:2 (km):")
if float(r_distance) <= 2:
if int(fault_zone) == 1:
dNA = 1.23
dNV = 1.36
mNA = 1.25
mNV = 1.5
elif int(fault_zone) == 2:
dNA = 1.28
dNV = 1.33
mNA = 1.26
mNV = 1.42
elif int(fault_zone) == 3:
dNA = 1.28
dNV = 1.31
mNA = 1.26
mNV = 1.42
elif int(fault_zone) == 4:
dNA = 1.37
dNV = 1.44
mNA = 1.3
mNV = 1.48
elif int(fault_zone) == 5:
dNA = 1.23
dNV = 1.15
mNA = 1.29
mNV = 1.3
elif int(fault_zone) == 6:
dNA = 1.15
dNV = 1.15
mNA = 1.21
mNV = 1.42
else:# int(fault_zone) == 7:
dNA = 1.42
dNV = 1.58
mNA = 1.32
mNV = 1.58
elif 2 < float(r_distance) <= 5:
if int(fault_zone) == 1:
dNA = 1.16
dNV = 1.32
mNA = 1.2
mNV = 1.45
elif int(fault_zone) == 2:
dNA = 1.2
dNV = 1.27
mNA = 1.18
mNV = 1.32
elif int(fault_zone) == 3:
dNA = 1.2
dNV = 1.25
mNA = 1.17
mNV = 1.32
elif int(fault_zone) == 4:
dNA = 1.28
dNV = 1.36
mNA = 1.2
mNV = 1.36
elif int(fault_zone) == 5:
dNA = 1.06
dNV = 1.05
mNA = 1.1
mNV = 1.15
elif int(fault_zone) == 6:
dNA = 1.08
dNV = 1.1
mNA = 1.17
mNV = 1.35
else:# int(fault_zone) == 7:
dNA = 1.37
dNV = 1.53
mNA = 1.26
mNV = 1.48
elif 5 < float(r_distance) <= 8:
if int(fault_zone) == 1:
dNA = 1.07
dNV = 1.22
mNA = 1.1
mNV = 1.3
elif int(fault_zone) == 2:
dNA = 1.1
dNV = 1.1
mNA = 1.05
mNV = 1.15
elif int(fault_zone) == 3:
dNA = 1.1
dNV = 1.15
mNA = 1.05
mNV = 1.15
elif int(fault_zone) == 4:
dNA = 1.15
dNV = 1.2
mNA = 1.05
mNV = 1.15
elif int(fault_zone) == 5:
dNA = 1
dNV = 1
mNA = 1
mNV = 1
elif int(fault_zone) == 6:
dNA = 1
dNV = 1.03
mNA = 1.05
mNV = 1.15
else:# int(fault_zone) == 7:
dNA = 1.28
dNV = 1.38
mNA = 1.1
mNV = 1.3
elif 8 < float(r_distance) <= 10:
if int(fault_zone) == 1:
dNA = 1.03
dNV = 1.1
mNA = 1.03
mNV = 1.15
elif int(fault_zone) == 2:
dNA = 1
dNV = 1
mNA = 1
mNV = 1
elif int(fault_zone) == 3:
dNA = 1.1
dNV = 1.15
mNA = 1.05
mNV = 1.15
elif int(fault_zone) == 4 or int(fault_zone) == 5 or int(fault_zone) == 6:
dNA = 1
dNV = 1
mNA = 1
mNV = 1
else:# int(fault_zone) == 7:
dNA = 1.14
dNV = 1.2
mNA = 1.02
mNV = 1.16
elif 10 < float(r_distance) <= 12:
if int(fault_zone) == 1:
dNA = 1.03
dNV = 1.1
mNA = 1.03
mNV = 1.15
elif int(fault_zone) == 2 or int(fault_zone) == 3 or int(fault_zone) == 4 or int(fault_zone) == 5 or int(fault_zone) == 6:
dNA = 1
dNV = 1
mNA = 1
mNV = 1
else:# int(fault_zone) == 7:
dNA = 1.14
dNV = 1.2
mNA = 1.02
mNV = 1.16
elif 12 < float(r_distance) <= 15:
if int(fault_zone) == 1 or int(fault_zone) == 2 or int(fault_zone) == 3 or int(fault_zone) == 4 or int(fault_zone) == 5 or int(fault_zone) == 6:
dNA = 1
dNV = 1
mNA = 1
mNV = 1
else:# int(fault_zone) == 7:
dNA = 1
dNV = 1
mNA = 1
mNV = 1.05
elif float(r_distance) > 15:
if int(fault_zone) == 1 or int(fault_zone) == 2 or int(fault_zone) == 3 or\
int(fault_zone) == 4 or int(fault_zone) == 5 or int(fault_zone) == 6 or int(fault_zone) == 7:
dNA = 1
dNV = 1
mNA = 1
mNV = 1
else:
print("the distance is out of range!!")
print("dNA: ", dNA)
print("dNV: ", dNV)
print("mNA: ", mNA)
print("mNV: ", mNV)
| [
"garyhsieh15@yahoo.com"
] | garyhsieh15@yahoo.com |
c94f3a57cfa500b7fe0a09ea6af43fa08b118a3b | ba11824525c7e41da98f343517e3c47f98bc81cf | /SpectralClusteringAndKMeans.py | 89979be1f7d7655fbbf9563ae4bb8a7032db4491 | [] | no_license | mani447/PCAImplementation_Kmeans_SpectralClustering | 4320fe56377a09f521b3fd3f78f3453a5e51bf9a | 580fec27bfb19def7149f232984488ca103cd484 | refs/heads/main | 2023-07-17T06:44:56.113871 | 2021-09-01T16:52:29 | 2021-09-01T16:52:29 | 402,136,454 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,223 | py | import numpy as np
from numpy import matlib
import pandas as pd
import math
from sklearn.cluster import KMeans
from matplotlib import pyplot as plt
import cv2
def LP_Matrix_Evaluate(similarity_matrix):
    """Return the unnormalized graph Laplacian L = D - W of `similarity_matrix`.

    D is the diagonal degree matrix whose entries are the row sums of W.
    """
    degree_matrix = np.zeros(similarity_matrix.shape)
    np.fill_diagonal(degree_matrix, similarity_matrix.sum(axis=1))
    return degree_matrix - similarity_matrix
def Similarity_Val(point1, point2, sigma):
    """Gaussian (RBF) similarity between two points with bandwidth `sigma`."""
    coefficient = -1 / (2 * (sigma ** 2))
    squared_distance = np.linalg.norm(point1 - point2, ord=2) ** 2
    return math.exp(coefficient * squared_distance)
def Similarity_Matrix_Evaluate(data_array, sigma):
    """Build the symmetric Gaussian similarity matrix for the rows of `data_array`.

    Entry (i, j) is the RBF similarity between rows i and j. The matrix is
    symmetric, so each pairwise similarity is computed only once (the original
    called Similarity_Val twice per pair, doubling the work).
    """
    no_of_points = data_array.shape[0]
    similarity_matrix = np.zeros((no_of_points, no_of_points))
    for i in range(0, no_of_points):
        for j in range(i, no_of_points):
            value = Similarity_Val(data_array[i], data_array[j], sigma)
            similarity_matrix[i, j] = value
            similarity_matrix[j, i] = value
    return similarity_matrix
def Similarity_Matrix_Image_Evaluate(data_array, sigma):
    """Vectorized Gaussian similarity matrix for (n, 1)-shaped data.

    Pairwise squared distances are computed as x_i^2 + x_j^2 - 2*x_i*x_j via
    broadcasting. This replaces the original numpy.matlib.repmat tiling
    (numpy.matlib is deprecated) and avoids materializing two extra (n, n)
    tiled temporaries; the arithmetic per element is unchanged.
    """
    squared = np.power(data_array, 2)            # shape (n, 1)
    cross = data_array @ np.transpose(data_array)  # shape (n, n)
    squared_distances = squared + np.transpose(squared) - 2 * cross
    return np.exp((-1 / (2 * (sigma ** 2))) * squared_distances)
class spectral_clustering:
    """Unnormalized spectral clustering: Laplacian eigenvectors + k-means."""
    def __init__(self):
        # Eigendecomposition of the graph Laplacian; filled in by
        # fit() / image_fit() before get_clusters() may be called.
        self.eigen_vectors = None
        self.eigen_values = None
    def get_clusters(self, k):
        """Cluster into k groups using the k smallest-eigenvalue eigenvectors."""
        eigen_mat = self.eigen_vectors[:, 0:k]
        kmeans = KMeans(n_clusters=k)
        clusters = kmeans.fit_predict(eigen_mat)
        return clusters
    def fit(self, data_array, sigma):
        """Build the Laplacian from pairwise RBF similarities and decompose it."""
        similarity_mat = Similarity_Matrix_Evaluate(data_array, sigma)
        laplacian_mat = LP_Matrix_Evaluate(similarity_mat)
        eigen_values, eigen_vectors = np.linalg.eigh(laplacian_mat)
        # eigh already returns ascending eigenvalues; the explicit sort keeps
        # values and vectors aligned regardless.
        sort_index = np.argsort(eigen_values)
        self.eigen_vectors = eigen_vectors[:, sort_index]
        self.eigen_values = eigen_values[sort_index]
    def image_fit(self, data_array, sigma):
        """Same as fit(), using the vectorized similarity for (n, 1) pixel data."""
        similarity_mat = Similarity_Matrix_Image_Evaluate(data_array, sigma)
        laplacian_mat = LP_Matrix_Evaluate(similarity_mat)
        eigen_values, eigen_vectors = np.linalg.eigh(laplacian_mat)
        sort_index = np.argsort(eigen_values)
        self.eigen_vectors = eigen_vectors[:, sort_index]
        self.eigen_values = eigen_values[sort_index]
def circs():
    """Return 100 2-D points: 50 on the unit circle, then 50 on a radius-2 circle.

    The original accumulated the angle with repeated float additions
    (`i = i + pi/25`) and relied on `i < 2*pi` to stop after exactly 50 steps;
    accumulated rounding error can produce a 51st step and overflow the
    preallocated (100, 2) array. Computing the angle as k * (pi/25) makes the
    iteration count exact.
    """
    data = np.zeros((100, 2))
    step = math.pi / 25
    for k in range(50):
        angle = k * step
        data[k, 0] = math.cos(angle)
        data[k, 1] = math.sin(angle)
        data[50 + k, 0] = 2 * math.cos(angle)
        data[50 + k, 1] = 2 * math.sin(angle)
    return data
# Two concentric circles: the classic example where k-means (raw coordinates)
# fails but spectral clustering succeeds; sweep the RBF bandwidth sigma.
data = circs()
sigmas = [0.01, 0.05, 0.1, 1, 5, 10, 50, 100]
for sigma in sigmas:
    Spectral_Object = spectral_clustering()
    Spectral_Object.fit(data, sigma)
    print("Eigen Vectors of the data are:")
    print(Spectral_Object.eigen_vectors)
    Clusters_Spectral = Spectral_Object.get_clusters(2)
    Kmeans_Object = KMeans(n_clusters=2)
    Clusters_Kmeans = Kmeans_Object.fit_predict(data)
    # Plot and save the spectral-clustering assignment for this sigma.
    fig1, ax1 = plt.subplots()
    scatter1 = ax1.scatter(x=data[:, 0], y=data[:,1], c=Clusters_Spectral)
    legend1 = ax1.legend(*scatter1.legend_elements(),
                         loc="upper right", title="Spectral Clusters")
    ax1.add_artist(legend1)
    plt.savefig('Spectral_Clusters_a' + str(sigma) + '.jpg')
    plt.show()
    # Plot and save the k-means assignment on the raw coordinates.
    fig2, ax2 = plt.subplots()
    scatter2 = plt.scatter(x=data[:, 0], y=data[:, 1], c=Clusters_Kmeans)
    legend2 = ax2.legend(*scatter2.legend_elements(),
                         loc="upper right", title="KMeans Clusters")
    ax2.add_artist(legend2)
    plt.savefig('Kmeans_Clusters_a' + str(sigma) + '.jpg')
    plt.show()
# Image segmentation demo: sweep sigma in a narrow band around 0.7.
sigmas = [0.701, 0.702, 0.703, 0.704, 0.705, 0.706, 0.707, 0.708, 0.709, 0.71]
image_data = cv2.imread(r"bw.jpg", cv2.IMREAD_GRAYSCALE)
[m, n] = image_data.shape
image_data = np.array(image_data, dtype=float)
# Treat each pixel intensity as one 1-D sample: shape (m*n, 1).
image_flattened_data = np.ravel(image_data)
image_flattened_data = np.reshape(image_flattened_data, (image_flattened_data.shape[0], 1))
kmeans = KMeans(n_clusters=2)
Clusters_Kmeans = kmeans.fit_predict(image_flattened_data)
# Map the 0/1 cluster labels back to a black/white image.
Image_Kmeans = np.uint8(255 * Clusters_Kmeans.reshape([m, n]))
plt.imsave("Kmeans_Clusters_b.jpg", Image_Kmeans)
cv2.imwrite(r"Kmeans_bw.jpg", Image_Kmeans)
for sigma in sigmas:
    Spectral_Object = spectral_clustering()
    # image_fit builds a dense (m*n) x (m*n) similarity matrix, so this is
    # only feasible for small images.
    Spectral_Object.image_fit(image_flattened_data, sigma)
    Clusters_Spectral = Spectral_Object.get_clusters(2)
    #Clusters_Spectral = np.array([1 if x == 0 else 0 for x in Clusters_Spectral])
    Image_Spectral = np.uint8(255 * Clusters_Spectral.reshape([m, n]))
    plt.imsave("Spectral_Clusters_b" + str(sigma) + ".jpg", Image_Spectral)
    cv2.imwrite(r"Spectral_bw" + str(sigma)+".jpg", Image_Spectral)
"manideepreddy580@gmail.com"
] | manideepreddy580@gmail.com |
2a87843282159e9c6443e44c4633d6c6cd1d6e17 | e192f4a4a829d6f58daf04782de0ba7ba87958d3 | /pytorch implementation/pytorch/data.py | 4a7e822b28708546ca34d88af53fe4c8c8d613cf | [] | no_license | Carachias/DGCNN_VoxML_Classifier | 40d4bd4d5b70ae022d0678e90bb355cd17149d63 | 24d79174942c1158fd3a3554081b92e2e185cd95 | refs/heads/master | 2023-04-16T18:48:45.081413 | 2021-05-07T13:42:17 | 2021-05-07T13:42:17 | 361,211,004 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,004 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Author: Yue Wang
@Contact: yuewangx@mit.edu
@File: data.py
@Time: 2018/10/13 6:21 PM
"""
import os
import sys
import glob
import h5py
import numpy as np
import torch
from torch.utils.data import Dataset
def download():
    """Download and unpack the ModelNet40 HDF5 archive into ./data (idempotent)."""
    BASE_DIR = os.path.dirname(os.path.abspath(__file__))
    DATA_DIR = os.path.join(BASE_DIR, 'data')
    if not os.path.exists(DATA_DIR):
        os.mkdir(DATA_DIR)
    if not os.path.exists(os.path.join(DATA_DIR, 'modelnet40_ply_hdf5_2048')):
        www = 'https://shapenet.cs.stanford.edu/media/modelnet40_ply_hdf5_2048.zip'
        zipfile = os.path.basename(www)
        # NOTE(review): shells out to wget/unzip/mv/rm, so this only works on
        # unix-like systems with those tools installed; TLS verification is
        # disabled for the download.
        os.system('wget --no-check-certificate %s; unzip %s' % (www, zipfile))
        os.system('mv %s %s' % (zipfile[:-4], DATA_DIR))
        os.system('rm %s' % (zipfile))
def load_data(partition):
    """Load all ModelNet40 HDF5 shards of `partition` ('train' or 'test').

    Downloads the dataset first if it is missing, then concatenates every
    ply_data_<partition>*.h5 file found under data/modelnet40_ply_hdf5_2048.
    Returns (all_data, all_label) as float32 / int64 numpy arrays.
    """
    download()
    BASE_DIR = os.path.dirname(os.path.abspath(__file__))
    DATA_DIR = os.path.join(BASE_DIR, 'data')
    all_data = []
    all_label = []
    for h5_name in glob.glob(os.path.join(DATA_DIR, 'modelnet40_ply_hdf5_2048', 'ply_data_%s*.h5'%partition)):
        # NOTE(review): no file mode is passed; the default differed across
        # h5py versions — prefer an explicit 'r'.
        f = h5py.File(h5_name)
        data = f['data'][:].astype('float32')
        label = f['label'][:].astype('int64')
        f.close()
        all_data.append(data)
        all_label.append(label)
        #print('data1 loaded!', all_label[0][1])
        #print('data2 loaded!', all_label[1][1])
        #print('data3 loaded!', len(all_label[2]))
        #print('data4 loaded!', len(all_label[3]))
        #print('data5 loaded!', len(all_label[4]))
    all_data = np.concatenate(all_data, axis=0)
    all_label = np.concatenate(all_label, axis=0)
    print('all data loaded!', all_data.shape)
    print('all labels loaded!', all_label.shape)
    return all_data, all_label
def loaddata(dsetdir='/content/dgcnn/pytorch/ftdat/dset/'):
    """Load every saved point-cloud tensor under `dsetdir`.

    Expects one sub-directory per class, named with the integer class label,
    each containing `.pt` files (torch-saved point tensors).

    Args:
        dsetdir: Root directory of the dataset. Defaults to the original
            hard-coded training-set location, so existing callers are
            unaffected.

    Returns:
        (all_data, all_labels): a stacked tensor of point clouds and a 1-D
        tensor of the matching integer class labels.
    """
    classnamelist = next(os.walk(dsetdir))[1]
    print(classnamelist)
    all_data = []
    all_labels = []
    for classnr in classnamelist:
        currdir = dsetdir + str(classnr)
        print(currdir)
        for fname in os.listdir(currdir):
            if not fname.endswith('.pt'):
                continue
            all_data.append(torch.load(currdir + '/' + fname).tolist())
            all_labels.append(int(classnr))
    all_data = torch.tensor(all_data)
    all_labels = torch.tensor(all_labels)
    print('all data loaded!', all_data.shape)
    print('all labels loaded!', all_labels.shape)
    print(all_labels)
    return all_data, all_labels
def loadsdata(dsetdir='/content/dgcnn/pytorch/ftdat/valdset/'):
    """Load every saved point-cloud tensor under the validation root `dsetdir`.

    Same directory layout as loaddata(): one sub-directory per integer class
    label, each containing `.pt` files (torch-saved point tensors).

    Args:
        dsetdir: Root directory of the dataset. Defaults to the original
            hard-coded validation-set location, so existing callers are
            unaffected.

    Returns:
        (all_data, all_labels): a stacked tensor of point clouds and a 1-D
        tensor of the matching integer class labels.
    """
    classnamelist = next(os.walk(dsetdir))[1]
    print(classnamelist)
    all_data = []
    all_labels = []
    for classnr in classnamelist:
        currdir = dsetdir + str(classnr)
        print(currdir)
        for fname in os.listdir(currdir):
            if not fname.endswith('.pt'):
                continue
            all_data.append(torch.load(currdir + '/' + fname).tolist())
            all_labels.append(int(classnr))
    all_data = torch.tensor(all_data)
    all_labels = torch.tensor(all_labels)
    print('all data loaded!', all_data.shape)
    print('all labels loaded!', all_labels.shape)
    print(all_labels)
    return all_data, all_labels
def translate_pointcloud(pointcloud):
    """Randomly scale (2/3..3/2) and shift (-0.2..0.2) each axis independently.

    Returns the augmented cloud as a float32 CPU tensor.
    """
    scale = np.random.uniform(low=2./3., high=3./2., size=[3])
    shift = np.random.uniform(low=-0.2, high=0.2, size=[3])
    augmented = np.add(np.multiply(pointcloud, scale), shift)
    return augmented.type(torch.FloatTensor)
def jitter_pointcloud(pointcloud, sigma=0.01, clip=0.02):
    """Add Gaussian noise (std `sigma`, clipped to ±`clip`) to `pointcloud` in place."""
    num_points, num_dims = pointcloud.shape
    noise = np.clip(sigma * np.random.randn(num_points, num_dims), -1*clip, clip)
    pointcloud += noise
    return pointcloud
class ModelNet40(Dataset):
    """ModelNet40 point-cloud dataset.

    Each item is the first `num_points` points of a cloud plus its label; the
    'train' partition additionally gets random scale/shift augmentation and a
    point-order shuffle.
    """
    def __init__(self, num_points, partition='train'):
        self.data, self.label = load_data(partition)
        self.num_points = num_points
        self.partition = partition
    def __getitem__(self, item):
        # Truncate the cloud to the first num_points points.
        pointcloud = self.data[item][:self.num_points]
        label = self.label[item]
        if self.partition == 'train':
            pointcloud = translate_pointcloud(pointcloud)
            # Point clouds are unordered sets; shuffling removes order bias.
            np.random.shuffle(pointcloud)
        return pointcloud, label
    def __len__(self):
        return self.data.shape[0]
class FT10(Dataset):
    """Fine-tuning training dataset of saved point-cloud tensors (see loaddata()).

    Applies random scale/shift augmentation and shuffles point order on every
    access.
    """
    def __init__(self, num_points):
        self.data, self.label = loaddata()
        self.num_points = num_points
    def __getitem__(self, item):
        # Truncate the cloud to the first num_points points.
        pointcloud = self.data[item][:self.num_points]
        label = self.label[item]
        pointcloud = translate_pointcloud(pointcloud)
        # Point clouds are unordered sets; shuffling removes order bias.
        np.random.shuffle(pointcloud)
        return pointcloud, label
    def __len__(self):
        return self.data.shape[0]
class FT11(Dataset):
    """Fine-tuning validation dataset of saved point-cloud tensors (see loadsdata()).

    No augmentation; each cloud is cast to float32 before being returned.
    """
    def __init__(self, num_points):
        self.data, self.label = loadsdata()
        self.num_points = num_points
    def __getitem__(self, item):
        pointcloud = self.data[item][:self.num_points]
        pointcloud = pointcloud.type(torch.FloatTensor)
        label = self.label[item]
        return pointcloud, label
    def __len__(self):
        return self.data.shape[0]
if __name__ == '__main__':
    # Smoke test: instantiate each dataset and print shapes from the
    # training split.
    train_set = ModelNet40(1024)
    test_set = ModelNet40(1024, 'test')
    finetune_train = FT10(1024)
    finetune_eval = FT11(1024)
    for data, label in train_set:
        print(data.shape)
        print(label.shape)
| [
"carachias@gmail.com"
] | carachias@gmail.com |
20a405147dc239db1af8b180b78f4310c43f38b0 | ae66ad38a7b19c01f1099d671dd127716a5d4c34 | /accounts/migrations/0025_auto_20180511_1233.py | e1d9e06c37affbd3e572a0f042dd681de84ec054 | [] | no_license | selbieh/django-freelacer-website | 6fd1eb009e9b30738bfa59fa78f530144b273231 | 0971a7fc3dc7e63a1909bb6adf3a84d7d9083324 | refs/heads/master | 2022-11-22T19:07:48.470928 | 2019-11-24T12:24:26 | 2019-11-24T12:24:26 | 172,359,908 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 579 | py | # Generated by Django 2.0.4 on 2018-05-11 10:33
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration: re-point the upload directories of
    ``UserProfile.pic`` and ``UserProfile.resume`` under ``profile/``."""
    dependencies = [
        ('accounts', '0024_auto_20180511_1229'),
    ]
    operations = [
        migrations.AlterField(
            model_name='userprofile',
            name='pic',
            field=models.ImageField(upload_to='profile/profile_pic'),
        ),
        migrations.AlterField(
            model_name='userprofile',
            name='resume',
            field=models.FileField(upload_to='profile/resume'),
        ),
    ]
| [
"selbieh@gmail.com"
] | selbieh@gmail.com |
02572ac0d7a899647d2e88f1a95a0f55337c7e01 | fc1c1e88a191b47f745625688d33555901fd8e9a | /meraki/models/protocol_4_enum.py | a5a84ca844f12ecbee618d6942e1886545423e86 | [
"MIT",
"Python-2.0"
] | permissive | RaulCatalano/meraki-python-sdk | 9161673cfd715d147e0a6ddb556d9c9913e06580 | 9894089eb013318243ae48869cc5130eb37f80c0 | refs/heads/master | 2022-04-02T08:36:03.907147 | 2020-02-03T19:24:04 | 2020-02-03T19:24:04 | 416,889,849 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 556 | py | # -*- coding: utf-8 -*-
"""
meraki
This file was automatically generated for meraki by APIMATIC v2.0 ( https://apimatic.io ).
"""
class Protocol4Enum(object):
    """Allowed values for an incoming packet's protocol field.

    One of ``"ANY"``, ``"TCP"`` or ``"UDP"``; the API treats ``"ANY"``
    as the default.
    """

    ANY = 'ANY'
    TCP = 'TCP'
    UDP = 'UDP'
| [
"api-pm@meraki.com"
] | api-pm@meraki.com |
dbe6152c90a1d121fb9efa404084e4479ccfa844 | d245d44d742aee4a08c45263eda6ec42bb552dbd | /prac_1/sales_bonus.py | 65643e63980b21058ced7d680f128bb1c4b20996 | [] | no_license | mpearce19/CP1404practicals | d3b6afeddabd7193ed08ce17d40460aee8173126 | 270aed43ba7a8051cab83b13ba7b888ba222fbaa | refs/heads/master | 2023-01-05T11:41:28.147753 | 2020-11-05T00:58:14 | 2020-11-05T00:58:14 | 289,182,956 | 0 | 0 | null | 2020-09-09T01:07:47 | 2020-08-21T05:11:57 | Python | UTF-8 | Python | false | false | 403 | py | # 1.
"""
Program to calculate and display a user's bonus based on sales.
If sales are under $1,000, the user gets a 10% bonus.
If sales are $1,000 or over, the bonus is 15%.
"""
sales = float(input("Enter sales: $"))
while sales >= 0:
if sales < 1000:
bonus = sales * 0.10
else:
bonus = sales * 0.15
print(f"The bonus is {bonus}")
sales = float(input("Enter sales: $"))
| [
"mitchell.pearce@my.jcu.edu.au"
] | mitchell.pearce@my.jcu.edu.au |
6bb617f65ce38c6264839410620930fdf5f56e11 | 677734a1b188fc28d2af7848cd17923cd5a45871 | /PopulateCredential.py | f16b1da8f2f654883c331c204d8a97ffebe17b93 | [] | no_license | cyrexin/simplechatroom | dae66c3547dbef1cac2a12cd0eb219db2430d72c | 59797624df8ec780dbb3fa5e98aea05cdacc5736 | refs/heads/master | 2020-12-01T13:05:12.418441 | 2016-03-23T00:10:22 | 2016-03-23T00:10:22 | 54,007,192 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 558 | py | from Encrypt import *
# Hard-coded demo accounts: username -> plaintext password.
user_password = {
    'columbia': '116bway',
    'seas': 'winterisover',
    'csee4119': 'lotsofassignments',
    'foobar': 'passpass',
    'windows': 'withglass',
    'google': 'partofalphabet',
    'facebook': 'wastetime',
    'wikipedia': 'donation',
    'network': 'seemsez'
}

# Write one "<username> <signature>" line per account, echoing each pair to
# the console as before.  Changes from the original: print statements are
# now function calls (valid on Python 2 and 3), the dict is iterated with
# .items() instead of a second lookup, and `with` guarantees the file is
# closed even if Encrypt.create_signature raises.
with open('user_pass.txt', 'w') as f:
    for username, password in user_password.items():
        print('username: %s' % username)
        print('password: %s' % password)
        f.write(username + ' ' + Encrypt.create_signature(password.strip()) + '\n')
| [
"cyrexin@gmail.com"
] | cyrexin@gmail.com |
41520052906753d133afd2569d27beb0bd764902 | f2dc83ffed7f5f8fffc83ef66b78d0b64274f803 | /main_codes/main.py | fde287072ff2e8e239fa1c5b990bfb575ac42055 | [] | no_license | TarunGandotra/flask_ui | 42805a29f3b5b34e200311a9e76a30ce4f135dff | 22bb891f470409878a1b5a408f3741caef8bf418 | refs/heads/main | 2023-01-18T20:29:25.208084 | 2020-11-20T13:53:01 | 2020-11-20T13:53:01 | 314,558,144 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33,224 | py | # -*- coding: utf-8 -*-
"""
Created on Fri May 17 14:49:58 2019
@author: Digiscape
# """
from fund_ref import fund
#from afn003 import AFN003
from Article_History import article_history
from BRX012 import Brx012
from rule_109 import Rule_109
from Che015 import CHE015_11
from rule_17 import Rule_17
from RULE_70 import rule_70
from rule_108 import Rule_108
from RULE_75 import rule_75
from Rule_39 import brx039
from Brx006 import BRX006
from Brx018 import BRX018
from RULE_83 import rule_83
from RULE_59 import rule_59
from RULE_7 import Rule_7
#from Corr_Check import corr_check wrong ack003
from Rule_31_32 import rule31
from Rule_67 import rule_67
from Rule_66 import rule_66
from BRX041 import brx041
from Rule87 import rule_87
from Rule_08_new import Rule_08
from Brx005a import BRX005A
from BRX030 import brx030
from Rule_71 import rule_71
from Brf091b import BRF091B
from RULE40 import rule40
from BRX032 import brx032
from rule_91 import Rule_91
from Xstb702 import XsTB702
from Xstb701 import XsTB701
from Rule_27 import AEU001A
from Brx022 import BRX022
from BRF037A import Brf037a
from brx042 import Brx042
from BRX029B import brx029b
from Rule_73 import rule_73
from Table_Header import table_header
from Brx024 import BRX024
from rule30 import BRX001a
from Brx011 import BRX011
from Brx019 import BRX019
from HIS003A import rule_HIS003A
from Tab018 import tab018
from Brx005b_35 import BRX005B_35
from Rule89 import Rule_89
from RULE132 import rule132
from RULE_78_138 import rule_78_138
from RULE_113 import Rule_113
from AUG001 import aug001
from TableRule import Table
from rule_111 import Rule_111
from Brx020b import BRX020B
from RULE_15_new import rule_15
from Brx007 import BRX007
#from TableRule import Table
from RULE_69 import rule_69
from Xsfm101 import XsFM101
from RULE135 import rule135
from afn003 import rule_afn003
from rule139_140 import Rule139_140
#from email_Vinay import Authors_Email
from Brx020a import BRX020A
from Aun009a import AUN009A
from Highlights import highlights
from rule_93 import Rule_93
from RULE41 import rule41
from RULE_79 import rule_79
from Rule_67 import rule_67
from RULE_80 import rule_80
from ack003_new import ack003
from RULE_114 import Rule_114
from Keywords import keywords
from Label import label
from RULE_149 import Rule_149
from RULE_96 import Rule_96
from RULE_16 import rule_16
from rule_111 import Rule_111
from RULE_128 import Rule_128
from RULE120 import rule120
from Brx005b_36_37 import BRX005B_36
from AEU003 import aeu003
from RULE144 import rule144
from Afo002a import AFO002A
from RULE130 import rule130
from Xsfg601 import XsFG601
from TAIL import Tail
#from fund_ref import fund
from RULE116 import rule116
from TAB021 import tab021
from TAB012 import tab012
from RULE_72 import rule_72
from BRX010_42_43 import BRX010_42
from Rule_CHE045 import CHE_045
from Rule_FUN002 import FUN_002
from double_bond import CHE011
from ellip import Ellip
from fors5 import BRF033_
from note import BRF037A
from source import DQO002
from triple_bond import CHE011_tri
from app001 import APP001
from ABS004 import abs004
from bs4 import BeautifulSoup
import pypyodbc
import datetime
from shutil import copy2
import sys
import docx
import os
from datetime import date
import socket
import logging
from configparser import ConfigParser
def load_configuration(file_path):
    """Parse the INI settings file at *file_path* and return the parser.

    ``allow_no_value=True`` so option lines without a value are accepted.
    A missing file yields an empty parser (ConfigParser.read ignores it).
    """
    parser = ConfigParser(allow_no_value=True)
    parser.read(file_path)
    return parser
# Load the INI settings once at import time (hard-coded developer path --
# the whole module assumes this machine's directory layout).
config = load_configuration(r"C:\Users\digiscape\Desktop\batch\Batch_App_SmartQC\main_codes\file\settings.ini")
#print(config,'?'*100)
print(os.getcwd())
# Route the root logger to the file named in the [FILES] section, appending.
logging.basicConfig(filename=config["FILES"]["log"], filemode='a', format='%(asctime)s - %(message)s', level=logging.INFO)
logger = logging.getLogger()
logging.info(' **-------Logger start from here-------** '*4)
# config='C:\\Users\\digiscape\\Desktop\\log_file\\logger.log'
# logging.basicConfig(filename=config, filemode='a', format='%(asctime)s - %(message)s', level=logging.INFO)
# logger = logging.getLogger()
#print()
# The folder to process comes from the first CLI argument; with no
# argument it falls back to argv[0] (the script path itself).
if len(sys.argv)>1:
    path_for_file = sys.argv[1]
else:
    path_for_file = sys.argv[0]
print(sys.argv,'+'*50)
def update_db_master(updated_list):
    """Write the final per-article QC results back to ``uniqmaster``.

    updated_list layout:
        [0] item id, [1] error count, [2] elapsed seconds,
        [3] true positives, [4] true negatives,
        [5] false positives, [6] false negatives,
        [7] batch id (the caller passes the string "null", which is spliced
            into the statement text and becomes SQL NULL), [8] user/host id.

    NOTE(review): values are interpolated into the SQL string rather than
    bound as parameters, so inputs must be trusted (injection-prone); the
    "null" batch-id trick depends on this.
    """
    connection = pypyodbc.connect('Driver={'+config["DATABASE"]["server"]+'};Server='+config["DATABASE"]["hostname"]+';Database='+config["DATABASE"]["database"]+';uid='+config["DATABASE"]["username"]+';pwd='+config["DATABASE"]["password"])
    cursor = connection.cursor()
    # The original guarded on str(updated_list[7] != 'null'), which is the
    # string 'True' or 'False' and therefore always truthy -- and both
    # branches executed byte-identical statements anyway, so the branch
    # (and an unused `from datetime import datetime`) was removed.
    cursor.execute("UPDATE uniqmaster SET ERRORCOUNT ="+str(updated_list[1])+" "+",TIME_TAKEN_IN_SEC ="+str(updated_list[2])+" "+",true_positive ="+str(updated_list[3])+" "+",true_negative ="+str(updated_list[4])+" "+",false_positive ="+str(updated_list[5])+" "+",false_negative ="+str(updated_list[6])+" "+",BATCH_ID ="+str(updated_list[7])+" "+" "+" WHERE UNIQITEMID ="+"\'"+updated_list[0]+"\'"+";")
    cursor.execute("UPDATE uniqmaster SET USERID ="+"\'"+updated_list[8]+"\'"+" "+" WHERE UNIQITEMID ="+"\'"+updated_list[0]+"\'"+";")
    connection.commit()
    return
def insert_db_master(out_list):
    """Insert a fresh ``uniqmaster`` row for an article item, or -- if the
    item was processed before -- bump its execution counter and refresh the
    error count, elapsed time and modified timestamp.

    out_list layout: [itemid, userid, jid, aid, stage, start time,
    error count, counter, start time again, elapsed seconds].
    """
    from datetime import datetime  # NOTE(review): unused import
    connection = pypyodbc.connect('Driver={'+config["DATABASE"]["server"]+'};Server='+config["DATABASE"]["hostname"]+';Database='+config["DATABASE"]["database"]+';uid='+config["DATABASE"]["username"]+';pwd='+config["DATABASE"]["password"])
    #connection = pypyodbc.connect('Driver={SQL Server};Server=172.16.0.61;Database=smart_qc;uid=sa;pwd=p@ssw0rd')
    cursor = connection.cursor()
    # Does a row for this item id already exist?  (id spliced into the SQL)
    stmt_counter = "SELECT ARTICLE_EXECUTION_COUNTER FROM uniqmaster WHERE UNIQITEMID ="+"\'"+out_list[0]+"\'"+";"
    return_curs = cursor.execute(stmt_counter)
    k = return_curs.fetchone()
    if k == None:
        # First run: insert the whole row using bound '?' parameters.
        params = ['?' for item in out_list]
        stmt= 'INSERT INTO uniqmaster (UNIQITEMID, USERID, JID, AID, STAGE,ITEMPROCESSINGDATE,ERRORCOUNT, ARTICLE_EXECUTION_COUNTER, ITEMPROCESSING_MODIFIED_DATE,TIME_TAKEN_IN_SEC) VALUES (%s);' % ','.join(params)
        cursor.execute(stmt, out_list)
    else:
        # Re-run: increment the stored counter and refresh volatile columns
        # (values spliced into the SQL text, timestamp set server-side).
        count = k[0]+1
        cursor.execute("UPDATE uniqmaster SET ERRORCOUNT ="+str(out_list[6])+" "+",TIME_TAKEN_IN_SEC ="+str(out_list[9])+" "+",ARTICLE_EXECUTION_COUNTER ="+str(count)+" "+",ITEMPROCESSING_MODIFIED_DATE = CONVERT(VARCHAR(50), getdate(),121) WHERE UNIQITEMID ="+"\'"+out_list[0]+"\'"+";")
    connection.commit()
    return
def insert_db_trans(out_list1):
    """Insert or update the per-rule error record in ``uniqtransaction``.

    out_list1 layout: [itemid, category, ruleid, error description,
    execution counter].  If a row for the item id exists, only the counter
    and description are refreshed.
    """
    connection = pypyodbc.connect('Driver={'+config["DATABASE"]["server"]+'};Server='+config["DATABASE"]["hostname"]+';Database='+config["DATABASE"]["database"]+';uid='+config["DATABASE"]["username"]+';pwd='+config["DATABASE"]["password"])
    #connection = pypyodbc.connect('Driver={SQL Server};Server=172.16.0.61;Database=smart_qc;uid=sa;pwd=p@ssw0rd')
    cursor = connection.cursor()
    # Probe for an existing row for this item id (id spliced into the SQL).
    stmt_counter = "SELECT RULE_EXECUTION_COUNTER FROM uniqtransaction WHERE UNIQITEMID ="+"\'"+out_list1[0]+"\'"+" "+";"
    return_curs = cursor.execute(stmt_counter)
    k = return_curs.fetchone()
    if k == None:
        # First occurrence: insert the full row with bound '?' parameters.
        params = ['?' for item in out_list1]
        stmt= 'INSERT INTO uniqtransaction (UNIQITEMID, CATEGORY, RULEID, RULE_ERROR_DESC,RULE_EXECUTION_COUNTER) VALUES (%s);' % ','.join(params)
        cursor.execute(stmt, out_list1)
    else:
        #cursor.execute("UPDATE uniqtransaction SET RULE_EXECUTION_COUNTER = "+str(out_list1[-1])+" "+",RULE_ERROR_DESC = "+str(out_list1[-2])+" "+"WHERE UNIQITEMID ="+"\'"+out_list1[0]+"\'"+";")
        # Existing row: refresh counter and description via bound parameters.
        cursor.execute ("""
                UPDATE uniqtransaction
                SET RULE_EXECUTION_COUNTER=(?), RULE_ERROR_DESC=(?)
                WHERE UNIQITEMID=(?)
                """, (str(out_list1[-1]), str(out_list1[-2]), out_list1[0]))
    connection.commit()
    return
def insert_db_smartqc_S5(out_list2):
    """Append one per-rule confusion-matrix row to ``smartqc_S5``.

    out_list2 layout: [articleid, ruleid, tp, tn, fp, fn]; all values are
    bound as '?' parameters.
    """
    db = config["DATABASE"]
    conn_str = (
        'Driver={' + db["server"] + '};Server=' + db["hostname"]
        + ';Database=' + db["database"] + ';uid=' + db["username"]
        + ';pwd=' + db["password"]
    )
    connection = pypyodbc.connect(conn_str)
    cursor = connection.cursor()
    placeholders = ','.join('?' for _ in out_list2)
    cursor.execute(
        'INSERT INTO smartqc_S5 (articleid, ruleid, true_positive, '
        'true_negative, false_positive,false_negative) VALUES (%s);' % placeholders,
        out_list2,
    )
    connection.commit()
    return
def insert_db_error_count(error_count, itemid):
    """Persist the total *error_count* for the article *itemid* in
    ``uniqmaster``.

    NOTE(review): both values are spliced into the SQL text rather than
    bound, so inputs must be trusted.
    """
    db = config["DATABASE"]
    conn_str = (
        'Driver={' + db["server"] + '};Server=' + db["hostname"]
        + ';Database=' + db["database"] + ';uid=' + db["username"]
        + ';pwd=' + db["password"]
    )
    connection = pypyodbc.connect(conn_str)
    cursor = connection.cursor()
    cursor.execute(
        "UPDATE uniqmaster SET errorcount = %s WHERE uniqitemid ='%s';"
        % (error_count, itemid)
    )
    connection.commit()
    return
def append_log(outlist, exists):
    """Append one ``<message>`` element per failed rule result in *outlist*
    to the XML log file at *exists* (a file path, despite the name).

    Entries whose 4th field is "no error" are skipped.  NOTE(review): the
    generated attribute values are unquoted, so lxml re-parses rather than
    preserves them verbatim; the file is also re-read and re-written once
    per error entry, which is quadratic in the log size.
    """
    for out_list in outlist:
        if out_list[3] == "no error":
            pass
        else:
            with open(exists, "r",encoding="utf-8") as f:
                contents = f.read()
                # Build the new <message> element from the rule id,
                # position and description fields of this result tuple.
                str_log = "<message id="+out_list[0]+" type= error"+" position=1:"+out_list[4]+">Error: "+out_list[5]+"</message>"
                soup1 = BeautifulSoup(contents,features="lxml")
                if soup1.results!=None:
                    soup1.results.append(BeautifulSoup(str_log,features="lxml"))
                f.close()
            # Rewrite the whole document with the new element attached.
            with open(exists, "w",encoding="utf-8") as f:
                for i in soup1:
                    f.write(str(i))
            f.close()
    return
def Batch_S5(folderPath,batch_Id):
try:
import time
Start_time = time.time()
#stage = "UNIQ_S5"
stage = config["STAGE"]["stage"]
print(stage,'stage?'*50)
path=folderPath
print(path)
#batchId = batch_Id
path_split = path.split("_")
print(path_split,'this is the split path')
print(path_split[-1])
if path_split[-1]=='110':
print('its in 110 block of batch'*5)
docx = "_".join(path_split[-3:-1])+".docx"
file_path = path.replace("\\","/") + "/tx1.xml"
print(file_path,'this is the file path')
file = file_path.split('/')[-2].split("_")
print(file,'this is the file path for error rectification')
jid = file[-3]
aid = file[-2]
itemid = jid+"_"+aid+"_"+stage
logging.info(' **-------Logger start from here for {}-------** '.format(itemid))
time1 = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
counter = 1
error_count = 0
uid = socket.gethostname()
exists = os.path.isfile(path.replace("\\","/")+"/UNIQ_S5.xml")
jss_path = path.replace("\\","/")+"/"+jid+"-jss.xml"
order_path = path.replace("\\","/")+"/"+jid+"_"+aid+"_order.xml"
mss= path.replace("\\","/")+"/"+jid+"_"+aid+".docx"
tp=0
fp=0
tn=0
fn=0
else:
print('its not in 110 block of batch'*5)
docx = "_".join(path_split[-2:])+".docx"
file_path = path.replace("\\","/") + "/tx1.xml"
print(file_path,'this is the file path')
file = file_path.split('/')[-2].split("_")
print(file,'this is the file path for error rectification')
jid = file[-2]
aid = file[-1]
itemid = jid+"_"+aid+"_"+stage
logging.info(' **-------Logger start from here for {}-------** '.format(itemid))
time1 = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
counter = 1
error_count = 0
uid = socket.gethostname()
exists = os.path.isfile(path.replace("\\","/")+"/UNIQ_S5.xml")
jss_path = path.replace("\\","/")+"/"+jid+"-jss.xml"
order_path = path.replace("\\","/")+"/"+jid+"_"+aid+"_order.xml"
mss= path.replace("\\","/")+"/"+jid+"_"+aid+".docx"
#mss = path+"/"+docx
#print('local path for mss2',mss2)
print('path for gen docx ',docx)
print('path for mss ',mss)
print('path for order ',order_path)
print('path for jss xml ',jss_path)
tp=0
fp=0
tn=0
fn=0
#def Batch_S5(folderPath,batch_Id):
# import time
# Start_time = time.time()
#
# stage = "UNIQ_S5"
# path=folderPath
# print(path)
# #batchId = batch_Id
# #path_split = path.split("_")
# path_split = path.split("\\")[-1].split("_")
# print(path_split,'this is the split path')
# #docx = "_".join(path_split[-3:-1])+".docx"
# docx = "_".join(path_split[-2:])+".docx"
# #copy2(path.replace("\\","/")+"/tx1.xml.log.xml",path.replace("\\","/")+"/UNIQ_S5.xml")
# file_path = path.replace("\\","/") + "/tx1.xml"
# print(file_path,'this is the file path')
# file = file_path.split('/')[-2].split("_")
# print(file,'this is the file path for error rectification')
# jid = file[-2]
# aid = file[-1]
# itemid = jid+"_"+aid+"_"+stage
# time1 = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
# counter = 1
# error_count = 0
# uid = socket.gethostname()
# exists = os.path.isfile(path.replace("\\","/")+"/UNIQ_S5.xml")
# jss_path = path.replace("\\","/")+"/"+jid+"-jss.xml"
# order_path = path.replace("\\","/")+"/"+jid+"_"+aid+"_order.xml"
# mss = path+"/"+docx
# tp=0
# fp=0
# tn=0
# fn=0
#print("starting path",path)
dest=r'E:\Desktop_files\TESTED FILES'
copy2(path.replace("\\","/")+"/tx1.xml.log.xml",dest.replace("\\","/")+"/temp_UNIQ_S5.xml")
#copy2(path+"\\tx1.xml.log.xml",path+"\\UNIQ_S5.xml")
#print("path",path)
log_path = dest.replace("\\","/")+"/temp_UNIQ_S5.xml"
Master_list = [itemid, uid, jid, aid, stage, time1, error_count, counter, time1, time.time()-Start_time]
insert_db_master(Master_list)
        def log_transaction_db(obj,error_count,tp,tn,fp,fn):
            # Record one rule's output `obj` (a list of result tuples;
            # obj[0] carries the rule id/category and, on failure, the
            # word "error") in the XML log and the uniqtransaction table,
            # updating the running tallies.
            # Returns the updated (error_count, tp, tn, fp, fn).
            # NOTE(review): a rule hit increments tn and a clean pass tp,
            # which inverts the usual positive/negative naming -- confirm
            # against how the smartqc_S5 columns are consumed.
            append_log(obj,log_path)
            tempdesc=""  # leftover from the commented-out logic below; unused
            ss=''
            if 'error' in obj[0]:
                # First tuple is an error: seed the description with it and
                # count every tuple of obj toward the error total.
                ss=obj[0][-1]
                error_count += len(obj)
                tn+=1
            else:
                tp+=1
            for sub in obj:
                if "error" in sub:
                    #if(tempdesc==""):
                    #    tn+=1
                    #if(tempdesc!="" and sub[-1]!=tempdesc):
                    #    tn+=1
                    if (obj.index(sub)!=0):
                        # Join the remaining error descriptions with ';'.
                        ss=ss+';'+sub[-1]
                    #tempdesc=sub[-1]
                    #error_count+=1
            if 'error' in obj[0]:
                # One transaction row per failed rule; the counter is the
                # number of ';'-separated descriptions collected above.
                trans_list=[itemid, obj[0][1], obj[0][0], ss, len(ss.split(';'))]
                insert_db_trans(trans_list)
            return error_count,tp,tn,fp,fn
obj = Brf037a(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'second one','Vinay')
obj = Rule_17(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'third one','vipin')
obj = rule_15(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'fourth one','vipin')
obj = Rule_08(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'sixth one','preeti')
obj = Rule_93(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'eight one','preeti')
obj = Rule_91(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'ninth one','preeti')
obj = AUN009A(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'eleventh one','shani')
obj = ack003(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'twelveth one','shani')
obj = Rule_108(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'thirteenth one','preeti')
obj = article_history(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'fourteenth one','Vipin')
obj = rule_59(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'fifteenth one','vipin')
obj = highlights(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'sixteenth one','shani')
obj = rule_87(file_path,order_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'seventeenth one','preeti')
obj = Rule_89(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'eighteenth one','preeti')
obj = rule_HIS003A(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'19th one','shani')
obj = rule_73(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'20th one','vipin')
obj = rule_66(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'21th one','vipin')
obj = rule_71(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'22th one','vipin')
obj = rule_67(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'23th one','vipin')
obj = aug001(file_path, mss)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'24th one','shani')
obj = BRX011(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'25th one','rachit')
obj = Brx042(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'26th one','kumar')
obj = brx039(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'27th one','kumar')
obj = rule31(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'28th one','aditya')
obj = rule_afn003(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'29th one','shani')
obj = brx029b(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'30th one','kumar')
#obj = Table(file_path)
#error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
#print(obj,'31th one','shani')
obj = BRX007(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'31th one','aditya')
obj = BRX006(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'32th one','aditya')
obj = Table(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'33th one','shani')
obj = BRX018(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'34th one','rachit')
obj = BRX005A(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'35th one','aditya')
obj = brx030(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'36th one','kumar')
obj = brx041(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'37th one','kumar')
obj = BRX020A(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'38th one','rachit')
obj = brx032(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
## append_log(obj,log_path)
## for sub in obj:
## if "error" in sub:
## error_count+=1
## trans_list=[jid+"_"+aid+"_"+"S5", itemid, sub[1], sub[0], sub[-1], counter]
## insert_db_trans(trans_list)
print(obj,'39th one','kumar')
obj = BRX001a(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'40th one','aditya')
obj = BRX019(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'41th one','rachit')
obj = BRX024(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'42th one','rachit')
obj = BRF091B(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'43th one','vipin')
obj = BRX005B_35(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'44th one','vipin')
obj = aeu003(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'45th one','kumar')
obj = BRX020B(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'46th one','rachit')
obj = AFO002A(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'47th one','preeti')
obj = BRX005B_36(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'48th and 49th one','vipin')
obj = tab018(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'50th ','kumar')
obj = XsTB701(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'51th','kumar')
obj = XsTB702(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'52th','kumar')
obj = BRX022(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'53th','rachit')
obj = Rule_114(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'54th','preeti')
obj = rule132(file_path, order_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'55th','rachit')
obj = rule40(file_path, mss)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'56th','aditya')
obj = Rule_113(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'57th','preeti')
obj = rule41(file_path, mss, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'58th','aditya')
obj = table_header(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'59th','shani')
obj = rule_80(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'60th','vipin')
obj = rule_72(file_path,jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'61th','vipin')
obj = rule_78_138(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'62th','vipin')
obj = Rule_111(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'63th','preeti')
obj = Rule_96(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'64th','preeti')
obj = label(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'65th','shani')
obj = keywords(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'66th','shani')
obj = Rule_109(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'67th','preeti')
#obj = Authors_Email(file_path, mss)
#error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
#print(obj,'68th','Vinay')
obj = Rule139_140(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'69th','aditya')
#obj = corr_check(file_path)
#error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
#print(obj,'70th','shani') commented shani acl003 (151 UFA)
obj = rule_75(file_path, jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'71th','vipin')
obj = rule_69(file_path,mss, jss_path)
#print("$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$",obj)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'72th','vipin')
obj = rule120(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'73th','aditya')
obj = rule_70(file_path,jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'74th','vipin')
obj = rule_79(file_path,jss_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'75th','vipin')
obj = rule_16(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'76th','vipin')
obj = rule_83(jss_path, file_path,order_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'77th','vipin')
obj = tab021( file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'78th','kumar')
obj = tab012(file_path, mss)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'79th','kumar')
obj = XsFG601(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'80th','kumar')
obj = XsFM101(file_path,order_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'81th','vipin')
obj = rule135(file_path, order_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'82th','rachit')
# obj = rule144(mss, file_path)
# error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
# print(obj,'83th','rachit')
# obj = rule130(file_path,json_path)
# append_log(obj,log_path)
# if "error" in obj:
# error_count+=1
# trans_list=[jid+"_"+aid+"_"+"S5", itemid, obj[2], obj[0], obj[-2], counter]
# insert_db_trans(trans_list)
# print(obj,'84th','rachit')
obj = Rule_7(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'84th','preeti')
obj = rule116(file_path, mss)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'85th','aditya')
obj = Rule_149(file_path, mss)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'86th','preeti')
obj = Tail(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'87th','shani')
obj = Rule_128(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'88th','preeti')
obj = CHE015_11(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'89th','rachit')
obj = fund(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'90th','vinay')
obj = AEU001A(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'91th','vipin')
obj = BRX010_42(jss_path,file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'92th','vipin')
obj = CHE_045(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'93th','manisha')
obj = FUN_002(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'94th','manisha')
obj = FUN_002(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'94th','manisha')
obj = CHE011(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'95th','Mayank')
obj = Ellip(file_path,jss_path)
#print(obj)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'96th','Mayank')
obj = BRF033_(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'97th','Mayank')
obj = BRF037A(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'98th','Mayank')
obj = DQO002(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'99th','Mayank')
obj = CHE011_tri(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'100th','Mayank')
obj = APP001(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'101st','preeti')
obj = abs004(file_path)
error_count,tp,tn,fp,fn,=log_transaction_db(obj,error_count,tp,tn,fp,fn)
print(obj,'102nd','preeti')
copy2(dest.replace("\\","/")+"/temp_UNIQ_S5.xml",path.replace("\\","/")+"/UNIQ_S5.xml")
os.remove(dest.replace("\\","/")+"/temp_UNIQ_S5.xml")
updated_Master_list = [itemid, error_count, time.time()-Start_time,tp,tn,fp,fn,batch_Id,uid]
update_db_master(updated_Master_list)
#logging.info('\n\n')
logging.info(' **-------Logger End from here for {}-------** '.format(itemid))
logging.info('\n\n')
logging.info('\n\n')
except Exception as e:
logging.info('=='*50)
logging.exception('Got exception on main handler-----------')
logging.info('\n\n')
logging.shutdown()
return
# Run the S5 batch once for the configured file.  The commented lines below
# are retired experiments (folder sweeps) kept for reference only.
print(Batch_S5(path_for_file,"null"))
# folpath = 'C:/Users/80051/Desktop/uu'
# folpath = r'C:\Users\digiscape\Desktop\files_error\new_file'
# ldir = os.listdir(folpath)
# for i in ldir:
#     pth = folpath + '/' + i
#     print(Batch_S5(pth,'null'))
# print(Batch_S5("C:/Users/digiscape/Desktop/Dataset1/MNT_ELSEVIER_JOURNAL_CCLET_5021_110"))
| [
"noreply@github.com"
] | TarunGandotra.noreply@github.com |
5c8dd67ca45da6502659d71d6cc1ed997f15c654 | 81672127c3d85d90ad724788a8bfb83ba3a92bfc | /problem/lintcode/lint1216.py | 2f56fbf57f4701f337064f99329257f7fcbb47cd | [] | no_license | qdian88/pylearn | 01f8ac8db554740a7f119d5df116c8816bd094ef | 47abd49462c58765b5029699f5675f635353dc1d | refs/heads/master | 2020-05-29T21:00:05.141278 | 2019-05-30T07:27:44 | 2019-05-30T07:27:44 | 188,647,604 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 24 11:04:43 2019
@author: Administrator
"""
m=10**3
s_max=0
for i in range(m-1,m//10,-1):
for j in range(m-1,i-1,-1):
s=i*j
if str(s)==str(s)[::-1] and s > s_max:
s_max=s
print(s)
print(s_max) | [
"19463739@qq.com"
] | 19463739@qq.com |
651b60b515fe7843967505febf81ecf3864711a5 | a7f39db24ce26ab0f02650ffd97007222aa536c5 | /so.guishiwen.org_shiwen.py | 1c43a5299d50000c95b371c9ff9420f1d01ebc75 | [] | no_license | hpifu/py-ancient | 6f51067c4c6ef1adb8241994e03dccb29e35b501 | a845e86057432a39f8239263aa7bf0e97c3f4c76 | refs/heads/master | 2022-12-13T00:40:09.735917 | 2019-10-27T05:21:52 | 2019-10-27T05:21:52 | 204,660,319 | 0 | 0 | null | 2022-07-06T20:16:22 | 2019-08-27T08:48:26 | Python | UTF-8 | Python | false | false | 1,333 | py | #!/usr/bin/env python3
import requests
from pyquery import PyQuery as pq
www = "https://so.gushiwen.org/shiwen"
def getPage(url):
res = requests.get(
url,
headers={
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
"accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.100 Safari/537.36",
}
)
return res.text
def analyst(text):
    """Parse one search-result page.

    Returns a tuple ``(poems, next_href)`` where *poems* is a list of dicts
    with keys name/dynasty/author/tags/content and *next_href* is the href of
    the "more" link (or None if absent).
    """
    doc = pq(text)
    poems = []
    for block in doc("div.sons").items():
        title = block("p b").text()
        # Blocks without a bold title are ads/other widgets; skip them.
        if not title:
            continue
        source_links = list(block("p.source a").items())
        poems.append({
            "name": title,
            "dynasty": source_links[0].text(),
            "author": source_links[1].text(),
            "tags": block("div.tag a").text(),
            "content": block("div.contson").text(),
        })
    next_href = doc("div.pagesright a.amore").attr("href")
    return poems, next_href
def main():
    """Fetch and parse the first listing page, printing the parsed result."""
    start_url = www + "/default_4A111111111111A1.aspx"
    print(analyst(getPage(start_url)))


if __name__ == '__main__':
    main()
| [
"hatlonely@gmail.com"
] | hatlonely@gmail.com |
72886476b0c8673d4ab8017cbda9bd78e3b3caf1 | cf0eb4dd2929cf6cde79b5357ff3d793a51425ce | /src/mongowave/configuration.py | 6006c444fa7862fad3c17a4231a4ea1dd5b03c72 | [] | no_license | igor-petruk/mongo-wave | 5911182942dbe8870c5bd02f81deaa9837ca4f43 | 5a63a1f303c7a31c17d4d5a5d4f5fe644ac6a7fc | refs/heads/master | 2016-09-06T14:24:03.083573 | 2012-12-13T23:59:37 | 2012-12-13T23:59:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,264 | py | from configparser import *
import os
import json
import uuid
class ConfigLocation:
    """Central place for filesystem paths used by the application."""

    @staticmethod
    def CONFIG_FILE():
        """Return the absolute path of the per-user configuration file."""
        # Stored as a dot-file in the user's home directory.
        relative = "~/.mongowave"
        return os.path.expanduser(relative)
class ClientConnection:
    """A single saved MongoDB connection profile."""

    def __init__(self):
        """Create a profile with a fresh random id and localhost defaults."""
        # The UUID uniquely identifies the profile inside the config file.
        self.id = uuid.uuid4()
        self.name, self.db, self.user = "", "", ""
        self.host, self.port = "localhost", 27017
class ConfigurationManager:
    """Loads and persists the saved connection profiles via an INI file."""

    def __init__(self):
        """Start with no profiles and immediately read the config file."""
        self.connections = []
        self.active_connection = None
        self.load()

    def load(self):
        """Populate self.connections from the on-disk config, if present."""
        parser = ConfigParser()
        parser.read(ConfigLocation.CONFIG_FILE())
        # A missing [Connections] section means no config has been saved yet.
        if not parser.has_section("Connections"):
            return
        stored_ids = json.loads(parser.get("Connections", "list"))
        active_id = parser.get("Connections", "active")
        for profile_id in stored_ids:
            profile = ClientConnection()
            if profile_id == active_id:
                self.active_connection = profile
            profile.id = uuid.UUID(profile_id)
            profile.name = parser.get(profile_id, "name")
            profile.host = parser.get(profile_id, "host")
            profile.port = int(parser.get(profile_id, "port"))
            profile.db = parser.get(profile_id, "db")
            profile.user = parser.get(profile_id, "user")
            self.connections.append(profile)

    def save(self):
        """Serialize every profile (and the active id) back to the config file."""
        parser = ConfigParser()
        parser.add_section('Connections')
        conn_ids = []
        for conn in self.connections:
            # One INI section per profile, keyed by its UUID string.
            section = str(conn.id)
            conn_ids.append(section)
            parser.add_section(section)
            for key, value in (("name", conn.name), ("host", conn.host),
                               ("port", str(conn.port)), ("db", conn.db),
                               ("user", conn.user)):
                parser.set(section, key, value)
        parser.set('Connections', "list", json.dumps(conn_ids))
        active = self.active_connection
        parser.set('Connections', "active", "" if active is None else str(active.id))
        with open(ConfigLocation.CONFIG_FILE(), 'wt', encoding='utf8') as configfile:
            parser.write(configfile)
| [
"igor.petrouk@gmail.com"
] | igor.petrouk@gmail.com |
b0005938274e2ee8f47d15f16514ef24a1c06540 | eeb04894d014a7bdd5501b4901e5d9818a29bd89 | /ex63.py | dead2d0dd94ebd07dbc94cc8a45553f97b113d7e | [
"MIT"
] | permissive | amarelopiupiu/python-exercicios | c9952828154abc42befffbea1b4a5a25e4c7ca62 | 4c1c535bf09854c7ce43405a4b55a2302c0edcda | refs/heads/main | 2023-05-25T08:11:34.294453 | 2021-06-09T20:32:35 | 2021-06-09T20:32:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 339 | py | # Desenvolva um programa que leia o primeiro termo e a razão de uma PA. No final, mostre os 10 primeiros termos dessa progressão.
# Exercise: read the first term and the common difference (razão) of an
# arithmetic progression and print its first 10 terms.
# (The last line of this file had dataset junk fused onto it; restored clean.)
pt = int(input('Diga qual é o primeiro termo: '))
razão = int(input('Diga a razão: '))
# Tenth term: a10 = a1 + 9*d; the range stop is one step past it, so the
# loop yields exactly ten terms.
décimo = pt + 9 * razão
for termo in range(pt, décimo + razão, razão):
    print(termo, end=' ')
print('ACABOU')
"femahi2020@gmail.com"
] | femahi2020@gmail.com |
dc0a16d8c69607e0d52b47fbd9e0231d4266ac7f | 30ea7b9b9a88d542e9ea3c5b8def38e1b1d778e5 | /hw2/semantic_segmentation/dataset.py | 84f026529c7fbe3a2e50bf399cf78f0a53b05a9b | [] | no_license | kai860115/DLCV2020-FALL | 96ee63cc73969f4efe872870a0e6f8b0dfff08fd | cd07b9ac131b3bc5c22f92d57c80960dfc071ea3 | refs/heads/main | 2023-02-26T00:19:59.325488 | 2021-02-04T05:38:36 | 2021-02-04T05:38:36 | 335,480,530 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,944 | py | import re
import glob
import os
import random
import torch
from torch.utils.data import Dataset
import torchvision.transforms as transforms
import torchvision.transforms.functional as TF
from PIL import Image
def mask_target(im):
    """Convert an RGB mask image into a (H, W) long tensor of class indices.

    Each pixel's binary R/G/B channels are packed into a 3-bit code
    (4*R + 2*G + B) which is then mapped to a semantic class index.
    """
    tensor = transforms.ToTensor()(im)
    code = 4 * tensor[0] + 2 * tensor[1] + tensor[2]
    target = torch.zeros(code.shape, dtype=torch.long)
    # bit-code -> class index; codes 0 and 4 both collapse into class 6.
    lookup = {3: 0, 6: 1, 5: 2, 2: 3, 1: 4, 7: 5, 0: 6, 4: 6}
    for value, cls in lookup.items():
        target[code == value] = cls
    return target
class myDataset(Dataset):
    """Paired satellite-image / segmentation-mask dataset.

    Expects *root* to contain matching ``*.jpg`` satellite images and
    ``*.png`` masks; pairs are formed by sorting both file lists and zipping
    them, so matching files must sort into the same order.
    """
    def __init__(self, root, transform=None, randomflip=False):
        # root: directory holding the .jpg/.png pairs
        # transform: optional callable applied to the satellite image only
        # randomflip: enable random horizontal/vertical flip augmentation
        self.images = None
        self.labels = None
        self.filenames = []
        self.root = root
        self.transform = transform
        self.randomflip = randomflip
        # read filenames: pair the i-th sorted .jpg with the i-th sorted .png
        sat_filenames = glob.glob(os.path.join(root, '*.jpg'))
        sat_filenames.sort()
        mask_filenames = glob.glob(os.path.join(root, '*.png'))
        mask_filenames.sort()
        for sat_fn, mask_fn in zip(sat_filenames, mask_filenames):
            self.filenames.append((sat_fn, mask_fn))
        self.len = len(self.filenames)
    def __getitem__(self, index):
        """ Get a sample from the dataset """
        sat_fn, mask_fn = self.filenames[index]
        sat = Image.open(sat_fn)
        mask = Image.open(mask_fn)
        if (self.randomflip):
            # Flip image and mask together so they stay aligned; horizontal
            # and vertical flips are decided independently (50% chance each).
            if random.random() > 0.5:
                sat = TF.hflip(sat)
                mask = TF.hflip(mask)
            if random.random() > 0.5:
                sat = TF.vflip(sat)
                mask = TF.vflip(mask)
        # NOTE(review): transform is applied to the image only, so it must not
        # change geometry (no random crop/rotate) or it would desync the mask
        # -- confirm with callers.
        if self.transform is not None:
            sat = self.transform(sat)
        return sat, mask_target(mask)
    def __len__(self):
        """ Total number of samples in the dataset """
        return self.len
| [
"kai860115@gmail.com"
] | kai860115@gmail.com |
f2141c2a5ca33051d3a1f0f6e37b4171bbbfe7e4 | 8d19a7ce4deaa5f59acb8048e886ba7f05d87180 | /plotting/swa.py | 6451e09d0b9cbea33b85a7da12614eb00140cb38 | [
"MIT"
def make_grace(my_ax):
    """Give *my_ax* (a matplotlib Axes) an xmgrace-like look.

    Inward y-ticks on both sides, black 1pt spines, and black 5pt ticks.
    (The original ``def`` line was fused with dataset metadata; restored.)
    """
    my_ax.tick_params(axis='y', direction='in', which='both', labelsize=10,
                      left=True, right=True)
    for side in ['top', 'bottom', 'left', 'right']:
        spine = my_ax.spines[side]
        spine.set_color('black')
        spine.set_linewidth(1)
    my_ax.tick_params(colors='black', size=5., width=1.)
def plot_transfer_acceptance(data, feed, run):
    """Bar-chart the percent-accepted of each swap/swatch move and save it.

    *data* maps move name -> box-pair -> {'accepted'|'attempted':
    {'mean','stdev'}}.  The figure is written to '<feed>-<run>.png' and also
    shown on screen.
    """
    # get list of move types and box transfers
    move_names = []
    boxes = []
    legend = []
    for key, value in data.items():
        # get list of boxes involved; keep only moves that were ever accepted
        # NOTE(review): the inner loop variable deliberately reuses 'boxes'
        # from above, and 'move_names' is never filled -- looks like leftovers.
        for boxes, results in value.items():
            if results['accepted']['mean'] > 0.0:
                legend.append( key + '-' + boxes)
    # setup dimensions of plot
    ind = np.arange(len(legend))  # the x locations for the groups
    width = 0.15  # the width of the bars
    fig, ax = plt.subplots()
    vals = []
    yerr = []
    for move in sorted(legend):
        molec, box = move.split('-')
        move_data = data[molec][box]
        # acceptance ratio in percent
        percent_accepted = move_data['accepted']['mean'] / move_data['attempted']['mean'] * 100.
        # propagate the relative errors of accepted/attempted in quadrature
        stdev = percent_accepted*np.sqrt(
            math.pow(move_data['accepted']['stdev'] / move_data['accepted']['mean'],2)
            +
            math.pow(move_data['attempted']['stdev']/ move_data['attempted']['mean'],2)
        )
        vals.append(percent_accepted)
        yerr.append(stdev)
    # NOTE(review): color=np.random.rand(3,1) passes a (3,1) array as a single
    # RGB colour; newer matplotlib expects shape (3,) -- confirm against the
    # matplotlib version in use.
    ax.bar(ind, vals, width,yerr=yerr,color=np.random.rand(3,1))
    make_grace(ax)
    ax.set_ylabel('Percent Accepted')
    ax.set_xticks(ind + width)
    ax.set_xticklabels(sorted(legend),rotation=75.)
    ax.set_yscale('log')
    plt.subplots_adjust(left=0.11,right=0.97,top = 0.97, bottom=0.5)
    fig = plt.gcf()
    fig.set_size_inches(6.5, 6.5)
    fig.savefig('%s-%s.png'%(feed,run), dpi=300)
    plt.show()
import math, random
import numpy as np
import matplotlib.pyplot as plt
if __name__ == '__main__':
    import argparse, os, shelve
    from MCFlow.runAnalyzer import checkRun
    parser = argparse.ArgumentParser(description='plot swap and swatch accptances for given feed')
    parser.add_argument('-f','--feed',help='feed to analyze swaps and swatches for')
    parser.add_argument('-t','--type',help='type of run to analyze',default='equil-')
    args = vars(parser.parse_args())
    # The shelve database must live in the current working directory.
    assert os.path.isfile('SWAP-data.db'), 'No SWAP data found'
    with shelve.open('SWAP-data.db') as db:
        assert args['feed'] in db.keys(), 'Feed not in database'
        # copy this feed's results out of the shelf before it closes
        data = {args['feed']: db[args['feed']] }
    my_run = checkRun(args['type'],[data],args['feed'])
    plot_transfer_acceptance(data[args['feed']][my_run],args['feed'],my_run)
| [
"dejac001@umn.edu"
] | dejac001@umn.edu |
c5f63fefce90ea7f2fb12d5a4269401e45a5c574 | 2af3c577ec934c0d36ba40fbb55d1b67a99069fb | /account/migrations/0001_initial.py | f1344a6eb4f2fba47558b0d637c0a0762408b2f2 | [] | no_license | sathish-ku-mar/fynd-assignment | a3b7e16863d3db48a484aa0afde57c2d7d250899 | 93dc963d9ec441fd8a892e0d22be3af7df4aa960 | refs/heads/master | 2022-12-01T21:05:52.190085 | 2020-08-16T04:41:41 | 2020-08-16T04:41:41 | 287,740,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,497 | py | # Generated by Django 3.1 on 2020-08-15 11:31
import django.core.validators
from django.db import migrations, models
# NOTE: auto-generated Django migration -- do not edit by hand; create a new
# migration for any schema change.
class Migration(migrations.Migration):
    # First migration of the app: creates the custom User table.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('is_active', models.BooleanField(default=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(help_text='The name of the user', max_length=200)),
                ('email', models.EmailField(help_text='A unique email id of the user', max_length=254, unique=True)),
                # Phone numbers are validated as 10-15 digits only.
                ('phone', models.CharField(help_text='The phone number of the user', max_length=15, validators=[django.core.validators.RegexValidator(message='Please Enter correct Contact no.', regex='^\\d{10,15}$')])),
                ('password', models.TextField(help_text='The hashed password of the user')),
                ('last_login', models.DateTimeField(blank=True, help_text='The last login of the user', null=True)),
                ('is_admin', models.BooleanField(default=False, help_text='Boolean flag to check user is admin or not')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| [
"sathishkumar1954@gmail.com"
] | sathishkumar1954@gmail.com |
11022183e4676e6e9f90235da6453034baea344b | 319ee59669806935266cadd5c97bb30fca906412 | /actualapp.py | 34254941b75429343706ae35e5eab7174def3b9d | [
"MIT"
] | permissive | zenador/sporebusbot | 0cd505d03db975fca53dbe0c9759d34d66cc50b0 | 2ccc214cd8da4f8d3931db7cabfc4fb8e5183a78 | refs/heads/master | 2021-01-01T03:44:53.483042 | 2020-12-23T18:57:28 | 2020-12-23T18:57:28 | 59,570,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 297 | py | #!/usr/bin/env python
from flask import Flask#, send_from_directory
# Minimal Flask app; configuration is read from flaskapp.cfg next to the app.
app = Flask(__name__)
app.config.from_pyfile('flaskapp.cfg')
@app.route('/')
def index():
    # Health-check style root endpoint: returns a bare '.' body.
    return '.'
# The block below is effectively commented out (a bare string literal): an old
# route that served a verification file via send_from_directory.
'''
@app.route('/ATriggerVerify.txt')
def show_atrigger_verify():
    return send_from_directory("", 'ATriggerVerify.txt')
'''
| [
"zenador9@gmail.com"
] | zenador9@gmail.com |
90a66989e2c6b11e2cbe3b5a40014ec1d1beba97 | 36aea5790cc01c652326f6f5a5722f13ee9d498b | /m-hackathon/review10.py | e2ac404b28fb62f31280b7fb25534873fad595d3 | [] | no_license | kev158/NguyenTrongDuc-c4t | 2d9162a69dfa87e8ee24b93e4a72dc8811031cff | e1fba30f06c77bb7ab3271475d7ba6da9771ae09 | refs/heads/master | 2020-04-18T22:04:00.041524 | 2019-04-20T14:59:10 | 2019-04-20T14:59:10 | 167,783,372 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | dangnhap = input("tên đăng nhập:")
matkhau = input("Nhập mật khẩu:")
e = input("Nhập email:")
print(dangnhap)
print(matkhau)
print(e)
print("đăng ký thành công") | [
"trongduc811@gmail.com"
] | trongduc811@gmail.com |
90176d92ce58020a5923f9b84ebb560a8b0c83db | 3d9a2964d25cf7f3bdd03be8019e176d743e793b | /cyberAttackSimulation.py | 471c87fd70869587dd090430e7fc0573d7efb8e2 | [] | no_license | connoryin/cyber-attack-simulation | cd68e1d47be22adab7673541d795d4c44f783ad1 | 36df25102ebef807fb492c174063decc4b7b9a00 | refs/heads/master | 2020-12-22T02:57:21.518303 | 2020-02-17T22:38:10 | 2020-02-17T22:38:10 | 236,650,194 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,869 | py | from graph_tool.all import *
from numpy.random import *
import numpy as np
from random import sample, gauss
import matplotlib.pyplot as plt
from matplotlib.figure import Figure
from matplotlib.backends.backend_gtk3cairo import FigureCanvasGTK3Cairo as FigureCanvas
import sys, os, os.path
import time, math
import statsmodels.api as sm
from arch import arch_model
seed()  # re-seed NumPy's global RNG (fresh entropy each run)
from gi.repository import Gtk, Gdk, GdkPixbuf, GObject
plt.switch_backend('cairo')  # render matplotlib figures via cairo (GTK-compatible)
# parameters:
r = 0.1  # per-step probability an Infected node recovers (I->S, or I->R for SIRS)
s = 0.1  # per-step probability a Recovered node loses immunity (R->S, SIRS only)
beta = 0.05  # infection rate per infected neighbour per step
number_of_infected_at_beginning = 5
time_to_stop = 200  # step at which the summary legends are drawn and the loop stops
graph_to_show = 'random'  # topology name passed to create_graph()
colors = ['b', 'g', 'r', 'c', 'm', 'y', 'k', 'Orange', 'Tomato', 'Navy', 'Plum', 'Purple']
simulation = 0  # NOTE(review): declared 'global' in update_state but never used -- confirm
def create_qq_plot():
    """Build a 2x2 figure reserved for the QQ diagnostic panels.

    Returns the four axes (row-major order) followed by the GTK canvas.
    """
    fig = Figure()
    # Four quadrants: PDF panels on top, log-difference panels below.
    axes = [fig.add_subplot(pos) for pos in (221, 222, 223, 224)]
    fig.subplots_adjust(hspace=0.4)
    fig.suptitle('QQ plots of PDF & log-difference vs. normal distribution',
                 fontweight='bold')
    return (*axes, FigureCanvas(fig))
def _plot_half(axis, series_list, xlabel, ylabel):
    """Plot each series on *axis*, cycling through the global colors list."""
    for num, series in enumerate(series_list):
        axis.plot(series, colors[num % len(colors)])
    axis.set_xlabel(xlabel, labelpad=0, fontdict={'fontweight': 'bold'})
    axis.set_ylabel(ylabel, labelpad=0, fontdict={'fontweight': 'bold'})


def create_plot(xlabel, ylabel, title, data):
    """Build a two-row figure: even-indexed series on top, odd-indexed below.

    Returns (top_axes, bottom_axes, canvas).  The interleaved *data* layout
    matches simulation_list, whose even entries use the SIS model and odd
    entries SIRS.  (Refactor: the original duplicated the plotting loop and
    tracked the colour index by hand.)
    """
    f = Figure()
    a = f.add_subplot(211)
    _plot_half(a, data[0::2], xlabel, ylabel)
    b = f.add_subplot(212)
    _plot_half(b, data[1::2], xlabel, ylabel)
    # The prediction figure needs extra vertical room between the rows.
    if (title == 'Prediction of percentage of infected nodes'): f.subplots_adjust(hspace=0.6)
    canvas = FigureCanvas(f)
    return a, b, canvas
def create_graph(type, size=100,
                 p_for_random_graph=0.1,
                 mean_for_homogeneous_graph=6,
                 standard_deviation_for_homogeneous_graph=0.25):
    """Build an undirected graph-tool graph of the requested topology.

    *type* is one of 'price', 'ring', 'star', 'cluster', 'random' or
    'homogeneous'; anything else returns an empty graph.  ('type' shadows the
    builtin but is kept for interface compatibility.)
    """
    g = Graph(directed=False)
    if type == 'price':
        # Preferential-attachment network, viewed as undirected.
        g = price_network(size)
        g = GraphView(g, directed=False)
    elif type == 'ring':
        g = circular_graph(size)
    elif type == 'star':
        # One hub connected to size-1 leaves.
        node = g.add_vertex()
        leaves = g.add_vertex(size - 1)
        for l in leaves:
            g.add_edge(l, node)
    elif type == 'cluster':
        # Chain of randomly-sized cliques, closed into a loop at the end.
        # NOTE(review): random_integers is deprecated/removed in recent NumPy
        # (randint(1, n + 1) is the modern equivalent) -- confirm the pinned
        # NumPy version.
        running_size = size
        rnd = random_integers(1, running_size)
        g = complete_graph(rnd)
        v = g.vertex(randint(0, g.num_vertices()))
        preV = g.vertex(randint(0, g.num_vertices()))
        running_size -= rnd
        while (running_size > 0):
            rnd = random_integers(1, running_size)
            g.add_vertex(rnd)
            # Connect the newly added vertices into a clique...
            for i in range(g.num_vertices() - rnd, g.num_vertices()):
                for j in range(i + 1, g.num_vertices()):
                    g.add_edge(i, j)
            running_size -= rnd
            # ...and bridge it to a random vertex of the previous clique.
            curV = g.vertex(randint(g.num_vertices() - rnd, g.num_vertices()))
            g.add_edge(preV, curV)
            preV = g.vertex(randint(g.num_vertices() - rnd, g.num_vertices()))
        g.add_edge(preV, v)
    elif type == 'random':
        # Erdos-Renyi style G(n, p): each possible edge appears with probability p.
        g.add_vertex(size)
        for i, j in [(i, j) for i in range(0, size) for j in range(i + 1, size)]:
            if random() < p_for_random_graph:
                g.add_edge(g.vertex(i), g.vertex(j))
    elif type == 'homogeneous':
        # Near-regular graph: vertex degrees drawn from a tight Gaussian.
        g = random_graph(size,
                         lambda: math.ceil(gauss(mean_for_homogeneous_graph, standard_deviation_for_homogeneous_graph)),
                         directed=False)
    return g
def update_data(figure, xlabel, ylabel, title, data):
    """Redraw *figure* (an Axes) from scratch with every series in *data*.

    The prediction panel is drawn as histograms of the error samples; every
    other panel is drawn as line plots.  Colours cycle through the global
    'colors' list.
    """
    figure.clear()
    as_histogram = title == 'Prediction of percentage of infected nodes'
    for idx, series in enumerate(data):
        pen = colors[idx % len(colors)]
        if as_histogram:
            figure.hist(series, color=pen, histtype='bar', rwidth=0.5)
        else:
            figure.plot(series, pen)
    figure.set_xlabel(xlabel, labelpad=0, fontdict={'fontweight': 'bold'})
    figure.set_ylabel(ylabel, labelpad=0, fontdict={'fontweight': 'bold'})
    figure.set_title(title, {'fontweight': 'bold'})
    figure.grid()
# Build the displayed graphs: two copies of the same topology, one simulated
# with SIS dynamics and one with SIRS (see model_list below).
graph_list = []
g = create_graph(graph_to_show)
graph_list.append(g)
graph_list.append(g.copy())
layout_list = []
for graph in graph_list:
    layout_list.append(sfdp_layout(graph))
# simulation_list additionally holds "defended" variants of the same network;
# each variant appears twice (an SIS copy and a SIRS copy).
simulation_list = graph_list.copy()
# Minimum spanning tree of the base graph.
mst = min_spanning_tree(g)
mstg = GraphView(g, efilt=mst, directed=False)
mstg = Graph(mstg, prune=True)
simulation_list.append(mstg)
simulation_list.append(mstg.copy())
# Maximum-cardinality matching, labelled 'Edge Cover' in label_list.
cover, c = max_cardinality_matching(g)
coverg = GraphView(g, efilt=cover, directed=False)
coverg = Graph(coverg, prune=True)
simulation_list.append(coverg)
simulation_list.append(coverg.copy())
def find_rep(a, list):
    """Union-find 'find' with path compression.

    *list* is the parent array; returns the representative (root) of *a* and
    re-points every node on the walk from *a* directly at that root, exactly
    as the recursive formulation would.
    """
    # First walk up to locate the root...
    root = a
    while list[root] != root:
        root = list[root]
    # ...then compress the path so future lookups are short.
    node = a
    while list[node] != root:
        parent = list[node]
        list[node] = root
        node = parent
    return root
# 'Connected Edge Cover' variant: start from the matching graph and attach a
# shortcut edge to vertex 0 for every vertex with no 2-hop path to vertex 0
# (vertex_index[nn] == 0 identifies vertex 0).
connected_coverg = coverg.copy()
for v in connected_coverg.vertices():
    flag = False
    for n in v.all_neighbors():
        for nn in n.all_neighbors():
            if not connected_coverg.vertex_index[nn]:
                flag = True
    if not flag:
        connected_coverg.add_edge(connected_coverg.vertex(0), v)
simulation_list.append(connected_coverg)
simulation_list.append(connected_coverg.copy())
# 'Exact Cover' variant: randomly partition the vertices into groups and
# delete edges that cross group boundaries, unless removal would isolate an
# endpoint (degree 1).
cutg = g.copy()
cut = g.new_vertex_property('bool')
cut_array = cut.a
for i in range(len(cut_array)):
    cut_array[i] = randint(0, 3)
# NOTE(review): values 0..2 are written into a *bool* vertex property, so they
# may collapse to 0/1 -- confirm a three-way partition was intended.
for e in g.edges():
    if cut[e.source()] != cut[e.target()]:
        if len(list(cutg.vertex(g.vertex_index[e.source()]).all_neighbors())) != 1 and len(
                list(cutg.vertex(g.vertex_index[e.target()]).all_neighbors())) != 1:
            cutg.remove_edge(e)
simulation_list.append(cutg)
simulation_list.append(cutg.copy())
# Per-simulation dynamics model: even indices run SIS, odd indices SIRS.
model_list = ['SIS', 'SIRS', 'SIS', 'SIRS', 'SIS', 'SIRS', 'SIS', 'SIRS', 'SIS', 'SIRS']
label_list = ['SIS', 'SIRS', 'MST', 'Edge Cover', 'Connected Edge Cover', 'Exact Cover', 'MST', 'Edge Cover',
              'Connected Edge Cover', 'Exact Cover']
# Node states double as the RGBA fill colours used when drawing.
S = [0, 1, 0, 1]  # Green color
I = [1, 0, 0, 1]  # Red color
R = [0, 1, 1, 1]  # Blue color
E = [1, 0, 1, 1]  # Purple color
state_list = []
for g in simulation_list:
    state_list.append(g.new_vertex_property("vector<double>"))
# Start every node susceptible, then infect a random handful per simulation.
for i, g in enumerate(simulation_list):
    for v in g.vertices():
        state_list[i][v] = S
    vt = list(g.vertices())
    sp = sample(vt, number_of_infected_at_beginning)
    # Bug fix: this loop variable used to be named 's', clobbering the global
    # R->S probability that update_state() later compares against random().
    for seed_vertex in sp:
        state_list[i][seed_vertex] = I
frequency_list = []
for g in simulation_list:
    frequency_list.append([number_of_infected_at_beginning / g.num_vertices()])
distribution_list = []
for g in simulation_list:
    distribution_list.append([0] * (g.num_vertices() + 1))
num_infected_list = [number_of_infected_at_beginning] * len(simulation_list)
newly_infected_list = []
for g in simulation_list:
    newly_infected_list.append(g.new_vertex_property("bool"))
# Edge colours are only tracked for the two on-screen graphs.
edge_state_list = []
for g in graph_list:
    eprop = g.new_edge_property('vector<double>')
    for e in g.edges():
        eprop[e] = [0.8, 0.8, 0.8, 1]
    edge_state_list.append(eprop)
log_list = [[] for i in range(len(simulation_list))]
error_list = [[] for i in range(len(simulation_list))]
# NOTE(review): 'time' shadows the imported time module; the module does not
# appear to be used afterwards in this file, so this is latent rather than fatal.
time = 1
offscreen = sys.argv[1] == "offscreen" if len(sys.argv) > 1 else False
max_count = 20
if offscreen and not os.path.exists("../frames"):
    os.mkdir("../frames")
class SimulationWindow(Gtk.Window):
    """Main GTK window: live graph images on the left, analysis plots beside them."""
    def __init__(self):
        Gtk.Window.__init__(self, title='Cyber Attack Simulation')
        # Horizontal layout: [graph_box | box (column 1) | box_2 (column 2)].
        self.big_box = Gtk.Box()
        self.box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.box_2 = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.legend_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.graph_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.add(self.big_box)
        self.graphs = []
        # One labelled image per displayed graph: graph_draw renders to
        # '<i>.png', which is then loaded into a Gtk.Image widget.
        for i, g in enumerate(graph_list):
            label = Gtk.Label(
                '<span size="xx-large" weight="bold">' + model_list[i] + '</span>')
            label.set_use_markup(True)
            self.graph_box.pack_start(label, True, True, 0)
            graph_draw(g, pos=layout_list[i],
                       vprops={'fill_color': state_list[i], 'halo': newly_infected_list[i], 'halo_color': [1, 0, 1, 1]},
                       eprops={'color': edge_state_list[i]}, output=str(i) + '.png',
                       output_size=(400, 400))
            img = Gtk.Image()
            img.set_from_file(str(i) + '.png')
            self.graphs.append(img)
            self.graph_box.pack_start(self.graphs[i], True, True, 0)
        img = Gtk.Image()
        img.set_from_file('legend.png')
        self.graph_box.pack_start(img, True, True, 0)
        self.big_box.pack_start(self.graph_box, False, False, 0)
        # Column 1: time series (a11/a12), PDF (a21/a22) and ACF (a31/a32).
        self.a11, self.a12, self.canvas1 = create_plot('', '', '',
                                                       frequency_list)
        frame1=Gtk.Frame()
        frame1.add(self.canvas1)
        self.box.pack_start(frame1, True, True, 0)
        self.a21, self.a22, self.canvas2 = create_plot('Number of infected nodes', 'Frequency',
                                                       'Frequency Density (PDF) of percentage of infected nodes',
                                                       [[x / time for x in distribution_list[i]] for i in
                                                        range(len(distribution_list))])
        frame2 = Gtk.Frame()
        frame2.add(self.canvas2)
        self.box.pack_start(frame2, True, True, 0)
        self.a31, self.a32, self.canvas3 = create_plot('', '', '', [])
        frame3 = Gtk.Frame()
        frame3.add(self.canvas3)
        self.box.pack_start(frame3, True, True, 0)
        # Column 2: prediction panels (a41/a42, a51/a52) and QQ plots (a61..a64).
        self.a41, self.a42, self.canvas4 = create_plot('', '', 'Prediction of percentage of infected nodes', log_list)
        frame4 = Gtk.Frame()
        frame4.add(self.canvas4)
        self.box_2.pack_start(frame4, True, True, 0)
        self.a51, self.a52, self.canvas5 = create_plot('', '', 'Prediction of percentage of infected nodes', [])
        frame5 = Gtk.Frame()
        frame5.add(self.canvas5)
        self.box_2.pack_start(frame5, True, True, 0)
        # 'frame5' is intentionally reused here for the QQ canvas.
        self.a61, self.a62, self.a63, self.a64, self.canvas6 = create_qq_plot()
        frame5 = Gtk.Frame()
        frame5.add(self.canvas6)
        self.box_2.pack_start(frame5, True, True, 0)
        self.big_box.pack_start(self.box, True, True, 0)
        self.big_box.pack_start(self.box_2, True, True, 0)
        self.set_default_size(1920, 1080)
# This creates a GTK+ window with the initial graph layout
if not offscreen:
    win = SimulationWindow()
else:
    # NOTE(review): in offscreen mode 'win' is never bound, yet update_state's
    # offscreen branch and win.connect() below still reference it -- confirm
    # the offscreen path was ever exercised.
    pass
def update_state():
    """Advance every simulation one time step and refresh the GUI.

    Runs the SIS/SIRS dynamics on each graph in simulation_list, records the
    frequency / log-difference / histogram statistics, redraws the graph
    images and analysis panels, and returns True so GLib keeps scheduling it
    (False, which stops the animation, once time_to_stop is reached).
    """
    for n in newly_infected_list:
        n.a = False
    global time
    global simulation
    # visit the nodes in random order
    for i, g in enumerate(simulation_list):
        # Reset edge highlighting on the two displayed graphs.
        if i < len(graph_list):
            for e in g.edges():
                edge_state_list[i][e] = [0.8, 0.8, 0.8, 1]
        # Transition into a copy so all updates see the same-time snapshot.
        newState = state_list[i].copy()
        for v in g.vertices():
            if state_list[i][v] == I:
                if random() < r:
                    if model_list[i] == 'SIS':
                        newState[v] = S
                    else:
                        newState[v] = R
                    num_infected_list[i] -= 1
            elif state_list[i][v] == S:
                # Infection probability grows linearly with infected neighbours.
                ns = list(v.all_neighbors())
                p = 0
                for neighbor in ns:
                    if state_list[i][neighbor] == I:
                        p += 1
                p *= beta
                if random() < p:
                    if model_list[i] == 'SEIR':
                        newState[v] = E
                    else:
                        newState[v] = I
                    newly_infected_list[i][v] = True
                    num_infected_list[i] += 1
                    # Highlight the edges around a fresh infection in orange.
                    if i < len(graph_list):
                        for e in v.all_edges():
                            edge_state_list[i][e] = [1, 165 / 255, 0, 1]
            elif state_list[i][v] == R:
                if model_list[i] == 'SIRS':
                    if random() < s:
                        newState[v] = S
        state_list[i].swap(newState)
        frequency_list[i].append(num_infected_list[i] / g.num_vertices())
        log_list[i].append(math.log1p(frequency_list[i][-1]) - math.log1p(frequency_list[i][-2]))
        distribution_list[i][num_infected_list[i]] += 1
    time += 1
    if offscreen:
        pass
    else:
        # Re-render each displayed graph to '<i>.png' and reload the widget.
        for i, graph in enumerate(win.graphs):
            graph_draw(graph_list[i], pos=layout_list[i],
                       vprops={'fill_color': state_list[i], 'halo': newly_infected_list[i], 'halo_color': [1, 0, 1, 1]},
                       eprops={'color': edge_state_list[i]}, output=str(i) + '.png',
                       output_size=(400, 400))
            graph.set_from_file(str(i) + '.png')
        update_data(win.a11, '', 'Percentage(SIS)', 'Percentage of Infected Nodes (Time Series)',
                    frequency_list[0::2])
        update_data(win.a12, 'Time', 'Percentage(SIRS)', '',
                    frequency_list[1::2])
        update_data(win.a21, '', 'Frequency(SIS)',
                    'Frequency Density (PDF) of percentage of infected nodes',
                    [[x / time for x in distribution_list[i]] for i in range(len(distribution_list)) if i % 2 == 0])
        update_data(win.a22, 'Percentage of infected nodes', 'Frequency(SIRS)',
                    '',
                    [[x / time for x in distribution_list[i]] for i in
                     range(len(distribution_list)) if i % 2 != 0])
        update_data(win.a41, 'time', 'Log-Difference', 'Log-Difference & Prediction of SIS', log_list[0::2])
        update_data(win.a51, 'time', 'Log-Difference', 'Log-Difference & Prediction of SIRS', log_list[1::2])
        # ACF and QQ diagnostics need at least a couple of samples.
        if time > 2:
            win.a31.clear()
            for i, log in enumerate(log_list[0::2]):
                sm.graphics.tsa.plot_acf((np.array(log) ** 2), win.a31, c=colors[i], markersize=4)
            win.a31.set_title('ACF of log-difference squared', {'fontweight': 'bold'})
            win.a31.set_ylabel('(SIS)', {'fontweight': 'bold'})
            win.a31.grid()
            win.a32.clear()
            for i, log in enumerate(log_list[1::2]):
                sm.graphics.tsa.plot_acf((np.array(log) ** 2), win.a32, c=colors[i], markersize=4)
                win.a32.set_title('')
            win.a32.set_ylabel('(SIRS)', {'fontweight': 'bold'})
            win.a32.grid()
            win.a61.clear()
            for i,d in enumerate(distribution_list[0::2]):
                sm.qqplot(np.array(d), line='45', ax=win.a61, c=colors[i], markersize=4)
            win.a61.set_title('PDF (SIS)', {'fontweight': 'bold'})
            win.a61.set_xlabel('')
            win.a61.set_ylabel('')
            win.a61.grid()
            win.a62.clear()
            for i, d in enumerate(distribution_list[1::2]):
                sm.qqplot(np.array(d), line='45', ax=win.a62, c=colors[i], markersize=4)
            win.a62.set_title('PDF (SIRS)', {'fontweight': 'bold'})
            win.a62.set_xlabel('')
            win.a62.set_ylabel('')
            win.a62.grid()
            win.a63.clear()
            for i,l in enumerate(log_list[0::2]):
                sm.qqplot(np.array(l), line='45', ax=win.a63, c=colors[i], markersize=4)
            win.a63.set_title('log-difference (SIS)', {'fontweight': 'bold'})
            win.a63.set_ylabel('')
            win.a63.set_xlabel('')
            win.a63.grid()
            win.a64.clear()
            for i, l in enumerate(log_list[1::2]):
                sm.qqplot(np.array(l), line='45', ax=win.a64, c=colors[i], markersize=4)
            win.a64.set_title('log-difference (SIRS)', {'fontweight': 'bold'})
            win.a64.set_ylabel('')
            win.a64.set_xlabel('')
            win.a64.grid()
        # One-step volatility-model forecast ('arch' package) of the
        # log-difference; the absolute forecast error is accumulated per
        # simulation.
        for i, log in enumerate(log_list):
            print('################'+str(time)+'######################')
            prediction = arch_model(100*(np.array(log[:-1]))).fit().forecast(horizon=5).mean.at[time-3,'h.1']
            error_list[i].append(abs(prediction/100 - log[-1]))
        update_data(win.a42, 'error', 'Frequency', 'Prediction of percentage of infected nodes', error_list[0::2])
        update_data(win.a52, 'error', 'Frequency', 'Prediction of percentage of infected nodes', error_list[1::2])
        win.canvas1.draw()
        win.canvas2.draw()
        win.canvas3.draw()
        win.canvas4.draw()
        win.canvas5.draw()
        win.canvas6.draw()
    # if doing an offscreen animation, dump frame to disk
    if offscreen:
        global count
        # NOTE(review): 'count' is never initialised anywhere in this file, so
        # the offscreen path would raise NameError on first use -- confirm
        # whether an initialisation was lost.
        pixbuf = win.get_pixbuf()
        pixbuf.savev(r'./frames/%06dgraph.png' % count, 'png', [], [])
        if count > max_count:
            sys.exit(0)
        count += 1
    if time == time_to_stop:
        win.a21.legend(['mean: '+str(int(mean))+', median: '+str(int(median)) for mean, median in zip(100 * np.array(frequency_list[0::2]).mean(axis=1), np.median(100 * np.array(frequency_list[0::2]),axis=1))])
        win.a22.legend(['mean: '+str(int(mean))+', median: '+str(int(median)) for mean, median in zip(100 * np.array(frequency_list[1::2]).mean(axis=1), np.median(100 * np.array(frequency_list[1::2]),axis=1))])
        win.canvas2.draw()
        return False
    return True
# Bind the function above as an 'idle' callback.
cid = GObject.idle_add(update_state)
# We will give the user the ability to stop the program by closing the window.
win.connect("delete_event", Gtk.main_quit)
# Actually show the window, and start the main loop.
win.show_all()
Gtk.main()
| [
"connor@sjtu.edu.cn"
] | connor@sjtu.edu.cn |
e5a7b23422f40b51cdbaf54d658a81c03f5c2e6a | 5d21443ec6f858f6d5e5d260a670c94a2bc176e7 | /spider_jiandan2.py | 1c6c56a96049301dda4fa304fee23a332061ef14 | [
"MIT"
] | permissive | Sablier/SpiderBoy | 0f7a16571ea2148508eabfe3b074ec4e63b58f3c | 53b007d0b1b6e2375e5ee71d4b4e5e7deeeacafc | refs/heads/master | 2020-05-07T18:23:43.717739 | 2019-06-12T17:09:48 | 2019-06-12T17:09:48 | 180,764,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,588 | py | """使用面向对象思想编程。使用selenium包获取html内容,使用lxml解析网页"""
import os
import requests
from lxml import etree
from selenium import webdriver
class GetJandan(object):
    """Scraper that downloads 'ooxx' gallery images from jandan.net.

    Pages are rendered with PhantomJS (the site builds its image tags with
    JavaScript), parsed with lxml, and images saved as 0.jpg, 1.jpg, ...
    inside the configured directory.
    """
    def __init__(self, path, page=10):
        # path: directory the images are written to (created on demand)
        # page: how many listing pages to walk backwards from the newest one
        self.start_url = 'http://jandan.net/ooxx'
        self.url = ""  # kept for interface compatibility; not used internally
        self.save_path = path
        self.page = page
    def get_html(self, url):
        """Render *url* in headless PhantomJS and return the page source."""
        browser = webdriver.PhantomJS()
        browser.get(url)
        print("get", url)
        return browser.page_source
    def get_1stpage(self):
        """Open the start page and return the current (newest) page number.

        The original comment (translated): prints the first page to test the
        selector and obtain the page count, using hand-written selenium code.
        """
        browser = webdriver.PhantomJS()
        browser.get(self.start_url)
        target = browser.find_element_by_xpath('//span[@class="current-comment-page"]')
        first_page = target.text
        # SECURITY(review): eval() on scraped text (e.g. '[123]') executes
        # arbitrary code if the page ever changes; ast.literal_eval would be
        # the safe drop-in replacement.
        first_page = eval(first_page)[0]
        print("first page is %s" % first_page)
        browser.close()
        return first_page
    def search_img_url(self, html):
        """Extract every image src attribute from a rendered listing page."""
        element = etree.HTML(html)
        targets = element.xpath('//img[@referrerpolicy="no-referrer"]/@src')
        print(targets)
        return [each for each in targets]
    def visit_page(self, first_page):
        """Collect image URLs from *self.page* listing pages, newest first."""
        url_list = []
        for i in range(self.page):
            page_num = first_page - i
            url = "http://jandan.net/ooxx/page-" + str(page_num) + "#comments"
            print('start getting page: ', url)
            html = self.get_html(url)
            # Renamed from 'list' to avoid shadowing the builtin.
            found = self.search_img_url(html)
            url_list += found
        return url_list
    def get_img(self, url_list):
        """Download each (protocol-relative) URL, saving with a running index."""
        file_num = 0
        for url in url_list:
            file_name = str(file_num) + ".jpg"
            url = "http:" + url
            response = requests.get(url)
            picture = response.content
            self.save(picture, file_name)
            file_num += 1
    def save(self, file, name):
        """Write raw image bytes to *name* inside the configured save directory."""
        # Bug fix: previously wrote via the global 'path' (defined only under
        # __main__), breaking any other caller; use the instance setting.
        with open(self.save_path + name, "wb") as f:
            f.write(file)
        print("save picture %s" % name)
    def run(self):
        """Create/enter the target directory, then scrape and download images."""
        if not os.path.exists(self.save_path):
            os.mkdir(self.save_path)
        else:
            os.chdir(self.save_path)
        first_page = self.get_1stpage()
        url_list = self.visit_page(first_page)
        self.get_img(url_list)
if __name__ == '__main__':
    # Download the 3 most recent pages into the images-jandan folder.
    path = "C:/Users/BiaobiaoPeng/Desktop/python/projects/demoHTTP/images-jandan/"
    page = 3
    jandan = GetJandan(path,page)
    jandan.run()
| [
"albertpeng1991@gmail.com"
] | albertpeng1991@gmail.com |
ba5f4cd07918520b871fedc8ce034363db2a0906 | 5ca893fc4b0f65774260fd8909b12706bcf1a9fa | /tests/test_is_pangram.py | 11f0a6064dcfbf21cf6c3cbecf0859e4bd203e95 | [
"MIT"
] | permissive | daveoncode/python-string-utils | 27fe323292860e78bac954177c2556aa8f3f25b5 | 78929d88d90b1f90cb4837528ed955166bf0f559 | refs/heads/master | 2022-12-11T20:14:30.087257 | 2020-03-31T11:23:35 | 2020-03-31T11:23:35 | 39,955,389 | 55 | 12 | MIT | 2022-12-08T03:44:22 | 2015-07-30T14:39:06 | Python | UTF-8 | Python | false | false | 900 | py | from unittest import TestCase
from string_utils import is_pangram
class IsPangramTestCase(TestCase):
    """Unit tests for the ``is_pangram`` helper."""

    def test_non_string_objects_return_false(self):
        # Non-string inputs must be rejected quietly, never raise.
        non_strings = (1, ['xx'], {}, False, (1, 2, 3), object())
        for value in non_strings:
            # noinspection PyTypeChecker
            self.assertFalse(is_pangram(value))

    def test_is_pangram_returns_false_if_not_pangram(self):
        # Misses most of the alphabet.
        self.assertFalse(is_pangram('hello world'))

    def test_is_pangram_returns_true_if_pangram(self):
        # Classic sentence containing every letter a-z.
        self.assertTrue(is_pangram('The quick brown fox jumps over the lazy dog'))
| [
"davidezanotti@gmail.com"
] | davidezanotti@gmail.com |
148c8cc9b4af58e8cbf0df51d9ddf6cd1016874d | abda045e5516fb64f6e52031aced1da8e18e5bf7 | /tests/test_tokens.py | 7a801230e6afd238ef17921bc3cd9e9223ab3003 | [] | no_license | omar212/pascal.py | a99d92fdc7a23ca8cad4ddbd7df8f473523c6c3e | d51745bb85198c99a918e68289703a822648d3ad | refs/heads/master | 2020-05-26T05:51:37.827324 | 2014-06-19T06:51:39 | 2014-06-19T06:51:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,587 | py | import unittest
from modules.token_creator import TokenCreator
from modules.pascal_token import Token
class TestTokenAttributes(unittest.TestCase):
    """Verify the operator-category predicates on freshly created tokens."""

    def setUp(self):
        self.tokenCreator = TokenCreator()
        # One token per arithmetic symbol; line/column do not matter here,
        # so every tuple uses position (0, 0).
        make = self.tokenCreator.create
        self.tk_add = make(('+', 0, 0, 'handle_symbol'))
        self.tk_sub = make(('-', 0, 0, 'handle_symbol'))
        self.tk_mult = make(('*', 0, 0, 'handle_symbol'))
        self.tk_div = make(('/', 0, 0, 'handle_symbol'))

    def tearDown(self):
        pass

    # '+' is both a unary and an additive operator.
    def test_tk_add_is_unary(self):
        self.assertTrue(self.tk_add.is_unary_operator())

    def test_tk_add_is_mult(self):
        self.assertFalse(self.tk_add.is_mult_operator())

    def test_tk_add_is_add(self):
        self.assertTrue(self.tk_add.is_add_operator())

    def test_tk_add_is_relation(self):
        self.assertFalse(self.tk_add.is_relation_operator())

    # '-' behaves exactly like '+'.
    def test_tk_sub_is_unary(self):
        self.assertTrue(self.tk_sub.is_unary_operator())

    def test_tk_sub_is_mult(self):
        self.assertFalse(self.tk_sub.is_mult_operator())

    def test_tk_sub_is_add(self):
        self.assertTrue(self.tk_sub.is_add_operator())

    def test_tk_sub_is_relation(self):
        self.assertFalse(self.tk_sub.is_relation_operator())

    # '*' is purely multiplicative.
    def test_tk_mult_is_unary(self):
        self.assertFalse(self.tk_mult.is_unary_operator())

    def test_tk_mult_is_mult(self):
        self.assertTrue(self.tk_mult.is_mult_operator())

    def test_tk_mult_is_add(self):
        self.assertFalse(self.tk_mult.is_add_operator())

    def test_tk_mult_is_relation(self):
        self.assertFalse(self.tk_mult.is_relation_operator())

    # '/' is purely multiplicative.
    def test_tk_div_is_unary(self):
        self.assertFalse(self.tk_div.is_unary_operator())

    def test_tk_div_is_mult(self):
        self.assertTrue(self.tk_div.is_mult_operator())

    def test_tk_div_is_add(self):
        self.assertFalse(self.tk_div.is_add_operator())

    def test_tk_div_is_relation(self):
        self.assertFalse(self.tk_div.is_relation_operator())
| [
"brianwu02@gmail.com"
] | brianwu02@gmail.com |
db32253902147d6de63a312faa4dc7a41e150337 | c016088a3bdb255d4f5253185d27b5a4c75feb1b | /11_testing_your_code/11_3/employee.py | 919260baf759a0a8360fcd951f4ce7399a8e2888 | [
"MIT"
class Employee():
    """A minimal model of a company employee."""

    def __init__(self, first_name, last_name, annual_salary):
        """Store the employee's name (title-cased) and yearly pay."""
        self.first_name = first_name.title()
        self.last_name = last_name.title()
        self.annual_salary = annual_salary

    def give_a_raise(self, salary_raise=5000):
        """Increase the annual salary, by 5000 unless told otherwise."""
        self.annual_salary = self.annual_salary + salary_raise

    def edit_informations(self):
        """Print a one-line summary: first name, last name, current salary."""
        summary = self.first_name + ', ' + self.last_name + ', salary: ' + str(self.annual_salary)
        print(summary)
| [
"simonhoch1@gmail.com"
] | simonhoch1@gmail.com |
dd5173273d0d804b36621c3206594029a67b98df | 1d1e1ff4614e085ad5187fbc6aa68a003a5f1ac2 | /setup.py | 837db8adddf7a2f91b2082c288d1e68a96f52b8d | [
"MIT"
] | permissive | malisal/streampie | 9e83cf9ba51a7c241935d88f3821ed256de20133 | 7b1b24b01b234cd093a350f04ffaf1a4974282ad | refs/heads/master | 2021-01-10T07:51:44.333259 | 2016-04-20T17:11:07 | 2016-04-20T17:11:07 | 53,448,331 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 888 | py | from setuptools import setup
# NOTE(review): ``required`` is parsed from requirements.txt but never
# referenced again -- install_requires below is hard-coded.  Confirm which
# list is meant to be authoritative.
with open("requirements.txt") as f:
    required = f.read().splitlines()

setup(
    name = "streampie",
    version = "0.2.4",
    description = "A simple, parallel stream processing library",
    author = "Luka Malisa",
    author_email = "luka.malisha@gmail.com",
    license = "MIT",
    url = "https://github.com/malisal/streampie",
    keywords = ["stream", "parallel", "distributed"],
    platforms=["any"],
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 3",
    ],
    # Pinned minimum versions; several entries (sphinx, ipython, matplotlib,
    # wheel) look like development/documentation dependencies rather than
    # runtime requirements -- presumably intentional, verify.
    install_requires = [
        "dill>=0.2.5",
        "redis>=2.10.5",
        "sphinx>=1.3.6",
        "ipython>=4.1.2",
        "matplotlib>=1.5.1",
        "wheel>=0.29.0",
    ],
    py_modules = ["streampie"],
)
| [
"luka.malisha@gmail.com"
] | luka.malisha@gmail.com |
84c6464c17e376eb7e83ffc85264a665216ccf37 | c70457843b82b4ea0212feb81153be1b7cbe8240 | /lab_04/orderedset.py | fe251a155503050e7097144084a7941caccbc93b | [] | no_license | alexbardas/krr | 65b4443a784d952d0a07a0dbf7b801ef54b00bb8 | e88c40b61b9b09612f83e0cd1c488291cc0446f0 | refs/heads/master | 2016-09-01T21:07:27.774450 | 2013-01-15T23:47:01 | 2013-01-15T23:47:01 | 6,520,495 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,807 | py | import collections
import collections.abc

# Field indices of a linked-list node [key, prev, next].
KEY, PREV, NEXT = range(3)


class OrderedSet(collections.abc.MutableSet):
    """A set that remembers insertion order, backed by a circular doubly
    linked list (Raymond Hettinger's classic recipe)."""

    def __init__(self, iterable=None):
        # Sentinel node for the circular doubly linked list; it is both the
        # head and the tail, so the list is never empty.
        self.end = end = []
        end += [None, end, end]
        self.map = {}  # key -> [key, prev, next] node in the linked list
        if iterable is not None:
            self |= iterable

    def __len__(self):
        return len(self.map)

    def __contains__(self, key):
        return key in self.map

    def add(self, key):
        """Append *key* at the end unless it is already present."""
        if key not in self.map:
            end = self.end
            curr = end[PREV]
            curr[NEXT] = end[PREV] = self.map[key] = [key, curr, end]

    def discard(self, key):
        """Remove *key* if present, splicing its node out of the list."""
        if key in self.map:
            # FIX: the original unpacked into ``next``, shadowing the
            # builtin within this method.
            key, prev_node, next_node = self.map.pop(key)
            prev_node[NEXT] = next_node
            next_node[PREV] = prev_node

    def __iter__(self):
        end = self.end
        curr = end[NEXT]
        while curr is not end:
            yield curr[KEY]
            curr = curr[NEXT]

    def __reversed__(self):
        end = self.end
        curr = end[PREV]
        while curr is not end:
            yield curr[KEY]
            curr = curr[PREV]

    def pop(self, last=True):
        """Remove and return the last element (or the first if last=False).

        Raises KeyError when the set is empty."""
        if not self:
            raise KeyError('set is empty')
        key = next(reversed(self)) if last else next(iter(self))
        self.discard(key)
        return key

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, list(self))

    def __eq__(self, other):
        # Order matters only when comparing two OrderedSets.
        if isinstance(other, OrderedSet):
            return len(self) == len(other) and list(self) == list(other)
        return set(self) == set(other)

    def __del__(self):
        self.clear()  # break the circular linked-list references
"alexbardas@gmail.com"
] | alexbardas@gmail.com |
278789d4b6a9951c0f6d6bf55614b7139a62b95b | 4e208d8d4f34c9f0f946d5c762dc7dc94a2593db | /apps/my_app/migrations/0001_initial.py | 47ea576627ec29e03f3dfd3eeeac42ff671a913b | [] | no_license | tarrdog/python_black_belt | 0231c71752c72c02729ad1e8627a99ce07e64c36 | fa53d48a0d42a26a5174c102c13ec33f83e79421 | refs/heads/master | 2020-12-03T00:43:00.206731 | 2017-07-03T04:05:08 | 2017-07-03T04:05:08 | 96,069,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,545 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-07-03 00:07
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Appointment and User
    tables and links each appointment to its user with a cascading FK."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Appointment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('status', models.CharField(max_length=10)),
                ('date', models.DateField()),
                ('time', models.TimeField()),
            ],
        ),
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('password', models.CharField(max_length=20)),
                ('email', models.CharField(max_length=50)),
                ('date_of_birth', models.DateField()),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        # Added as a separate step because Appointment is created before
        # User above; deleting a User cascades to their appointments.
        migrations.AddField(
            model_name='appointment',
            name='user_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='my_app.User'),
        ),
    ]
| [
"tarrdog@gmail.com"
] | tarrdog@gmail.com |
627162d6e931f6e673884ff3d7894f226d49c18b | 613057da52adee14bd24c7570aa54291413f0792 | /身份证校验系统.py | f4bdfa40818d62aa7d5108cd30cf17b28e323754 | [] | no_license | han925981034/python | 565d3ee95bc73017e3658b4c8af5a455664c7e8b | 8b48b5f3481477252d66a917943f67599a8e6ea1 | refs/heads/master | 2020-08-16T14:54:17.745165 | 2019-10-16T11:26:26 | 2019-10-16T11:26:26 | 215,514,430 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,513 | py | from tkinter import *
import time,datetime
class IDCCheck():
    """Tkinter GUI that validates a Chinese 18-digit ID number and displays
    the holder's validity, sex, birth date and region of issue."""

    def __init__(self):
        # --- main window ---
        self.win=Tk()
        self.win.geometry('700x400')
        self.win.title('身份验证系统')
        self.win['bg'] = 'lightblue'
        # Decorative image on the left-hand side.
        self.image = PhotoImage(file='11.png')
        self.lable_image=Label(self.win,image=self.image)
        self.lable_image.place(x=10,y=10)
        # Prompt label + entry box where the user types the ID number.
        self.id_card=Label(self.win,text='请输入身份证号码:',font=('微软雅黑',14,'bold'),bg='navy',fg='lightblue')
        self.id_card.place(x=280,y=10,width=200)
        self.result_exits = StringVar()
        self.entry=Entry(self.win,textvariable=self.result_exits)
        self.entry.place(x=280,y=50,width=270,height=30)
        self.result_exits.set("")
        # "Check" button runs the validation routine Id_birth.
        self.button_exits=Button(self.win,text='校验',font=('微软雅黑',12,'bold'),command=self.Id_birth,fg='navy')
        self.button_exits.place(x=580,y=45,width=60)
        # Read-only output field: valid / invalid.
        self.result_true = StringVar()
        self.lable_check = Label(self.win, text='是否有效:', font=('微软雅黑', 14, 'bold'), fg="navy", bg="lightblue")
        self.lable_check.place(x=280, y=110)
        self.result_true.set("")
        self.entry_text = Entry(self.win,state=DISABLED,textvariable=self.result_true)
        self.entry_text.place(x=380, y=115,height=25,width=90)
        # Read-only output field: sex.
        self.result_sex = StringVar()
        self.lable_sex = Label(self.win, text=' 性别:', font=('微软雅黑', 14, 'bold'), fg="navy", bg="lightblue")
        self.lable_sex.place(x=280, y=160)
        self.result_sex.set("")
        self.entry_text1= Entry(self.win, state=DISABLED,textvariable=self.result_sex)
        self.entry_text1.place(x=380, y=165, height=25, width=90)
        # Read-only output field: birth date.
        self.result_birth = StringVar()
        self.lable_birth= Label(self.win, text='出生日期:', font=('微软雅黑', 14, 'bold'), fg="navy", bg="lightblue")
        self.lable_birth.place(x=280, y=210)
        self.result_birth.set("")
        self.entry_text2= Entry(self.win, state=DISABLED,textvariable=self.result_birth)
        self.entry_text2.place(x=380, y=215, height=25, width=210)
        # Read-only output field: region of issue.
        # NOTE(review): attribute is spelled 'reselt_address' throughout.
        self.reselt_address = StringVar()
        self.lable_address = Label(self.win, text=' 所在地:', font=('微软雅黑', 14, 'bold'), fg="navy", bg="lightblue")
        self.lable_address.place(x=280, y=260)
        self.reselt_address.set("")
        self.entry_text3 = Entry(self.win, state=DISABLED,textvariable=self.reselt_address)
        self.entry_text3.place(x=380, y=275, height=25, width=210)
        # "Close" button quits the application.
        self.button_quit = Button(self.win, text='关闭', command=self.Id_quit,font=('微软雅黑', 12, 'bold'), fg='navy')
        self.button_quit.place(x=530, y=330, width=60,height=30)

    def show(self):
        """Enter the Tk main event loop."""
        self.win.mainloop()

    def get_info(self):
        # Placeholder; not implemented.
        pass

    def Id_quit(self):
        """Close the application window."""
        self.win.quit()

    def Id_birth(self):
        """Validate the entered ID number and populate the result fields."""
        try:
            # Legal ID characters, checksum weight factors, and the
            # check-digit lookup table (per the GB 11643 scheme).
            list = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'X']
            si_list = [7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2]
            si_list1 = ['1', '0', 'X', '9', '8', '7', '6', '5', '4', '3', '2']
            # id_sql.txt maps 6-digit region codes to place names, one
            # space-separated pair per line.
            dt = {}
            with open('id_sql.txt', 'r') as dict_file:
                for line in dict_file:
                    (key, value) = line.strip().split(' ')
                    dt[key] = value
            a = self.result_exits.get()
            # Field layout of an 18-character ID number.
            year = a[6:10]
            month = a[10:12]
            day = a[12:14]
            sex = a[16]          # 17th digit encodes sex
            address = a[0:6]     # first six digits encode the region
            number = a[0:17]     # payload used by the checksum below
            of_number = 0
            for i in a:
                if i in list and len(a) == 18:
                    try:
                        if address in dt:
                            # Look the region code up and show its name.
                            for x in dt:
                                if x == address:
                                    self.reselt_address.set(dt[x])
                                    break
                                continue
                        else:
                            year = 0  # unknown region: force the date check to fail
                        # Accept only birth dates between 1970-01-01 (CST)
                        # and the current time.
                        b = time.mktime(datetime.datetime(int(year), int(month), int(day)).timetuple())
                        y = time.mktime(datetime.datetime(1970, 1, 1, 8, 00).timetuple())
                        x = time.time()
                        if b >= y and b <= x:
                            self.result_true.set('有效')
                            self.result_birth.set("%s-%s-%s"%(year,month,day))
                            # NOTE(review): this condition is only true for
                            # the digit 0, so even digits 2/4/6/8 fall into
                            # the male branch; the usual rule is that any
                            # even 17th digit means female -- verify.
                            if int(sex) == 0 and int(sex) % 2 == 0:
                                self.result_sex.set("女")
                            else:
                                self.result_sex.set("男")
                        else:
                            self.result_true.set('无效')
                    except:
                        # Malformed date fields etc.: mark as invalid.
                        self.enabled()
                else:
                    # Illegal character or wrong length.
                    self.enabled()
            # Weighted checksum over the first 17 digits; the remainder
            # mod 11 indexes si_list1 and must equal the final character.
            # NOTE(review): this runs after the fields above were already
            # populated, so a bad check digit only resets them afterwards.
            for i in range(len(number)):
                of_number += int(number[i]) * int(si_list[i])
            of_number = of_number % 11
            if a[17:] != si_list1[of_number]:
                self.enabled()
        except:
            # Any unexpected parsing error: mark the ID as invalid.
            self.enabled()

    def enabled(self):
        """Mark the ID as invalid and clear the sex/birth/region fields."""
        self.result_true.set('无效')
        self.result_sex.set("")
        self.result_birth.set("")
        self.reselt_address.set("")
if __name__ == '__main__':
    # Build the GUI and enter the Tk main loop.
    long = IDCCheck()  # NOTE(review): 'long' shadows the Python 2 builtin name
    long.show()
| [
"noreply@github.com"
] | han925981034.noreply@github.com |
fc0ba945af6ee21809c0c81d4d2d1d2a8b103254 | 79a3cc6868730fd5fb748dfc2c4430e6ac52b26c | /Weblogic-Deployment/roles/Application-2/templates/start-managed-servers.py | c8b0cc2118d2516e2f2ab9e670ed6b611d2b87bd | [
"BSD-3-Clause"
] | permissive | rgaddam/Weblogic-Deployement-Using-Ansible | 6b431e9238f68ba82da5d03ac3f1460ac617c891 | 0e16d4dd4263da6fd604d231b190a5fd21bc36a1 | refs/heads/master | 2022-04-06T03:38:18.851761 | 2020-02-28T21:58:23 | 2020-02-28T21:58:23 | 70,837,199 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 492 | py | # Set AdminServer connection URL
# WLST script (rendered by Ansible/Jinja2): connects to the AdminServer,
# starts two managed servers without blocking, then disconnects.

# Set AdminServer connection URL
ADMIN_SERVER_URL = 't3://' + '{{ admin_server_hostname }}' + ':' + '{{ admin_server_port }}';

# Connect to the AdminServer
connect('{{ weblogic_admin }}', '{{ weblogic_admin_pass }}', ADMIN_SERVER_URL);

# Start an edit session before issuing lifecycle commands
edit();
startEdit();

# Start Managed Server-1 (block='false' means do not wait for completion)
# NOTE(review): Jinja2 parses '-' as subtraction, so
# '{{ managed_server_name-1 }}' will not resolve as a single variable name;
# confirm the intended Ansible variable names (underscores are conventional).
start('{{ managed_server_name-1 }}', block='false');

# Start Managed Server-2
start('{{ managed_server_name-2 }}', block='false');

# Disconnect from AdminServer
disconnect();
exit();
"noreply@github.com"
] | rgaddam.noreply@github.com |
de0872015a00bf7aa5c3e151ddeecb83fe00d452 | 86e25f7afc5e5ca1194512b79b1e581c1a682946 | /container.py | 18bb431ac148799c895e0f70580133153fa02f99 | [] | no_license | Vlad-Iliescu/kinetic-python | 85b7442326d4814175acdf333453c0f65d85ef61 | 4b8d1b0680e410d0533e76a6edcd3fd44934a25d | refs/heads/master | 2016-09-05T23:19:25.324952 | 2012-09-07T12:07:13 | 2012-09-07T12:07:13 | 32,134,186 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,854 | py | __author__ = 'Vlad'
from node import Node
from util.global_options import Kinetic, write_output
class Container(Node):
    """Base class for nodes that can hold children (stages, layers, groups).

    Mirrors the KineticJS Container API; methods decorated with
    ``write_output`` also record the equivalent JavaScript call."""

    def __init__(self, var_name=None, **kwargs):
        super(Container, self).__init__(**kwargs)
        self._parse_container_config(kwargs)
        self.name = var_name
        self.children = []
        if self.name:
            self._make_constructor()

    @write_output
    def add(self, child):
        """Append *child* to this container and register it with the stage
        (or park it in the global temp-node list when not yet staged)."""
        child.index = len(self.children)
        child.parent = self
        self.children.append(child)
        stage = child.get_stage()
        if not stage:
            Kinetic.Global._add_temp_node(child)
        else:
            stage._add_id(child)
            stage._add_name(child)
            go = Kinetic.Global
            go._pull_nodes(stage)
        if hasattr(self, '_add'):
            self._add(child)
        return '%s.add(%s);' % (self.name, child.name)

    def get(self, selector):
        """Return an array of nodes that match the selector.

        Use '#' for id selections and '.' for name selections, e.g.::

            node = stage.get('#foo')   # selects node with id foo
            nodes = layer.get('.bar')  # selects nodes with name bar inside layer

        The literal selectors 'Shape', 'Group' and 'Layer' select by
        node type."""
        stage = self.get_stage()
        key = selector[1:]
        if selector[0] == '#':
            # FIX: use dict.get so an unknown id yields [] instead of the
            # KeyError the original raised by indexing before testing.
            node = stage.ids.get(key)
            arr = [node] if node else []
        elif selector[0] == '.':
            # NOTE(review): 'stage.name' is assumed to be the name->node
            # mapping maintained via _add_name; verify it is not the plain
            # string attribute set in __init__.
            node = stage.name.get(key)
            arr = [node] if node else []
        elif selector in ('Shape', 'Group', 'Layer'):
            return self._get_nodes(selector)
        else:
            return []
        # Keep only matches that actually live below this container.
        return [node for node in arr if self.is_ancestor_of(node)]

    def get_children(self):
        """Return the list of direct children."""
        return self.children

    def get_intersections(self, x, y):
        """Return the visible shapes whose area contains point (x, y)."""
        # BUG FIX: the original queried self.get('Shapes'), a selector
        # get() does not recognise, so this method always returned [].
        arr = []
        for shape in self.get('Shape'):
            if shape.is_visible() and shape.intersects(x, y):
                arr.append(shape)
        return arr

    def is_ancestor_of(self, node):
        """True when *node* sits somewhere below this container.

        A Stage is treated as the ancestor of every node."""
        if self.node_type == 'Stage':
            return True
        parent = node.get_parent()
        while parent:
            if parent is self:
                return True
            parent = parent.get_parent()
        return False

    @write_output
    def remove(self, child):
        """Detach *child* (and, recursively, its own children) from this
        container and unregister it from the stage."""
        if child and child.index is not None and self.children[child.index] is child:
            stage = self.get_stage()
            if stage:
                stage._remove_id(child.get_id())
                stage._remove_name(child.get_name())
            Kinetic.Global._remove_temp_node(child)
            del self.children[child.index]
            self._set_children_indices()
            # BUG FIX: the original passed the whole child.children list to
            # remove(); remove the first remaining child each iteration.
            while child.children:
                child.remove(child.children[0])
            if hasattr(child, '_remove'):
                child._remove()
        return '%s.remove(%s);' % (self.name, child.name)

    @write_output
    def remove_children(self):
        """Remove every child from this container."""
        while len(self.children) > 0:
            self.remove(self.children[0])
        return '%s.removeChildren();' % self.name

    def _parse_container_config(self, kwargs):
        # Only 'alpha' is container-specific; everything else is handled by
        # the Node base class.
        if 'alpha' in kwargs:
            self.attrs.alpha = round(kwargs['alpha'], 2)

    def _get_nodes(self, selector):
        """Collect every descendant whose node_type equals *selector*."""
        arr = []

        def traverse(cont):
            for child in cont.get_children():
                if child.node_type == selector:
                    arr.append(child)
                elif child.node_type != 'Shape':
                    # Only containers (non-shapes) can hold children.
                    traverse(child)

        traverse(self)
        return arr

    def _set_children_indices(self):
        # Re-number the children after an insertion or removal.
        for n in xrange(len(self.children)):
            self.children[n].index = n
if __name__ == '__main__':
    # Smoke test: build a container with two nodes, reorder them, and dump
    # the recorded JS calls to kin.js.
    c = Container('container',**{'alpha': 1})
    from util.global_options import write_to_file
    # NOTE(review): Node is re-imported here although it is already
    # imported at module level.
    from node import Node
    n1 = Node('node1', x=10)
    n2 = Node('node2')
    c.add(n1)
    c.add(n2)
    n2.move_down()
    write_to_file('kin.js')
| [
"Vlad.Iliescu.88@gmail.com@42f960a1-aad4-b08e-6b72-5efd33d37d32"
] | Vlad.Iliescu.88@gmail.com@42f960a1-aad4-b08e-6b72-5efd33d37d32 |
81ca8e11005c1feb11cd0cbc18f5ea33860d493a | 8e6d78920b4d5ed60616d69cfba1824afeff611b | /backend/backend/urls.py | c0a2bb028b4fe8a80c5860a450aa2c4cfc99f5f3 | [] | no_license | shiowwj/django-react | 1c711b9bc879f13e99d1e6bc2c1e56d85a12729b | b381d48e6574eb134b8b530a758b6f7ea0719914 | refs/heads/master | 2023-01-06T07:29:36.168514 | 2019-06-19T10:04:26 | 2019-06-19T10:04:26 | 192,477,199 | 0 | 0 | null | 2023-01-04T00:45:44 | 2019-06-18T06:14:31 | JavaScript | UTF-8 | Python | false | false | 1,406 | py | """backend URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from rest_framework import routers
from todo import views
# from shopping import views
print ('hello',views)
router = routers.DefaultRouter()
print('OVHER HERE', router)
router.register(r'todos', views.TodoView, 'todo')
router.register(r'shoppings', views.ShoppingView, 'shopping')
#this step completes the building of the API.WE CAN NOW PERFORM CRUD operations on the Todo model.
#router class allows us to make the following queries:
#/todos/ -> returns a list of all the Todo items (create and read can be done here)
#/todos/id -> returns a single Todo item using id primary key (update and delete done here)
urlpatterns = [
path('admin/', admin.site.urls), path('api/', include(router.urls))
] | [
"shiowweijun@gmail.com"
] | shiowweijun@gmail.com |
01ed533f6c9a4859a87371bd27fea296ed647fc0 | e58b0929819446d737a3cf78ea39893036c7be38 | /subcmds/diff.py | 9045edc7e7a370bc23b714fbe73ff0900548f491 | [
"Apache-2.0"
] | permissive | bgabrhelik/git-repo | d732820854a7816860a5ccb968ecbfe08cf2b5d1 | c326834397ca0afe0b0a66728a18881005858f66 | refs/heads/master | 2020-12-26T00:07:57.899455 | 2015-03-26T17:00:40 | 2015-03-26T17:00:40 | 33,419,452 | 1 | 1 | null | 2015-04-04T20:34:41 | 2015-04-04T20:34:41 | null | UTF-8 | Python | false | false | 1,472 | py | #
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from command import PagedCommand
class Diff(PagedCommand):
  """``repo diff``: show uncommitted working-tree changes per project."""

  common = True
  helpSummary = "Show changes between commit and working tree"
  helpUsage = """
%prog [<project>...]
The -u option causes '%prog' to generate diff output with file paths
relative to the repository root, so the output can be applied
to the Unix 'patch' command.
"""

  def _Options(self, p):
    # FIX: removed an unused nested callback ('cmd') that was defined here
    # but never referenced by any option.
    p.add_option('-u', '--absolute',
                 dest='absolute', action='store_true',
                 help='Paths are relative to the repository root')

  def Execute(self, opt, args):
    # Print a working-tree diff for every selected project.
    for project in self.GetProjects(args):
      project.PrintWorkTreeDiff(opt.absolute)
| [
"matthias.putz@esrlabs.com"
] | matthias.putz@esrlabs.com |
3a19ba528e7e6cf2e1c340ffc2f61d93a9766961 | 306dc56bdbe12120541ec0ad80cf6e428ded2d0c | /LastQue!/connect5.py | 429c692dd7908cb47a186cbe5fe6768e615c2ecd | [] | no_license | DevTotti/Challenges | bf286b11a5405c0ae8853e00b6c4024149a77be8 | 448521de19d48ad7d061ae33c87838d661c67868 | refs/heads/master | 2020-06-21T00:37:15.330204 | 2019-07-17T02:39:14 | 2019-07-17T02:39:14 | 197,300,585 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,511 | py | """
Name: Osuntolu Paul
email: neptunecody@gmail.com
Intermediate test question 5 & 6
"""
import mechanicalsoup
import re
import json
import requests
from statistics import mean
import scipy
import scipy.stats as st
from scipy.spatial.distance import cdist
from scipy.interpolate import *
import numpy as np
# One shared HTTP session; authenticated below with the saved cookies.
s=requests.Session()
# Load the Facebook cookies previously exported to facebook_cookie.json.
with open('facebook_cookie.json') as fb:
    load = json.load(fb)
# The scrapers below start with the female-friends search page.
def get_female_friends():
    """Scrape the 'female friends' search page, dump the results to
    female_friends.json, and return the number of friends found."""
    female_friends = []
    # Authenticate the shared session with the saved Facebook cookies.
    for cookie in load:
        s.cookies.set(cookie['name'], cookie['value'])
    url = 'https://web.facebook.com/search/100002743322368/friends/intersect/females/intersect?_rdc=1&_rdr'
    browser = mechanicalsoup.StatefulBrowser(session=s)
    browser.open(url)
    # Each friend card in the rendered page is delimited by class="_ajw".
    page = str(browser.get_current_page())
    for chunk in page.split('class="_ajw"'):
        name_match = re.search(r'EntRegularPersonalUser"><span>([a-zA-Z0-9\-\s]+)', chunk)
        details_match = re.search(r'show="1">([a-zA-Z0-9.\-\s]+)', chunk)
        if name_match is None:
            continue
        female_friends.append({
            "name": name_match.group(1),
            "details": details_match.group(1),
        })
    # BUG FIX: json.dump writes text, so the file must be opened in text
    # mode ('w'), not binary ('wb'); a context manager also closes it.
    with open('female_friends.json', 'w') as ff:
        json.dump(female_friends, ff)
    # BUG FIX: return the number of friends found, not the length of the
    # last friend's name string.
    return len(female_friends)
#get the data for the pages liked by user
#define a function for pages liked
def get_pages_liked():
    """Scrape the 'pages liked' search page, dump the page names to
    pages_likes.json, and return the number of pages found."""
    pages_likes = []
    # Authenticate the shared session with the saved Facebook cookies.
    for cookie in load:
        s.cookies.set(cookie['name'], cookie['value'])
    url = 'https://web.facebook.com/search/100002743322368/pages-liked?_rdc=1&_rdr'
    browser = mechanicalsoup.StatefulBrowser(session=s)
    browser.open(url)
    # Each page card in the rendered HTML is delimited by class="_ajw".
    page = str(browser.get_current_page())
    for chunk in page.split('class="_ajw"'):
        name_match = re.search(r'EntConcreteOwnedPage"><span>([a-zA-Z0-9\-\s]+)', chunk)
        if name_match is None:
            continue
        pages_likes.append({"name": name_match.group(1)})
    # BUG FIX: open in text mode ('w') for json.dump and close the file via
    # a context manager (the original used 'wb' and never closed it).
    with open('pages_likes.json', 'w') as pl:
        json.dump(pages_likes, pl)
    # BUG FIX: the original returned the file object; downstream code feeds
    # this value into a numeric confidence interval, so return the count.
    return len(pages_likes)
def get_mutual_friends():
    """Scrape the mutual-friends search page, dump the results to
    mutual_friends.json, and return the number of friends found."""
    mutual_frn = []
    # Authenticate the shared session with the saved Facebook cookies.
    for cookie in load:
        s.cookies.set(cookie['name'], cookie['value'])
    url = 'https://web.facebook.com/search/100002743322368/friends/intersect/females/intersect?_rdc=1&_rdr'
    browser = mechanicalsoup.StatefulBrowser(session=s)
    browser.open(url)
    page = str(browser.get_current_page())
    for chunk in page.split('class="_ajw"'):
        name_match = re.search(r'EntRegularPersonalUser"><span>([a-zA-Z0-9\-\s]+)', chunk)
        details_match = re.search(r'show="1">([a-zA-Z0-9.\-\s]+)', chunk)
        if name_match is None:
            continue
        mutual_frn.append({
            "name": name_match.group(1),
            "details": details_match.group(1),
        })
    # BUG FIX: text mode ('w') for json.dump, closed via context manager.
    with open('mutual_friends.json', 'w') as mf:
        json.dump(mutual_frn, mf)
    # BUG FIX: return the count of friends, not the length of the last
    # friend's name string.
    return len(mutual_frn)
# Collect the three statistics fed to the confidence interval below.
mutual_friends = get_mutual_friends()
pl = get_pages_liked()
# BUG FIX: the original read ``friendsfemale=get_female_friends`` (no
# parentheses), binding the function object instead of calling it.
friendsfemale = get_female_friends()
def confidence_interval(loads, confidence=0.78):
    """Return the (low, high) Student-t confidence interval for the mean
    of *loads* at the given confidence level.

    ``loads`` is any sequence of numbers; the interval is centred on the
    sample mean and scaled by the standard error of the mean."""
    a = 1.0 * np.array(loads)
    n = len(a)
    loc = np.mean(a)    # sample mean
    scale = st.sem(a)   # standard error of the mean
    # BUG FIX: the original recursed into itself with a hard-coded tuple
    # (infinite recursion), passed the uncalled function ``np.mean`` as
    # ``loc``, and discarded the result.  Compute and return the interval.
    return st.t.interval(confidence, n - 1, loc=loc, scale=scale)
# Feed the three collected statistics into the confidence-interval routine.
# NOTE(review): this requires all three values to be numeric for
# np.array() to work -- verify the scraper functions return counts.
loads = (friendsfemale, pl, mutual_friends)
confidence_interval(loads, confidence=0.78)
| [
"paul.bIncoM@gmail.com"
] | paul.bIncoM@gmail.com |
c08a06098466014eebcd6ca0f27fc1259e9c1c1a | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /directconnect_write_f/virtual-interface_delete.py | 80e3a4f24ad845af820b46f860e9d930d4b0b178 | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 694 | py | #!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/describe-instances.html
if __name__ == '__main__':
"""
associate-virtual-interface : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/directconnect/associate-virtual-interface.html
describe-virtual-interfaces : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/directconnect/describe-virtual-interfaces.html
"""
write_parameter("directconnect", "delete-virtual-interface") | [
"hcseo77@gmail.com"
] | hcseo77@gmail.com |
07493ee2b882c3363f6d52bd9562f1c312196c56 | 99493b50c8be7e88090c2c5d9a9070cf33f15f3a | /小姐姐带你学自动化测试/01python/02 条件语句.py | fa9110454d699d20c423f1dc3b9116ce5e7566ed | [] | no_license | Saber1342/python | 42d2585c4e39f5b1ddca781881e76733b7d21918 | d714e6c273009bf3b9a6013ae788d8145f083f1b | refs/heads/master | 2020-12-19T11:54:55.776850 | 2020-01-23T04:59:08 | 2020-01-23T04:59:08 | 235,724,690 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 367 | py | #该实例演示数字猜谜游戏
guess = 0
number = 5
print("猜数字游戏")
while guess != number:
guess = int(input("请输入你猜的数字:"))
if guess == number:
print("恭喜,你猜对了!")
elif guess < number:
print("在猜哦,猜的数字小了")
elif guess > number:
print("在猜哦,猜的数字大了") | [
"549755670@qq.com"
] | 549755670@qq.com |
6a72e0a4d419c5ee1caa5dbd06bbe9167e2bf1dd | 341662c6723e379d37eac840e3d58c2c2b8e303c | /solutions/euler6.py | 253ae8671bd2fd7116a37c0b7437bb32171d1b07 | [] | no_license | jimmyhmiller/project-euler | 571ae047b2ce66324bc407724cffefe788649c06 | 124b7b4aa3d022c3072a92f1cec76011ced866a5 | refs/heads/master | 2020-05-22T01:09:21.139071 | 2017-03-28T05:24:38 | 2017-03-28T05:24:38 | 15,573,029 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py | #!/usr/bin/env python
def main():
    """Project Euler #6: print the difference between the square of the
    sum and the sum of the squares of the first 100 natural numbers."""
    sum_of_squares = sum(x**2 for x in range(1,101))
    square_of_sums = sum(range(1,101))**2
    # FIX: parenthesised print form runs under both Python 2 (prints the
    # single parenthesised expression) and Python 3 (function call); the
    # original statement form is a syntax error on Python 3.
    print(abs(sum_of_squares - square_of_sums))
if __name__ == '__main__':
main() | [
"nomicness@gmail.com"
] | nomicness@gmail.com |
92d4201ea1b976e5fadadf15d8d197cdf4270b45 | 13c03c269c3ed7a82e7771c5eafcaf8d4d46fa46 | /weather_DL.py | c4670b23961f46ee4ad77d0c641f80340ebfb6ff | [] | no_license | ncsa/CPRHD_SES | 533d1f1177368ccbd67588d21913863cafd7442c | 3168044825d74f776b4828cc43936eacf6859f18 | refs/heads/master | 2021-06-28T15:43:33.373380 | 2020-12-18T20:38:44 | 2020-12-18T20:38:44 | 191,447,999 | 1 | 1 | null | 2019-10-16T19:58:27 | 2019-06-11T20:56:00 | Jupyter Notebook | UTF-8 | Python | false | false | 972 | py | import urllib.request
import json
from datetime import datetime
from datetime import date
def download_weather_climate_division(date_to_start: str):
    """Fetch daily minimum-temperature county means for Illinois.

    Queries the RCC-ACIS GridData service from ``date_to_start`` (format
    ``YYYYMMDD``) through today.

    Returns the parsed ``data`` list from the JSON response, or None when
    the request fails (the error is printed, best-effort behaviour kept
    from the original).

    Bug fix: the original parsed the response into a local variable and
    never returned it, so callers always got None.
    """
    base_url = "http://data.rcc-acis.org/GridData"
    start_date = datetime.strptime(date_to_start, '%Y%m%d').strftime("%Y%m%d")
    end_date = datetime.today().strftime('%Y%m%d')
    params_mint = {"state": "IL",
                   "sdate": start_date,
                   "edate": end_date,
                   "grid": 21,
                   "elems": [{"name": "mint", "area_reduce": "county_mean"}]}
    headers = {"Content-Type": 'application/json'}
    data = json.dumps(params_mint).encode('utf-8')
    try:
        req = urllib.request.Request(base_url, data, headers)
        with urllib.request.urlopen(req) as response:
            the_page = response.read()
            return json.loads(the_page)['data']
    except Exception as e:
        # Best-effort: report the failure and return None instead of crashing.
        print(e)
        return None


if __name__ == '__main__':
    # Guarded so importing this module no longer triggers a network download.
    download_weather_climate_division('20190101')
| [
"jallen17@illinois.edu"
] | jallen17@illinois.edu |
08fed6bd9d3495b53532ebd0d362e54a2feacc00 | 68d0775ff5ec73901838137725642f83252c6130 | /src/python_tools/Spacecraft.py | c53b7e49e61625bca9125d014a670e3be4bd9800 | [] | no_license | bryanwweber/AWP | 3506aefc655288a6ab35b0f7e88a2fc0360eb78d | 0a316fe82cd61751a0acf5e58c966f54367b6970 | refs/heads/main | 2023-06-09T09:50:02.970273 | 2021-07-03T01:27:56 | 2021-07-03T01:27:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,744 | py | '''
AWP | Astrodynamics with Python by Alfonso Gonzalez
https://github.com/alfonsogonzalez/AWP
https://www.youtube.com/c/AlfonsoGonzalezSpaceEngineering
Spacecraft class definition
'''
# Python standard libraries
import os
import math as m
# 3rd party libraries
import numpy as np
import matplotlib.pyplot as plt
# Apply a dark theme globally to every figure produced by this module.
plt.style.use( 'dark_background' )
from scipy.integrate import ode
import spiceypy as spice
# AWP libraries
import orbit_calculations as oc
import numerical_tools as nt
import plotting_tools as pt
import planetary_data as pd
import spice_data as sd
def null_config():
    """Return the default Spacecraft configuration dictionary.

    Callers overlay their own entries on top of these defaults.
    """
    defaults = dict(
        cb=pd.earth,
        date0='2021-04-01',
        et0=None,
        frame='J2000',
        dt=100,
        orbit_state=[],
        coes=[],
        orbit_perts={},
        propagator='lsoda',
        stop_conditions={},
        print_stop=True,
        mass0=0,
        output_dir='.',
        propagate=True,
    )
    return defaults
class Spacecraft:
    """Single-spacecraft orbit propagator.

    A configuration dict (defaults from ``null_config``) selects the
    central body, the initial state (Cartesian state vector or classical
    orbital elements), the time span and step, perturbations, and stop
    conditions.  Unless ``config['propagate']`` is False, the orbit is
    propagated immediately on construction.
    """

    def __init__(self, config):
        """Merge ``config`` over the defaults and set up the propagation."""
        self.config = null_config()
        for key in config.keys():
            self.config[key] = config[key]

        self.orbit_perts = self.config['orbit_perts']
        self.cb = self.config['cb']

        # Classical orbital elements, if given, define the initial state.
        if self.config['coes']:
            self.config['orbit_state'] = oc.coes2state(
                self.config['coes'], mu=self.config['cb']['mu'])

        # A string tspan means "this many orbital periods".
        if type(self.config['tspan']) == str:
            self.config['tspan'] = float(self.config['tspan']) * \
                oc.state2period(self.config['orbit_state'], self.cb['mu'])

        self.steps = int(
            np.ceil(self.config['tspan'] / self.config['dt']) + 1)
        self.step = 1

        # Pre-allocated histories; column 6 of `states` is spacecraft mass.
        self.ets = np.zeros((self.steps, 1))
        self.states = np.zeros((self.steps, 7))
        self.alts = np.zeros((self.steps, 1))

        self.states[0, :6] = self.config['orbit_state']
        self.states[0, 6] = self.config['mass0']
        self.alts[0] = nt.norm(self.states[0, :3]) - self.cb['radius']

        self.assign_stop_condition_functions()
        self.assign_orbit_perturbations_functions()
        self.load_spice_kernels()

        if not os.path.exists(self.config['output_dir']):
            os.mkdir(self.config['output_dir'])

        self.solver = ode(self.diffy_q)
        self.solver.set_integrator(self.config['propagator'])
        self.solver.set_initial_value(self.states[0, :], 0)

        self.coes_calculated = False
        self.latlons_calculated = False

        if self.config['propagate']:
            self.propagate_orbit()

    def assign_stop_condition_functions(self):
        """Build the list of stop-condition callbacks.

        The deorbit check always runs; the others are opt-in via
        ``config['stop_conditions']``.  Each callback returns True to keep
        propagating and False to stop.
        """
        self.stop_conditions_map = {
            'max_alt': self.check_max_alt,
            'min_alt': self.check_min_alt,
            'exit_SOI': self.check_exit_SOI,
            'enter_SOI': self.check_enter_SOI
        }

        self.stop_condition_functions = [self.check_deorbit]

        for key in self.config['stop_conditions'].keys():
            self.stop_condition_functions.append(
                self.stop_conditions_map[key])

    def assign_orbit_perturbations_functions(self):
        """Collect the perturbation callbacks named in ``config['orbit_perts']``."""
        self.orbit_perts_funcs_map = {
            'J2': self.calc_J2
        }
        self.orbit_perts_funcs = []
        for key in self.config['orbit_perts']:
            self.orbit_perts_funcs.append(
                self.orbit_perts_funcs_map[key])

    def load_spice_kernels(self):
        """Load the leapseconds kernel and build the ephemeris-time grid."""
        spice.furnsh(sd.leapseconds_kernel)
        self.spice_kernels_loaded = [sd.leapseconds_kernel]

        if self.config['et0'] is not None:
            self.et0 = self.config['et0']
        else:
            self.et0 = spice.str2et(self.config['date0'])

        self.ets = np.arange(
            self.et0,
            self.et0 + self.config['tspan'] + self.config['dt'],
            self.config['dt'])

    def check_deorbit(self):
        """Stop when altitude drops below the central body's deorbit altitude."""
        if self.alts[self.step] < self.cb['deorbit_altitude']:
            if self.config['print_stop']:
                self.print_stop_condition('deorbit altitude')
            return False
        return True

    def check_max_alt(self):
        """Stop when altitude rises above the configured maximum."""
        if self.alts[self.step] > self.config['stop_conditions']['max_alt']:
            if self.config['print_stop']:
                self.print_stop_condition('max altitude')
            return False
        return True

    def check_min_alt(self):
        """Stop when altitude falls below the configured minimum.

        Bug fix: the comparison was ``>`` (a copy of the max-altitude
        check), which stopped while *above* the minimum; it must be ``<``.
        """
        if self.alts[self.step] < self.config['stop_conditions']['min_alt']:
            if self.config['print_stop']:
                self.print_stop_condition('min altitude')
            return False
        return True

    def check_exit_SOI(self):
        """Stop when the spacecraft leaves the central body's sphere of influence."""
        if nt.norm(self.states[self.step, :3]) > self.cb['SOI']:
            if self.config['print_stop']:
                self.print_stop_condition('%s SOI exit' % self.cb['name'])
            return False
        return True

    def check_enter_SOI(self):
        """Stop when the spacecraft enters the SOI of a second body.

        The target body dict comes from
        ``config['stop_conditions']['enter_SOI']``.
        """
        body = self.config['stop_conditions']['enter_SOI']
        r_cb2body = spice.spkgps(
            body['SPICE_ID'], self.ets[self.step],
            self.config['frame'], self.cb['SPICE_ID'])[0]
        r_sc2body = r_cb2body - self.states[self.step, :3]
        if nt.norm(r_sc2body) < body['SOI']:
            # NOTE(review): unlike the other checks this prints regardless of
            # config['print_stop'] -- kept as-is for behavioural parity.
            self.print_stop_condition('%s SOI entry' % body['name'])
            return False
        return True

    def print_stop_condition(self, parameter):
        """Report which stop condition ended the propagation."""
        print(f'Spacecraft has reached {parameter}.')

    def check_stop_conditions(self):
        """Return True while every stop-condition callback allows propagation."""
        for stop_condition in self.stop_condition_functions:
            if not stop_condition():
                return False
        return True

    def calc_J2(self, et, state):
        """Return the J2 (oblateness) perturbing acceleration for ``state``."""
        z2 = state[2] ** 2
        norm_r = nt.norm(state[:3])
        r2 = norm_r ** 2
        tx = state[0] / norm_r * (5 * z2 / r2 - 1)
        ty = state[1] / norm_r * (5 * z2 / r2 - 1)
        tz = state[2] / norm_r * (5 * z2 / r2 - 3)
        return 1.5 * self.cb['J2'] * self.cb['mu'] * \
            self.cb['radius'] ** 2 \
            / r2 ** 2 * np.array([tx, ty, tz])

    def diffy_q(self, et, state):
        """Equations of motion: d/dt of [r, v, mass] at solver time ``et``."""
        rx, ry, rz, vx, vy, vz, mass = state
        r = np.array([rx, ry, rz])
        v = np.array([vx, vy, vz])
        norm_r = nt.norm(r)
        mass_dot = 0.0
        state_dot = np.zeros(7)
        # The solver runs on relative time; shift to absolute ephemeris time.
        et += self.et0

        # Two-body acceleration plus any configured perturbations.
        a = -r * self.cb['mu'] / norm_r ** 3
        for pert in self.orbit_perts_funcs:
            a += pert(et, state)

        state_dot[:3] = v
        state_dot[3:6] = a
        state_dot[6] = mass_dot
        return state_dot

    def propagate_orbit(self):
        """Integrate the orbit until tspan elapses or a stop condition fires."""
        print('Propagating orbit..')

        while self.solver.successful() and self.step < self.steps:
            self.solver.integrate(self.solver.t + self.config['dt'])
            self.states[self.step] = self.solver.y
            self.alts[self.step] = nt.norm(self.solver.y[:3]) - \
                self.cb['radius']

            if self.check_stop_conditions():
                self.step += 1
            else:
                break

        # Trim pre-allocated arrays to the number of steps actually taken.
        self.ets = self.ets[:self.step]
        self.states = self.states[:self.step]
        self.alts = self.alts[:self.step]

    def calc_coes(self):
        """Convert each stored state to classical orbital elements."""
        print('Calculating COEs..')
        self.coes = np.zeros((self.step, 6))

        for n in range(self.step):
            self.coes[n, :] = oc.state2coes(
                self.states[n, :6], mu=self.cb['mu'])

        # Element history relative to the initial elements.
        self.coes_rel = self.coes[:] - self.coes[0, :]
        self.coes_calculated = True

    def calc_apoapses_periapses(self):
        """Compute apoapsis/periapsis radii from semi-major axis and eccentricity."""
        if not self.coes_calculated:
            self.calc_coes()

        self.apoapses = self.coes[:, 0] * (1 + self.coes[:, 1])
        self.periapses = self.coes[:, 0] * (1 - self.coes[:, 1])

    def plot_3d(self, args=None):
        """Plot the 3D trajectory; ``args`` is forwarded to the plotting helper."""
        if args is None:  # avoid a shared mutable default argument
            args = {'show': True}
        pt.plot_orbits([self.states[:, :3]], args)

    def plot_groundtracks(self, args=None):
        """Plot groundtracks.

        NOTE(review): relies on ``self.calc_latlons``, which is not defined
        in this class -- calling this raises AttributeError; confirm where
        calc_latlons is meant to live.
        """
        if args is None:
            args = {'show': True}
        if not self.latlons_calculated:
            self.calc_latlons()
        pt.plot_groundtracks([self.latlons[:]], args)

    def plot_coes(self, args=None, step=1):
        """Plot classical orbital elements, sampling every ``step``-th state."""
        if args is None:
            args = {'show': True}
        if not self.coes_calculated:
            self.calc_coes()
        pt.plot_coes(self.ets[::step], self.coes[::step], args)
| [
"alfonso8gonzalez@gmail.com"
] | alfonso8gonzalez@gmail.com |
d34d9478e0a08733cd8446aad7f2470cd93da574 | eb267b933b04739f05c9aff83698e70f1d5e3dce | /telegram.py | 0313ffc7a8f75e64754e0ac8bd785065e30e91e3 | [] | no_license | Tabaktom/StockTips | 3f9284910d9c0b5829aa91e93855d47ceb0f1ee3 | 95d4697eb05a55977bc4a69a69f6da8842daf3d7 | refs/heads/master | 2022-12-02T20:47:04.230078 | 2020-08-22T16:48:59 | 2020-08-22T16:48:59 | 289,527,391 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 471 | py |
def share_telegram(message):
    """Send *message* to a preconfigured Telegram chat via telepot.

    Requires the third-party ``telepot`` package and a
    ``telegram_chat_token`` defined in the local ``secrets`` module.
    """
    # Imported lazily so the module can be loaded without telepot installed.
    import telepot
    from secrets import telegram_chat_token

    # NOTE(review): 'CHAT_ID' is a placeholder -- substitute the real chat id
    # (the commented-out getUpdates() lookup in the original hinted at how
    # to discover it).  Removed: unused bot name, unused bot_id, and an
    # unused getUpdates() network call.
    send_id = 'CHAT_ID'
    bot = telepot.Bot(telegram_chat_token)
    bot.sendMessage(send_id, message)
| [
"noreply@github.com"
] | Tabaktom.noreply@github.com |
bb9ab0f73f6c25d085278975445f6e1ad3916472 | 6675cc79f2bad3cf8d4bbb4517c4660a14dcd425 | /Spotify/extractdata.py | 2c9c46d60ae38f4e6a01ee72b839fbe0123b7c9a | [] | no_license | minh-le1994/MinhLe | 8390a85c014421922cc0374475ce72a634e269a8 | b9392b1cffe41486a9f296d83f11123c5bded8cb | refs/heads/master | 2022-12-11T23:10:14.188875 | 2020-07-12T16:00:43 | 2020-07-12T16:00:43 | 250,595,997 | 0 | 0 | null | 2022-09-23T22:39:29 | 2020-03-27T17:11:56 | HTML | UTF-8 | Python | false | false | 3,290 | py | import requests
import pandas as pd
import json
from authorisation import SpotifyAuthorisation
class DataExtracter():
    """
    Pulls a user's 50 most recently played songs and their audio features
    from the Spotify Web API.  Initialised with the OAuth bearer token
    used to authorise every request.
    """

    def __init__(self, token):
        # All endpoints are appended to this versioned base URL.
        self.base_query = "https://api.spotify.com/v1"
        self.token = token

    # Read last fifty songs
    def get_recent_songs(self):
        """
        Request the user's 50 most recently played tracks and return the
        raw ``requests`` response object.
        """
        endpoint = "/me/player/recently-played"
        params = {"limit": 50}
        header = {"Authorization": "Bearer {}".format(self.token)}
        response = requests.get("{}{}".format(self.base_query, endpoint),
                                params=params,
                                headers=header
                                )
        print("Song History Request Status: {}".format(response.status_code))
        return response

    def get_song_properties(self, spotify_ids: list):
        """
        Return the audio-features response for the given Spotify track ids.
        """
        endpoint = "audio-features"
        response = requests.get("{}/{}".format(self.base_query, endpoint),
                                params={"ids": ",".join(spotify_ids)},
                                headers={"Authorization": "Bearer {}".format(self.token)})
        print("Song Properties Request Status: {}".format(response.status_code))
        return response

    def extract_data(self):
        """
        Combine the recent-song history with the audio features and return
        the result as a pandas DataFrame (also stored on ``self.song_data``).
        """
        response = self.get_recent_songs()
        dic = {"timestamp": [], "name": [], "id": [], "uri": [], "popularity": [], "object_type": [], "artist": [], "album": []}
        for element in response.json()["items"]:
            dic["timestamp"].append(element["played_at"])
            dic["name"].append(element["track"]["name"])
            dic["id"].append(element["track"]["id"])
            dic["uri"].append(element["track"]["uri"])
            dic["object_type"].append(element["context"]["type"])
            dic["popularity"].append(element["track"]["popularity"])
            # Bug fix: the artist names and the album name were appended to
            # swapped keys ("album" held the artists and vice versa).
            dic["artist"].append(",".join([artist["name"] for artist in element["track"]["artists"]]))
            dic["album"].append(element["track"]["album"]["name"])
        keys = ["danceability", "energy", "key", "loudness", "mode", "speechiness", "acousticness", "instrumentalness", "liveness",
                "valence", "tempo", "duration_ms", "time_signature", "id", "uri"]
        response = self.get_song_properties(dic["id"])
        # Reset/create one column per audio feature ("id"/"uri" are refilled
        # from the features response, matching the original behaviour).
        for key in keys:
            dic[key] = []
        for element in response.json()["audio_features"]:
            # A missing feature (or a null entry) is recorded as 0 so every
            # column keeps the same length.
            for key in keys:
                try:
                    dic[key].append(element[key])
                except (KeyError, TypeError):
                    dic[key].append(0)

        self.song_data = pd.DataFrame(dic)
        return self.song_data
"khac.minh.le@outlook.de"
] | khac.minh.le@outlook.de |
5466f06013d91cbc89027ba74e9665d8a1e56a94 | 4726d52edb8a5f091d2bd52acec561d77fb5f73d | /backend/rest/invoiceDetails/migrations/0002_auto_20201117_1158.py | a18e699d67ce0291aeb9af72f0817e012437d818 | [] | no_license | ChristianTaborda/Hyperfoods | 5d34252c1c3f91a7d899cb842007427007d3ddc3 | 47ac16b6adca28f02ba0f227acae431daeda1304 | refs/heads/master | 2023-02-06T12:00:53.884171 | 2020-12-30T23:11:41 | 2020-12-30T23:11:41 | 325,666,350 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 826 | py | # Generated by Django 3.1 on 2020-11-17 16:58
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration (app ``invoiceDetails``).

    Alters InvoiceDetail so that both the ``comboInvoiceDetail`` and
    ``productInvoiceDetail`` foreign keys accept NULL (deletes cascade).
    """

    dependencies = [
        ('combos', '0002_combo_imagecombo'),
        ('products', '0003_auto_20201116_2124'),
        ('invoiceDetails', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='invoicedetail',
            name='comboInvoiceDetail',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='combos.combo'),
        ),
        migrations.AlterField(
            model_name='invoicedetail',
            name='productInvoiceDetail',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='products.product'),
        ),
    ]
| [
"christian.taborda@correounivalle.edu.co"
] | christian.taborda@correounivalle.edu.co |
4aa6bd7f048cdaa6fcb9c0294eb8ec82785ce637 | a3e1bf40dc7b24e559ab69fbb50fa9724b4c3e20 | /scorecenter/scorecenter/settings.py | 0ddcd1102022047bfb894c3b518de21b090cfc8f | [] | no_license | andresmacha/Quiniela | e260067f80db94f023168e6073d847b52c371afe | a5e947cca473b57ff757fc677995ab2b3f03f306 | refs/heads/master | 2016-09-05T14:38:44.542513 | 2013-02-12T14:58:51 | 2013-02-12T14:58:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,646 | py | # Django settings for scorecenter project.
# NOTE(review): DEBUG should be disabled in production deployments.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'champions',                      # Or path to database file if using sqlite3.
        'USER': 'root',                      # Not used with sqlite3.
        # NOTE(review): credentials are hard-coded and committed; move the
        # password (and SECRET_KEY below) to environment variables.
        'PASSWORD': '132804',                  # Not used with sqlite3.
        'HOST': '',                      # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '3306',                      # Set to empty string for default. Not used with sqlite3.
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True

# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = '/Users/AndresMacha/Documents/Ejemplos/scorecenter/'

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
#    'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

# Make this unique, and don't share it with anybody.
# NOTE(review): SECRET_KEY is committed to source control; rotate it and
# load it from the environment instead.
SECRET_KEY = 'pu4-otij(8qsr948-@_d(8$n@oyn!492#rlk)wz=+tezr5#y0#'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
#     'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    #'django.middleware.csrf.CsrfMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    #'django.contrib.csrf.middleware.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    #'django.middleware.csrf.',
    #'django.contrib.csrf.middleware.CsrfResponseMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleCsrfResponseMiddlewareware',
)

ROOT_URLCONF = 'scorecenter.urls'

# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'scorecenter.wsgi.application'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    STATIC_ROOT + 'templates/',
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'JuegoApp',
    'PrediccionApp',
    # Bug fix: 'django.contrib.auth' and 'django.contrib.sessions' were
    # listed a second time here; Django requires application entries in
    # INSTALLED_APPS to be unique.
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
)

# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
| [
"andresalejandro13@hotmail.com"
] | andresalejandro13@hotmail.com |
6e2d152884470b76fab1e4f8be0c8476ae6e0fb1 | f29e8c30b9f7b66cb66bfb634608adec74a4aee0 | /012desafio - ler preco, retorna desconto.py | f26b56ea286e0bc9a308999fc0202fc8be53aad4 | [] | no_license | brunoparodi/Curso-GUANABARA | acafe1390ccd2ba5648ca30f73f54b95a6c57201 | 16b7a293a54f1a471fa07830bc66709a88fceb79 | refs/heads/master | 2020-04-24T09:16:12.095977 | 2019-02-21T11:17:01 | 2019-02-21T11:17:01 | 171,857,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 155 | py | preco = float(input('Informe o preço do produto: R$'))
desconto = preco * (5 / 100)
print('O preço com desconto é: R${:.2f}.'.format(preco - desconto))
| [
"bruno@parodi.com.br"
] | bruno@parodi.com.br |
de8a2eb24ef3c4273c3735cc6a55be8ff4758247 | 347ba5b3b5c171be17bc201d87d580b1b0a8e59c | /templates/test.py | fddb06cd2dcafdbe7f13ed21118d41b0d10c5567 | [] | no_license | hf618/Task2 | 589be10b66325ed538ed98542d618149682477f3 | e4597bc4ee4998cfa2e9abcf32c327dd8a75619a | refs/heads/master | 2023-09-01T22:27:41.845475 | 2021-10-08T17:00:12 | 2021-10-08T17:00:12 | 415,053,667 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 375 | py | from database import * # 自己写的数据库函数的包
import random
import copy
import numpy as np
a=['kim','hwang','lee','ye','wu']
#a = gain_namelist() # 名字列表
ori = a #原始数据
random.shuffle(a) # 随机排序后名字列表
print(a)
baocun = copy.deepcopy(a)
a.pop(1)
print(a)
print(baocun)
samples = random.sample(a, 3)
print(samples)
| [
"1208927463@qq.com"
] | 1208927463@qq.com |
faaabc87d530eda66341796909e94a28dc6d25c5 | b306aab9dcea2dd83dda700bc9f7b9f1a32cff3a | /CAIL2020/cocr/det_infer.py | a89f3a16932ef293bd9a8018db8f313597098ffd | [
"Apache-2.0"
] | permissive | Tulpen/CAIL | d6ca9981c7ea2603ae61675ba330a9614cd9398d | c4cfa98ab4ecedbce34a7a5a186830486047540c | refs/heads/master | 2023-04-23T20:07:56.774530 | 2021-04-16T13:18:36 | 2021-04-16T13:18:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,839 | py | import argparse
import os
import sys
import pathlib
import torch
from torch import nn
from torchvision import transforms
from torchocr.networks import build_model
from torchocr.datasets.det_modules import ResizeShortSize
from torchocr.postprocess import build_post_process
import cv2
from matplotlib import pyplot as plt
from torchocr.utils import draw_ocr_box_txt, draw_bbox
class DetInfer:
def __init__(self, model_path):
ckpt = torch.load(model_path, map_location='cpu')
cfg = ckpt['cfg']
self.model = build_model(cfg.model)
state_dict = {}
for k, v in ckpt['state_dict'].items():
state_dict[k.replace('module.', '')] = v
self.model.load_state_dict(state_dict)
self.device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
self.model.to(self.device)
self.model.eval()
self.resize = ResizeShortSize(736, False)
self.post_proess = build_post_process(cfg.post_process)
self.transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=cfg.dataset.train.dataset.mean, std=cfg.dataset.train.dataset.std)
])
def predict(self, img, is_output_polygon=False):
# 预处理根据训练来
data = {'img': img, 'shape': [img.shape[:2]], 'text_polys': []}
data = self.resize(data)
tensor = self.transform(data['img'])
tensor = tensor.unsqueeze(dim=0)
tensor = tensor.to(self.device)
out = self.model(tensor)
box_list, score_list = self.post_proess(out, data['shape'], is_output_polygon=is_output_polygon)
box_list, score_list = box_list[0], score_list[0]
if len(box_list) > 0:
idx = [x.sum() > 0 for x in box_list]
box_list = [box_list[i] for i, v in enumerate(idx) if v]
score_list = [score_list[i] for i, v in enumerate(idx) if v]
else:
box_list, score_list = [], []
return box_list, score_list
def init_args():
import argparse
parser = argparse.ArgumentParser(description='PytorchOCR infer')
parser.add_argument('--model_path', required=False, type=str, help='rec model path', default=r'F:\CAIL\CAIL2020\cocr\model\db_ResNet50_vd_icdar2015withconfig.pth')
parser.add_argument('--img_path', required=False, type=str, help='img path for predict', default=r'F:\CAIL\CAIL2020\cocr\data\icdar2015\detection\test\imgs\img_2.jpg')
args = parser.parse_args()
return args
def resize(img, scale_percent = 60):
scale_percent = 60 # percent of original size
width = int(img.shape[1] * scale_percent / 100)
height = int(img.shape[0] * scale_percent / 100)
dim = (width, height)
# resize image
resized = cv2.resize(img, dim, interpolation=cv2.INTER_AREA)
return resized
if __name__ == '__main__':
# ===> 获取配置文件参数
parser = argparse.ArgumentParser(description='train')
parser.add_argument('--config', type=str, default='config/det.json',
help='train config file path')
parser.add_argument('-m','--model_path', required=False, type=str, help='rec model path', default=r'F:\CAIL\CAIL2020\cocr\model\det-model.bin')
parser.add_argument('-i','--img_path', required=False, type=str, help='img path for predict', default=r'F:\CAIL\CAIL2020\cocr\data\t2\architecture (1).jpg')
args = parser.parse_args()
# for i in range(1,11):
img = cv2.imread(args.img_path)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
if img.shape[0] > 1500:
img = resize(img, img.shape[0]*100./1024)
model = DetInfer(args.model_path)
box_list, score_list = model.predict(img, is_output_polygon=True)
img = draw_ocr_box_txt(img, box_list)
img = draw_bbox(img, box_list)
plt.imshow(img)
plt.show()
| [
"bangtech@sina.com"
] | bangtech@sina.com |
1508d1b59f6898158e81fe6c5d740f5229815180 | 242f7b0ffeec430d67b262b724165fad343fda68 | /combinatory_analysis/comb-rep.py | 361f318f4be39a2236f0b7c25a0966d40a8e629b | [] | no_license | lnbe10/Math-Thinking-for-Comp-Sci | ea12dc212ea654b11e5499b317db37b1287bdea4 | c12f3b84c6831ef5a6bb02c04390223f17167e14 | refs/heads/main | 2023-02-26T13:50:13.755274 | 2021-02-05T07:10:27 | 2021-02-05T07:10:27 | 332,588,364 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,395 | py |
from itertools import combinations_with_replacement
# Question 1
# Twenty people are voting for one of 5 candidates.
# They have secret ballot, each voter votes for one of 5 candidates.
# The result of an election is the number of votes for each of the candidate.
# How many possible results can this vote have (the result of the vote is
# determined by the number of votes for each candidate)?
# ( 20 + (5-1) ) = ( 24 )
# ( 5-1 ) ( 4 )
#
count = 0;
for c in combinations_with_replacement("ABCDE", 20):
count+=1;
print(count);
#Question 2
#We have 9 identical candies and we want to distribute them between 3 different
#sections of our bag. It does not matter which candies go to which section.
#How many ways do we have to do it?
# ( 9 + (3-1) ) = ( 11 )
# ( 3-1 ) ( 2 )
#
count = 0;
for c in combinations_with_replacement("ABC", 9):
count+=1;
print(count);
#Question 1
#How many
#four-digit numbers are there such that their digits are non-increasing,
#that is each next digit is not greater than the previous one?
#Three-digit numbers are also four-digit, they just start with 0.
def fun(n):
count = 0;
for i in range(n):
for j in range(n):
for k in range(n):
for l in range(n):
if i >= j and j >= k and k >= l:
count += 1
print('for n = %d we have count = %d' %(n,count));
return
fun(10) | [
"luankevinfmb@hotmail.com"
] | luankevinfmb@hotmail.com |
4293b3acde9fd16c7d98f4e36d670978acca31a3 | f73f5f5d0770f731b5e76da39131ff36c9fde11e | /django_libs/tests/models_tests.py | 7c46c414daad0a285198d5f569b7e8cfa6ef2ad1 | [
"MIT"
] | permissive | SurferTank/django-libs | fcede8d7dff4ea58c728d05ff0030a3ce892a08e | 6ad3f7cf5f9a7a4848557d73af4a93054b34e27f | refs/heads/master | 2021-02-09T01:28:32.153104 | 2020-10-26T03:11:23 | 2020-10-26T03:11:23 | 244,222,230 | 0 | 0 | MIT | 2020-03-01T20:55:22 | 2020-03-01T20:55:21 | null | UTF-8 | Python | false | false | 519 | py | """Tests for the models of the ``django_libs`` app."""
from django.test import TestCase
from ..models import ColorField
from ..widgets import ColorPickerWidget
class ColorFieldTestCase(TestCase):
"""Tests for the ``ColorField`` model."""
longMessage = True
def test_functions(self):
color_field = ColorField()
color_field.formfield
self.assertIsInstance(
color_field.formfield().widget, ColorPickerWidget, msg=(
'Should add the color field widget.'))
| [
"tobias.lorenz@bitmazk.com"
] | tobias.lorenz@bitmazk.com |
f3ffceb64896a81db9b59622fbfb0a1798476e5b | ccc412c09af06cd2f55c74702436378f97ef460f | /examples/Texas_mapping_objects.py | 87bbab0b721ce3ea7b135807a6dc0fe825de4dbf | [
"MIT"
] | permissive | rasquith/choroshape | bde5237418d3e43c4321906c99732a285dfd8394 | 0e88465173fed56f0edb31b73a0a1962f3180464 | refs/heads/master | 2020-07-07T23:35:43.863863 | 2018-09-25T01:14:11 | 2018-09-25T01:14:11 | 67,946,286 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,744 | py | '''This file has objects used for making maps of Texas. Objects here are
intended for maps of Texas made with specific style guidelines'''
import pandas as pd
Texas_city_label_dict = {'city_names': ['El Paso', 'Odessa', 'Amarillo',
'Lubbock', 'Fort Worth', 'Dallas',
'Waco', 'Tyler', 'Brownsville',
'San Antonio', 'Austin', 'Laredo',
'Corpus Christi', 'College Station',
'Houston', 'Galveston'],
'city_positions': ['bot_left', 'top_right', 'top_left',
'top_left', 'bot_left', 'top_right',
'top_left', 'top_left', 'bot_left',
'top_left', 'top_left', 'bot_left',
'bot_right', 'top_left',
'top_left', 'bot_right'],
'dx': [.005, .005, .005,
.005, .005, .005,
.005, .005, .005,
.005, .005, .0065,
.024, .005,
.005, .005],
'dy': [.005, .005, .005,
.005, .005, .005,
.005, .005, .005,
.005, .005, .004,
.005, .005,
.005, .005]}
Texas_city_label_df = pd.DataFrame.from_dict(Texas_city_label_dict)
| [
"rachel.asquith@gmail.com"
] | rachel.asquith@gmail.com |
4467952bc3588edaf21b854d2cb536eb9a03be12 | 03034837c5f10d19fcc4dc51388f056ec43fd1d2 | /pro21.py | a5fa31a3792fe95d280dc9afd594c679ff30987a | [] | no_license | shaukhk01/project01 | e95c19844757c631f7ffbdd910b20316f49a945b | 79cfe784612fdbb4816c9fc3fc7222c845a3268f | refs/heads/master | 2020-06-26T07:20:49.844532 | 2019-08-20T06:06:40 | 2019-08-20T06:06:40 | 199,569,813 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140 | py | import re
def main():
matched = re.finditer('a{2}','abcdeaakkkaa')
for m in matched:
print(m.start(),'--',m.group())
main()
| [
"shaukhk01@gmail.com"
] | shaukhk01@gmail.com |
574d7ca56487ba0eab4dd0ce88086455d0675b1a | bf330ac532c742b06393d170104da3995308dbf9 | /fitlife/migrations/0002_products.py | faf0eb847e3f6d99b7f253eaadf95ac35316ab75 | [] | no_license | kml95/DjangroProject | 375c3ce292e35817c3af566ffcfbf20831dc96c0 | a8f33bbab7e5ffd2a666118d1d5d32c0b0655aba | refs/heads/master | 2021-04-06T19:37:36.296828 | 2017-06-01T19:28:08 | 2017-06-01T19:28:08 | 125,425,390 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 650 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-05-31 16:49
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fitlife', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Products',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50, null=True)),
('type', models.CharField(max_length=50, null=True)),
],
),
]
| [
"adrian.kalinowski95@gmail.com"
] | adrian.kalinowski95@gmail.com |
a6a8f3e31af3d7f8c62c42ef83ed0d10121c2482 | 3a3b1b35bb1db050aecb7cfe1828516f1ce40aec | /assignment7/main.py | e34c5f1dd9bc150913bf150a9a62aee5f458acbd | [] | no_license | Shadesfear/SIP | 5249f51ec93dc92751944b3513e8214234f7af60 | e9081c6b87189bd16b074a161824b3743ca7b632 | refs/heads/master | 2021-01-06T16:23:15.379319 | 2020-03-23T22:53:52 | 2020-03-23T22:53:52 | 241,395,445 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 23 23:52:16 2020
"""
import exer1, exer2
# NOTE(review): `exer1` and `exer2` are modules, so calling them directly
# raises TypeError ("'module' object is not callable").  This presumably
# should invoke an entry point such as exer1.main() -- confirm against the
# exercise modules before changing.
if __name__ == "__main__":
    exer1()
    exer2()
"Blaedelblaedel@gmail.com"
] | Blaedelblaedel@gmail.com |
45a009dba2770c9039de56e2489370042d17c258 | 1ad42b8f706287b9c830590a7da8e83fbea57bb6 | /typeDensity.py | 5a7c17285f66e4cde588afe7ebfa9b7fa380bba8 | [] | no_license | ntBre/CNTResearch | 7e54d2dbc11bddd05cda6e998358c0dd9d74fb16 | 41a32423966cf09da5580339dfa933c45dc595bf | refs/heads/master | 2021-04-27T01:55:39.466045 | 2018-04-01T01:07:09 | 2018-04-01T01:07:09 | 122,684,605 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,858 | py | #Sectional distribution
def innertube(sections):
    """Read a CHILL-algorithm .lammpstrj trajectory and accumulate, per radial
    section of a nanotube, the per-frame fraction of each of the five water
    types, writing one plottable ``.dens`` file per type plus a total file.

    sections -- number of radial bins between r = 0 and the tube radius.

    Python 2 script (uses ``raw_input``). Prompts the user for the trajectory
    file name and the expected tube radius.
    """
    file_name = ""
    tube_radius = 0
    tube_length = 80  # only molecules with 0 < z < tube_length are counted
    # types[t][s]: count of water type t+1 in radial section s, current frame.
    types = [[0] * sections for i in range(5)]
    # average_types[t][s]: sum over frames of per-frame fractions (normalised
    # by frame_counter when written out).
    average_types = [[0] * sections for i in range(5)]
    total_water = [0 for i in range(sections)]
    atoms = False  # True while atom records of the current frame are being read
    frame_counter = 0
    water_counter = 0  # waters counted in the current frame
    total_counter = 0  # all atom records read in the current frame
    total_atoms = 0
    section = 0
    x_low = 0
    x_high = 0
    y_low = 0
    y_high = 0
    z_low = 0
    z_high = 0
    x = 0
    y = 0
    z = 0
    r = 0
    # Prompt until a non-empty file name is entered.
    while file_name == "":
        file_name = raw_input("Enter the file to be read: ")
    tube_radius = float(raw_input("Enter the expected tube radius: "))
    with open(file_name, 'r') as infile:
        # NOTE(review): without ``from __future__ import print_function`` this
        # prints a tuple under Python 2.
        print(infile, "is being read")
        # One output file per water type, plus a totals file.
        out_name1 = file_name + ".hex.dens"
        out_name2 = file_name + ".cub.dens"
        out_name3 = file_name + ".ifcl.dens"
        out_name4 = file_name + ".ldl.dens"
        out_name5 = file_name + ".hdl.dens"
        out_name6 = file_name + ".tot.dens"
        out1 = open(out_name1, 'w')
        out2 = open(out_name2, 'w')
        out3 = open(out_name3, 'w')
        out4 = open(out_name4, 'w')
        out5 = open(out_name5, 'w')
        out6 = open(out_name6, 'w')
        out_list = [out1, out2, out3, out4, out5, out6]
        out1.write("#Section Proportion Hexagonal Ice \n")
        out2.write("#Section Cubic Ice\n")
        out3.write("#Section Interfacial\n")
        out4.write("#Section LDL\n")
        out5.write("#Section HDL\n")
        out6.write("#Radius Waters\n")
        for line in infile:
            # Frame header: read the atom count and box bounds, then advance
            # to the first atom record of the frame.
            if line == "ITEM: TIMESTEP\n":
                line = skip(line, infile, 3)
                split = line.split()
                total_atoms = split[0]
                line = skip(line, infile, 2)
                split = line.split()
                x_low = split[0]
                x_high = split[1]
                line = skip(line, infile, 1)
                split = line.split()
                y_low = split[0]
                y_high = split[1]
                line = skip(line, infile, 1)
                split = line.split()
                z_low = split[0]
                z_high = split[1]
                line = skip(line, infile, 2)
                frame_counter += 1
                atoms = True
            if atoms:
                total_counter += 1
                split = line.split()
                # Convert scaled coordinates to real box coordinates.
                x = to_normal_coord(x_low, x_high, split[2])
                y = to_normal_coord(y_low, y_high, split[3])
                z = to_normal_coord(z_low, z_high, split[4])
                r = (x**2+y**2)**.5
                # Atom types 1-5 are water; bin waters inside the tube by radius.
                if int(split[1]) < 6 and z > 0 and z < tube_length and r < tube_radius:
                    types[int(split[1])-1][get_section(r,sections,tube_radius)] += 1
                    water_counter += 1
                # End of frame: fold this frame's counts into the running sums
                # as fractions of the frame's water count.
                if total_counter == int(total_atoms):
                    for i in range(len(types)):
                        for j in range(len(types[i])):
                            if water_counter > 0:
                                average_types[i][j] += (float(types[i][j])/float(water_counter))
                            else:
                                average_types[i][j] += (float(types[i][j]))
                    water_counter = 0
                    total_counter = 0
                    types = [[0] * sections for i in range(5)]
                    atoms = False
    infile.close()
    # Write per-type averages (normalised by frame count) and accumulate the
    # per-section totals for the .tot.dens file.
    for i in range(len(average_types)):
        for j in range(len(average_types[i])):
            total_water[j] += average_types[i][j]
            out_list[i].writelines(str(j*(tube_radius/sections))+" "+str(average_types[i][j]/frame_counter)+"\n")
    for i in range(len(total_water)):
        out6.writelines(str(i*tube_radius/sections)+" "+str(total_water[i]/frame_counter)+"\n")
def skip(iterator, file, num):
    """Advance *file* by *num* lines and return the last line read.

    Python 2 only: relies on the file object's ``.next()`` method.
    """
    for i in range(num):
        iterator = file.next()
    return iterator
def to_normal_coord(low, high, relative_coord):
    """Map a scaled LAMMPS coordinate in [0, 1] onto the real axis [low, high]."""
    box_min = float(low)
    box_max = float(high)
    fraction = float(relative_coord)
    return (box_max - box_min) * fraction + box_min
def get_section(r, num_sections, tube_radius):
    """Return the index of the radial bin that *r* falls into."""
    bin_width = tube_radius / num_sections
    return int(r / bin_width)
innertube(100)
| [
"bwestbro@stedwards.edu"
] | bwestbro@stedwards.edu |
d46615282317fbeef2fe89137c6aeb7e9cd8cbc0 | 3f99a5be32a57c59fbf3a0dfcc74594215fb3001 | /bi_account_invoice_layout/models/account_invoice_inherit.py | 45af6190f2ffd01511cba56dae3a6a9002c8e417 | [] | no_license | Brahim820/dulexlab | b8e6891df22a4acd3d52775e61b2e9ad122a583d | 7f8f7519acb08b0b2704aeac61878f2c459d53e1 | refs/heads/master | 2020-08-01T02:52:35.343291 | 2019-09-02T09:08:52 | 2019-09-02T09:08:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,487 | py | # -*- coding: utf-8 -*-
from odoo import fields, models, api, _
class AccountInvoiceLineInherit(models.Model):
_inherit = 'account.invoice.line'
public_price_lst = fields.Monetary(string='Public Price', compute='get_public_price_lst', store=True)
phd_disc = fields.Float(string='PHD %')
dd_disc = fields.Float(string='DD %')
cd_disc = fields.Float(string='CD %')
@api.multi
@api.depends('product_id', 'price_unit', 'invoice_line_tax_ids')
def get_public_price_lst(self):
for line in self:
if line.invoice_line_tax_ids:
prod_public_price_rate = self.env['ir.config_parameter'].sudo().get_param('prod_public_price_rate')
line.public_price_lst = line.price_unit * float(prod_public_price_rate)
else:
line.public_price_lst = line.price_unit
class AccountInvoiceInherit(models.Model):
_inherit = 'account.invoice'
discount_amount = fields.Monetary('', compute='get_discount_amount', store=True, track_visibility='onchange')
@api.multi
@api.depends('invoice_line_ids', 'invoice_line_ids.quantity', 'invoice_line_ids.price_unit',
'invoice_line_ids.discount')
def get_discount_amount(self):
for invoice in self:
disc_amount = 0.0
for line in invoice.invoice_line_ids:
disc_amount += (line.quantity * line.price_unit * line.discount / 100)
invoice.discount_amount = disc_amount
| [
"mkhairy@iskydev.com"
] | mkhairy@iskydev.com |
6d858a6f7e8b2f12d4ec8b67ef93d19370d79f0f | 8398dcf8b46493c1487f23ba259d9140cc7fe556 | /visionMiniProject/kakao/face_input.py | d6bdb51f93c4d346f83121bf55e2472a8c479aa5 | [] | no_license | JU-RI/visionMiniProject | 8ac5dee87a48deafb80c0d66c9127d9368dcb4ad | b55e01a2512f976524e7d5b8c41327f234eb840f | refs/heads/master | 2020-05-23T23:59:08.187333 | 2019-05-18T07:50:05 | 2019-05-18T07:50:05 | 187,006,579 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,293 | py | import requests
import argparse
import sys
import math
import random
from collections import Counter
import searchdb
from PIL import Image, ImageTk
import tkinter as tk
from tkinter import filedialog
import tkinter.messagebox
photolabel, photolabel2, info_label = None, None, None
gender, age, corner = None, None, None
root = None
API_URL = "https://kapi.kakao.com/v1/vision/face/detect" #상수 -> 대문자로 씀(값을 바꾸지 않을 것이라는 뜻)
MYAPP_KEY = "9668247f106b2c87b5121a0c57f40b4a" #REST API 키
def file_open():
    """Button handler: let the user pick a photo, show it, and run Kakao
    face detection on it to estimate gender and age.

    Stores the results in the module globals ``gender``, ``age`` and
    ``corner`` (a randomly chosen store corner) for later DB registration.
    """
    global filelabel, photolabel, gender, age, corner
    filename = filedialog.askopenfilename(initialdir="C:/Users/Darkvalui/Pictures", title="choose your file",
                                          filetypes=(("jpeg files", "*.jpg"), ("all files", "*.*")))
    img = Image.open(filename)
    resized1 = img.resize((600, 400))
    photo1 = ImageTk.PhotoImage(resized1)
    # Keep a reference on the label so Tk does not garbage-collect the image.
    photolabel.photo1 = photo1
    photolabel.configure(image=photo1)
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('image_file', type=str, nargs='?', default=filename, help='help...')
    args = parser.parse_args()
    result_json = detect_face(args.image_file)
    result = result_json['result']
    if 'faces' not in result:
        # No face detected: ask the user to pick another photo (Korean UI text).
        tk.messagebox.showerror("오류", "얼굴을 인식하지 못했습니다.\n다른 사진을 선택해 주세요.")
    else:
        gen = result['faces'][0]['facial_attributes']['gender']
        age = math.trunc(result['faces'][0]['facial_attributes']['age'])  # drop the fractional part
        if gen['male'] > gen['female']: gender = "남"
        elif gen['male'] < gen['female']: gender = "여"
        # NOTE(review): the corner is picked at random here, not derived from
        # the analysis -- confirm this is intended.
        corner_list = ['정육', '식품', '스낵', '가전제품']
        corner = random.choice(corner_list)
        print("gender:", gender, "age:", age, "corner:", corner)
        tk.messagebox.showinfo("사진 등록 완료", "분석결과를 확인하세요.")
        result_label = tk.Label(root, text="분석결과 성별은 '" + str(gender) + "', 나이는 '" + str(age) + "' 세 입니다.",
                                font=("맑은 고딕", 35), fg="blue", bg="white")
        result_label.place(x=200, y=25)
def db_btn():
    """Button handler: persist the last analysis (gender/age/corner) to the DB."""
    global gender, age, corner, root
    searchdb.sqlinsert(gender, str(age), corner)
    tk.messagebox.showinfo("등록 완료", "분석결과가 DB에 등록되었습니다.")
def like_btn():  # recommend a product based on gender and age
    """Button handler: recommend a store corner from past analysis records.

    Groups DB rows (gender, age, corner) into six gender/age-band buckets and
    recommends the most common corner of whichever bucket the *last* matching
    row fell into, then displays a matching product image via best_show().

    NOTE(review): ages of exactly 30 or 50 match no branch (strict
    comparisons); if no row matches any branch, ``cn``/``label_text`` are
    never assigned and ``cn.most_common`` raises NameError -- confirm.
    """
    global info_label, photolabel2, best
    search_result = searchdb.sqlselect()
    corner_f1, corner_f2, corner_f3 = [], [], []
    corner_m1, corner_m2, corner_m3 = [], [], []
    for x in range(0, len(search_result)):
        if search_result[x][1] < 30:
            if search_result[x][0] == "여":
                corner_f1.append(search_result[x][2])
                cn = Counter(corner_f1)
                label_text = str("10~20대 여성이 선호하는")
            elif search_result[x][0] == "남":
                corner_m1.append(search_result[x][2])
                cn = Counter(corner_m1)
                label_text = str("10~20대 남성이 선호하는")
        elif 30 < search_result[x][1] < 50:
            if search_result[x][0] == "여":
                corner_f2.append(search_result[x][2])
                cn = Counter(corner_f2)
                label_text = str("30~40대 여성이 선호하는")
            elif search_result[x][0] == "남":
                corner_m2.append(search_result[x][2])
                cn = Counter(corner_m2)
                label_text = str("30~40대 남성이 선호하는")
        elif search_result[x][1] > 50:
            if search_result[x][0] == "여":
                corner_f3.append(search_result[x][2])
                cn = Counter(corner_f3)
                label_text = str("50대 이상 여성이 선호하는")
            elif search_result[x][0] == "남":
                corner_m3.append(search_result[x][2])
                cn = Counter(corner_m3)
                label_text = str("50대 이상 남성이 선호하는")
    # Most common corner of the last-updated bucket becomes the recommendation.
    mode = cn.most_common(1)
    best = mode[0][0]
    result_label = tk.Label(root, text=label_text + " " + best + "코너 상품을 추천합니다.",
                            font=("맑은 고딕", 30), fg="orange", bg="white", width=50)
    result_label.place(x=100, y=26)
    best_show()
def best_show():
    """Display a random promotional image for the recommended corner.

    Reads the module global ``best`` (corner name chosen by like_btn) and
    swaps the image shown in ``photolabel2``.

    Refactor: the original duplicated the load/resize/display sequence in
    four identical branches; a corner->images table removes the duplication
    while keeping behavior (unknown corner names remain a no-op).
    """
    global info_label, photolabel2, best
    # Map each corner name to its pool of promotional images.
    corner_images = {
        '정육': ['./img/meat1.png', './img/meat2.png', './img/meat3.png'],
        '식품': ['./img/food1.png', './img/food2.png', './img/food3.png'],
        '스낵': ['./img/snack1.png', './img/snack2.png', './img/snack3.png'],
        '가전제품': ['./img/elec1.png', './img/elec2.png', './img/elec3.png'],
    }
    images = corner_images.get(best)
    if images is None:
        # Unknown corner: keep the current image (matches the original no-op).
        return
    img = Image.open(random.choice(images))
    resized1 = img.resize((600, 400))
    photo1 = ImageTk.PhotoImage(resized1)
    # Keep a reference on the label so Tk does not garbage-collect the image.
    photolabel2.photo1 = photo1
    photolabel2.configure(image=photo1)
def detect_face(filename):
    """POST the image at *filename* to the Kakao Vision face-detect API and
    return the decoded JSON response.

    Exits the whole program on any network/IO error.
    """
    headers = {'Authorization':'KakaoAK {}'.format(MYAPP_KEY)}  # REST API key header
    # Network access: wrap in a try/except.
    try:
        # 'rb': the image file is uploaded to the server as binary data.
        files = {'file':open(filename, 'rb')}
        resp = requests.post(API_URL, headers=headers, files=files)
        return resp.json()  # decode the JSON result
    except Exception as e:
        # NOTE(review): the opened file handle is never closed, and exiting on
        # any Exception hides the real error from the GUI -- consider re-raising.
        print(str(e))
        print(type(e))  # print the exception type
        sys.exit(0)  # terminate on error
def main_ui():
    """Build and run the Tk main window.

    Creates the title/recommendation image labels and the three buttons
    (open photo, register to DB, recommend product), then enters mainloop.
    """
    global photolabel, photolabel2, root, info_label
    root = tk.Tk()
    root.geometry("1300x650+100+50")
    root.title("상품추천시스템")
    root.resizable("false", "false")
    root.configure(bg="white")
    # Left panel: title image (replaced by the chosen photo later).
    file = "./img/title.jpg"
    image = Image.open(file)
    resized = image.resize((600, 400))
    photo = ImageTk.PhotoImage(resized)
    photolabel = tk.Label(root, image=photo)
    # Right panel: recommendation image (replaced by best_show later).
    file2 = "./img/like.png"
    image2 = Image.open(file2)
    resized2 = image2.resize((600, 400))
    photo2 = ImageTk.PhotoImage(resized2)
    photolabel2 = tk.Label(root, image=photo2)
    open_btn = tk.Button(root, text="사진파일 불러오기", font=("배달의민족 도현", 20), command=file_open, bg="#efc140")
    btn = tk.Button(root, text="분석결과 DB에 등록", font=("배달의민족 도현", 20), command=db_btn, bg="#efc140")
    btn2 = tk.Button(root, text="상품추천버튼", font=("배달의민족 도현", 20), command=like_btn, bg="#efc140")
    open_btn.place(x=210, y=523)
    btn.place(x=200, y=578)
    btn2.place(x=870, y=540)
    photolabel.place(x=35, y=110)
    photolabel2.place(x=660, y=110)
    root.mainloop()
if __name__ == '__main__':
main_ui()
| [
"48680930+JU-RI@users.noreply.github.com"
] | 48680930+JU-RI@users.noreply.github.com |
41c56a7c5c0c9ce2bc578dda633139c181636491 | 3e21239d7b0537b038d32bda632a7137c3ce2fb0 | /coverage.py | 889b8210e523f1cdccb9a2008fea3e321059958d | [
"MIT"
] | permissive | rlopez93/seq-assembly-2 | 966cc2d80b7043fd5a76b43eff6424f5441b46c5 | d78a7ee8321dba8d025b5bf72b77454d1b6ffc36 | refs/heads/master | 2021-01-21T13:41:43.597041 | 2015-05-25T22:09:35 | 2015-05-25T22:09:35 | 35,104,802 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,780 | py | #! /usr/bin/env python2
from __future__ import print_function
import fileinput
"""
Usage: cat <map file> | python coverage.py
Note: this script assumes that all reads in
the map file are of the same gene
"""
# coverage[i] -> coverage for gene at position i
# Python 2 script (xrange): reads a read-mapping file from stdin and prints,
# per contiguous covered interval, its length, start position and mean depth.
coverage = []
for line in fileinput.input():
    fields = line.split()
    # the 8th column of fields contains the errors
    # we only want clean reads, so if fields has an
    # 8th column, then we ignore it
    if len(fields) == 8:
        continue
    # get location of current read from map file
    location = int(fields[3])
    length = 36  # fixed read length assumed by this pipeline
    # calculate difference between end
    # of current read and size of coverage
    diff = location + length - len(coverage)
    # if current read extends past current coverage
    if diff > 0:
        # extend coverage to make up the difference
        coverage.extend([0] * diff)
    # update coverage
    for i in xrange(location, location+length):
        coverage[i] += 1
# now we find what intervals of the genome
# have non-zero coverage
intervals = []
i = 0
while i < len(coverage):
    # ignore positions with 0 coverage
    while i < len(coverage) and coverage[i] == 0:
        i += 1
    if i >= len(coverage):
        break
    # set start point for current coverage interval
    start = i
    sub_coverage = [coverage[i]]
    # find current coverage interval
    i += 1
    while i < len(coverage) and coverage[i] != 0:
        sub_coverage.append(coverage[i])
        i += 1
    total = len(sub_coverage)
    avg_cov = sum(sub_coverage) / float(total)
    intervals.append((total, start, avg_cov))
# Longest intervals first: (length, start, mean coverage) per line.
for total, start, avg_cov in sorted(intervals, reverse=True):
    print("{0:4d} {1:10d} {2:6.1f}"
          .format(total, start, avg_cov))
| [
"ricardolopez93@gmail.com"
] | ricardolopez93@gmail.com |
88e50a08bbca948a79e849d9561cf42f78aca8d4 | 34ffaabea6d4ca25edd893e0429d846fdea78856 | /effidavit/urls.py | e6f8a71f1af46b1cbb462abc376b8f98714285b4 | [] | no_license | bkanuka/efidavit | 730e80f48dd875a77e2ea4979f0d110248c2d519 | 0ec341452ce9486ee0582c6402ebdcc325800939 | refs/heads/master | 2020-03-28T18:41:51.068012 | 2014-09-21T02:17:42 | 2014-09-21T02:17:42 | 24,279,599 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 516 | py | from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
admin.autodiscover()

# Route the site root to the "coming soon" landing page; the blog and admin
# routes are currently disabled below.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'jumpingintodjango.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    # url(r'^comingsoon/', 'comingsoon.views.index'),
    url(r'^$', 'comingsoon.views.index'),
    # url(r'^admin/', include(admin.site.urls)),
)

# Serve static files via the staticfiles app (development use).
urlpatterns += staticfiles_urlpatterns()
| [
"bkanuka@gmail.com"
] | bkanuka@gmail.com |
175b16aa461473aa8fbeb39f96459c4ddc826859 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02937/s405333986.py | 77f923e46db0a03de4ebb7d4023a7c9648601069 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 694 | py | s=list(input())
t=list(input())
ns=len(s)
nt=len(t)
t_set=list(set(t))
s_set=list(set(s))
for i in range(len(t_set)):
if t_set[i] not in s_set:
print(-1)
exit()
from collections import defaultdict, deque
from bisect import bisect_right
ds=defaultdict(list)
for i in range(ns):
ds[s[i]].append(i)
components=[0]*26
for i in range(26):
components[i]=len(ds[chr(i+97)])
lt=[-1]*nt
ord('a')
last=-1
for i in range(nt):
j=bisect_right(ds[t[i]],last)
if j==components[ord(t[i])-97]:
lt[i]=ds[t[i]][0]
else:
lt[i]=ds[t[i]][j]
last=lt[i]
kuriage=0
for i in range(1,nt):
if lt[i]<=lt[i-1]:
kuriage+=1
print(kuriage*ns+lt[-1]+1) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
ffa7a37033c9eee4858e1ff0293d62b027d27f1c | d3942686f0b3d5c3bb077607bdd04af082363520 | /{{cookiecutter.project_slug}}/tests/__init__.py | 79b42da3269c9b900df483f37457663f1a4bbba6 | [
"BSD-3-Clause",
"GPL-3.0-only",
"GPL-1.0-or-later",
"GPL-3.0-or-later",
"LGPL-2.0-or-later",
"LGPL-2.1-or-later",
"LicenseRef-scancode-free-unknown",
"MIT",
"ISC",
"Apache-2.0"
] | permissive | audreyfeldroy/cookiecutter-pypackage | aaa3a26c314e990f3a4867c0722fa9d06f72755c | a3bfdf11b42fcfa2d3c721270ae7a112be551eb5 | refs/heads/master | 2023-08-25T16:22:23.781780 | 2023-08-16T03:23:40 | 2023-08-16T03:23:40 | 11,407,567 | 1,427 | 661 | BSD-3-Clause | 2023-08-16T03:23:42 | 2013-07-14T18:52:05 | Python | UTF-8 | Python | false | false | 61 | py | """Unit test package for {{ cookiecutter.project_slug }}."""
| [
"eugene.kim@ntti3.com"
] | eugene.kim@ntti3.com |
66710b834ba910ae871e35434ff06cc236671078 | 88a6bee119310e508bd04e87532a363eaafabc38 | /mattermostgithub/payload.py | e99404fef011930a7b6da540bed9729136c80ea1 | [
"MIT"
] | permissive | Y-Ge/mattermost-github-integration | efcd0be77649d47c22ecc2390b9210da48dde673 | 1124a0ff233b50ed6070cb84cfffd128ad219831 | refs/heads/master | 2020-05-03T06:46:05.224619 | 2019-01-16T10:17:18 | 2019-01-16T10:17:18 | 178,481,570 | 0 | 0 | NOASSERTION | 2019-03-29T22:09:52 | 2019-03-29T22:09:52 | null | UTF-8 | Python | false | false | 9,155 | py | from io import BytesIO
import requests
try:
from PIL import Image
from mattermostgithub.config import SHOW_AVATARS
except ImportError:
SHOW_AVATARS = False
class Payload(object):
    """Base wrapper around a GitHub webhook payload dict.

    Subclasses pull event-specific fields out of ``self.data`` and render
    Mattermost-flavoured markdown messages.
    """

    def __init__(self, data):
        self.data = data

    def user_link(self):
        """Markdown link for the user that triggered the event."""
        name = self.data['sender']['login']
        url = self.data['sender']['html_url']
        avatar = self.data['sender']['avatar_url'] + "&s=18"
        return self.create_user_link(name, url, avatar)

    def check_avatar_size(self, url):
        """Return True if the image at *url* fits within 20x20 px."""
        f = requests.get(url)
        img = Image.open(BytesIO(f.content))
        f.close()
        # Only tiny avatars render acceptably inline in a message.
        return img.size[0] <= 20 and img.size[1] <= 20

    def create_user_link(self, name, url, avatar):
        """Markdown user link, optionally prefixed with the avatar image.

        Bug fix: the original format string had two placeholders but three
        arguments, raising TypeError whenever SHOW_AVATARS was enabled.
        """
        if SHOW_AVATARS and self.check_avatar_size(avatar):
            return "![](%s) [%s](%s)" % (avatar, name, url)
        return "[%s](%s)" % (name, url)

    def repo_link(self):
        """Markdown link for the repository the event belongs to."""
        name = self.data['repository']['full_name']
        url = self.data['repository']['html_url']
        return "[%s](%s)" % (name, url)

    def preview(self, text):
        """Return the first line of *text*, suffixed with " [...]" if truncated.

        Bug fix: guard the trailing-character strip so a leading newline
        (empty first line) no longer raises IndexError.
        """
        if not text:
            return text
        result = text.split("\n")[0]
        if result and result[-1] in "[\n, \r]":
            result = result[:-1]
        if result != text:
            result += " [...]"
        return result
class PullRequest(Payload):
    """Renders messages for pull-request webhook events."""

    def __init__(self, data):
        Payload.__init__(self, data)
        self.number = self.data['pull_request']['number']
        self.title = self.data['pull_request']['title']
        self.body = self.data['pull_request']['body']
        self.url = self.data['pull_request']['html_url']

    def opened(self):
        """Message for a newly opened PR, quoting the first line of its body."""
        body = self.preview(self.body)
        msg = """%s opened new pull request [#%s %s](%s) in %s:
> %s""" % (self.user_link(), self.number, self.title,
           self.url, self.repo_link(), body)
        return msg

    def assigned(self):
        """Message for a PR being assigned to a user."""
        to_name = self.data['assignee']['login']
        to_url = self.data['assignee']['html_url']
        to_avatar = self.data['assignee']['avatar_url'] + "&s=18"
        to = self.create_user_link(to_name, to_url, to_avatar)
        msg = """%s assigned %s to pull request [#%s %s](%s).""" % (self.user_link(),
            to, self.number, self.title, self.url)
        return msg

    def closed(self):
        """Message for a PR being merged or closed without merging."""
        merged = self.data['pull_request']['merged']
        action = "merged" if merged else "closed"
        msg = """%s %s pull request [#%s %s](%s).""" % (self.user_link(),
            action, self.number, self.title, self.url)
        return msg

    def synchronize(self):
        """Message for new commits being pushed to the PR branch."""
        msg = """%s modified pull request [#%s %s](%s).""" % (self.user_link(),
            self.number, self.title, self.url)
        return msg
class PullRequestReview(Payload):
    """Renders messages for pull-request review webhook events."""

    def __init__(self, data):
        Payload.__init__(self, data)
        pr = data['pull_request']
        review = data['review']
        self.number = pr['number']
        self.title = pr['title']
        self.body = review['body']
        self.url = review['html_url']

    def submitted(self):
        """Message for a submitted review, quoting the first line of its body."""
        snippet = self.preview(self.body)
        return """%s submitted a review on pull request [#%s %s](%s):
> %s""" % (self.user_link(), self.number, self.title, self.url, snippet)
class PullRequestComment(Payload):
    """Renders messages for comments on pull requests."""

    def __init__(self, data):
        Payload.__init__(self, data)
        pr = data['pull_request']
        comment = data['comment']
        self.number = pr['number']
        self.title = pr['title']
        self.body = comment['body']
        self.url = comment['html_url']

    def created(self):
        """Message quoting the first line of a new PR comment."""
        snippet = self.preview(self.body)
        return """%s commented on pull request [#%s %s](%s):
> %s""" % (self.user_link(), self.number, self.title, self.url, snippet)
class Issue(Payload):
    """Renders messages for issue webhook events."""

    def __init__(self, data):
        Payload.__init__(self, data)
        self.number = self.data['issue']['number']
        self.title = self.data['issue']['title']
        self.url = self.data['issue']['html_url']
        self.body = self.data['issue']['body']

    def opened(self):
        """Message for a newly opened issue, quoting the first line of its body."""
        body = self.preview(self.body)
        msg = """%s opened new issue [#%s %s](%s) in %s:
> %s""" % (self.user_link(), self.number, self.title, self.url, self.repo_link(), body)
        return msg

    def labeled(self):
        """Message for a label being added to the issue."""
        label = self.data['label']['name']
        msg = """%s added label `%s` to issue [#%s %s](%s) in %s.""" % (self.user_link(), label, self.number, self.title, self.url, self.repo_link())
        return msg

    def closed(self):
        """Message for the issue being closed."""
        msg = """%s closed issue [#%s %s](%s) in %s.""" % (self.user_link(), self.number, self.title, self.url, self.repo_link())
        return msg

    def assigned(self):
        """Message for the issue being assigned to a user."""
        name = self.data['assignee']['login']
        url = self.data['assignee']['html_url']
        avatar = self.data['assignee']['avatar_url'] + "&s=18"
        assignee = self.create_user_link(name, url, avatar)
        msg = """%s assigned %s to issue [#%s %s](%s) in %s.""" % (self.user_link(), assignee, self.number, self.title, self.url, self.repo_link())
        return msg
class IssueComment(Payload):
    """Renders messages for comments on issues."""

    def __init__(self, data):
        Payload.__init__(self, data)
        issue = data['issue']
        comment = data['comment']
        self.number = issue['number']
        self.title = issue['title']
        self.url = comment['html_url']
        self.body = comment['body']

    def created(self):
        """Message quoting the first line of a new issue comment."""
        snippet = self.preview(self.body)
        return """%s commented on [#%s %s](%s):
> %s""" % (self.user_link(), self.number, self.title, self.url, snippet)
class CommitComment(Payload):
    """Renders messages for comments on individual commits."""

    def __init__(self, data):
        Payload.__init__(self, data)
        comment = data['comment']
        # Short (7-char) commit hash, as shown by GitHub.
        self.cid = comment['commit_id'][:7]
        self.url = comment['html_url']
        self.body = comment['body']

    def created(self):
        """Message quoting the first line of a new commit comment."""
        snippet = self.preview(self.body)
        return """%s commented on [%s](%s):
> %s""" % (self.user_link(), self.cid, self.url, snippet)
class Repository(Payload):
    """Renders messages for repository-level webhook events."""

    def __init__(self, data):
        Payload.__init__(self, data)

    def created(self):
        """Message announcing a newly created repository with its description."""
        description = self.data['repository']['description']
        return """%s created new repository %s:
> %s""" % (self.user_link(), self.repo_link(), description)
class Branch(Payload):
    """Renders messages for branch create/delete webhook events."""

    def __init__(self, data):
        Payload.__init__(self, data)
        # For create/delete events, "ref" carries the branch name.
        self.name = self.data['ref']

    def created(self):
        """Message for a newly created branch."""
        return """%s added branch `%s` to %s.""" % (
            self.user_link(), self.name, self.repo_link())

    def deleted(self):
        """Message for a deleted branch."""
        return """%s deleted branch `%s` in %s.""" % (
            self.user_link(), self.name, self.repo_link())
class Tag(Payload):
    """Renders messages for tag creation webhook events."""

    def __init__(self, data):
        Payload.__init__(self, data)
        # For create events, "ref" carries the tag name.
        self.name = self.data['ref']

    def created(self):
        """Message for a newly created tag."""
        return """%s added tag `%s` to %s.""" % (
            self.user_link(), self.name, self.repo_link())
class Push(Payload):
    """Renders messages for push webhook events (one line per commit)."""

    def __init__(self, data):
        Payload.__init__(self, data)

    def commits(self):
        """Summary line plus a bullet per pushed commit."""
        commits = self.data['commits']
        branch = self.data['ref'].replace("refs/heads/", "")
        branch_url = self.data['repository']['html_url'] + "/tree/" + branch
        # Some payloads (e.g. force pushes) carry an empty commits list;
        # fall back to the head commit so at least one entry is shown.
        if not commits:
            commits = [self.data['head_commit']]
        changeset = "changesets" if len(commits) > 1 else "changeset"
        msg = []
        msg.append("%s pushed %s %s to [%s](%s) at %s:" % (self.user_link(), len(commits), changeset, branch, branch_url, self.repo_link()))
        for commit in commits:
            cid = commit['id'][:7]  # short hash
            curl = commit['url']
            cmsg = self.preview(commit['message'])  # first line only
            ctext = "- [`%s`](%s): %s" % (cid, curl, cmsg)
            msg.append("\n")
            msg.append(ctext)
        return "".join(msg)
class Wiki(Payload):
    """Renders messages for wiki (gollum) webhook events."""

    def __init__(self, data):
        Payload.__init__(self, data)

    def updated(self):
        """Summary line plus a bullet per changed wiki page."""
        pages = self.data['pages']
        msg = []
        msg.append("%s changes %s pages in Wiki at %s:" % (self.user_link(), len(pages), self.repo_link()))
        for page in pages:
            page_name = page['page_name']
            title = page['title']  # NOTE(review): fetched but unused
            summary = page['summary']
            # Link to the diff of this revision.
            url = "%s/_compare/%s" % (page['html_url'], page['sha'])
            action = page['action']
            # NOTE(review): the two branches differ in trailing newline
            # placement -- presumably cosmetic; confirm before normalising.
            if summary :
                ctext = "- %s [%s](%s)\n>%s" % (action, page_name, url,summary)
            else :
                ctext = "- %s [%s](%s)\n" % (action, page_name, url)
            msg.append("\n")
            msg.append(ctext)
        return "".join(msg)
class Status(Payload):
    """Renders messages for commit status webhook events."""

    def __init__(self, data):
        Payload.__init__(self, data)

    def updated(self):
        """Message linking the status description to its target URL."""
        target = self.data["target_url"]
        description = self.data["description"]
        return "[%s](%s) in %s." % (description, target, self.repo_link())
| [
"lukas.diekmann@gmail.com"
] | lukas.diekmann@gmail.com |
d91c472494a14944078b8c134a26b081bd7333e7 | 525c3a824a5bab4d72a355bfe9d504a170793cf6 | /AltScraper/AltScraper/pipelines.py | 8b422c45455b7e0db592e7b7c96c344800ee181e | [] | no_license | khuhroproeza/AltApi | b5dce386119d49b829c4ad19306692d2a32b6b7c | 0c869c34a084e95775e0694d627098552870bd59 | refs/heads/master | 2023-03-14T03:16:30.742540 | 2021-02-21T01:04:18 | 2021-02-21T01:04:18 | 340,785,831 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | # Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
class AltscraperPipeline:
    """Pass-through Scrapy pipeline: items are forwarded unchanged."""

    def process_item(self, item, spider):
        # No filtering or transformation is applied.
        return item
| [
"s.khuhro@appose.com"
] | s.khuhro@appose.com |
997c692b5f8aa3a1df50e49dd5f625b4b2a76106 | 636a45b40f0a31a0f01487ebd0270a01c350a7e8 | /my_functions.py | 635661302dedc84d7478dc67c755a48733c0de5b | [] | no_license | Marley-C-Robinson-99/Windows-Test | 4afad46810f08c5b0bd0d0303310fb6eb02954f8 | 3fc1240126c07de01836032ab2e9241b6bbf25a8 | refs/heads/main | 2023-06-16T21:23:48.591193 | 2021-07-14T17:26:27 | 2021-07-14T17:26:27 | 380,139,065 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,198 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
def is_two(num):
    """Return True when num equals the number 2 or the string '2'."""
    return num in (2, '2')
def is_vowel(strng):
    """Return True when strng is a single vowel letter (either case).

    Bug fix: the original used substring membership (``strng in 'aeiouAEIOU'``),
    which wrongly returned True for '' and for multi-letter runs like 'ae'.
    """
    return len(strng) == 1 and strng in 'aeiouAEIOU'
def is_consonant(l):
    """Return True when l is a single consonant letter (either case).

    Bug fix: the original merely negated the vowel test, so digits,
    punctuation and empty strings all counted as consonants.
    """
    return len(l) == 1 and l.isalpha() and l not in 'aeiouAEIOU'
def cap(word):
    """Title-case *word* unless it starts with a vowel, in which case it is
    returned unchanged.

    Bug fixes: the original compared ``word[0] != ('aeiouAEIOU')`` (comparing
    one character against the whole string, so the branch always ran) and
    crashed with IndexError on an empty string.
    """
    if word and word[0] not in 'aeiouAEIOU':
        # Title-case so the first letter of each word is capitalised.
        return word.title()
    return word
def calculate_tip(bill, tip):
    """Return the bill total including the tip.

    tip is a fraction (e.g. 0.2 for a 20% tip).

    Bug fix: the original returned ``total + total`` where total held only
    the tip amount, i.e. twice the tip and never the bill itself; the intent
    per the original comment was tip + bill total.
    """
    return bill + bill * tip
def apply_discount(price, d):
    """Return the price after applying a percentage discount.

    d is a whole-number percentage (e.g. 25 for 25% off), as in the
    original comment ("converts percentage to decimal").

    Bug fix: the original returned only the discount *amount*
    (``price * d / 100``), not the discounted price the name implies.
    """
    return price - price * (d / 100)
def handle_commas(num):
    """Return the string *num* with every comma removed."""
    return num.replace(",", "")
def remove_vowels(word):
    """Return *word* with every vowel (either case) removed."""
    # Single-character membership is equivalent to the is_vowel() check
    # the original performed per letter; the helper is inlined here.
    return "".join(ch for ch in word if ch not in "aeiouAEIOU")
def get_letter_grade(grade):
    """Convert a numeric grade to a letter grade (A-F).

    Bug fix: the original used exclusive bounds on every band, so a grade of
    exactly 100 (or 0 and below) fell through every branch and returned None.
    """
    if grade >= 90:
        return "A"
    elif grade >= 80:
        return "B"
    elif grade >= 70:
        return "C"
    elif grade >= 60:
        return "D"
    else:
        return "F"
def normalize_name(name):
    """Normalise *name*: strip, keep only letters/digits/spaces, replace
    spaces with underscores, and lower-case the result."""
    kept = [c for c in name.strip() if c.isnumeric() or c.isalpha() or c == " "]
    return "".join(kept).replace(" ", "_").lower()
def cumulative_sum(nums):
    """Return the running (cumulative) sums of *nums* as a new list."""
    running = 0
    totals = []
    for value in nums:
        running += value
        totals.append(running)
    return totals
"marleycrobinson99@gmail.com"
] | marleycrobinson99@gmail.com |
eea01f46700e145a52ea864f42942e182a2b4072 | 3eb781bc3669496b0de3dbe637f2ebcba5cb7fca | /blog/urls.py | 068f5a975335045a737555faa3e6e0165ae7acca | [] | no_license | kitano333/my-first-blog | 8f3508391e6d48a932fa2d5e9d1a20441dae5e15 | df94d9a1cabf0479b21f29b6bb7ad4fad60aab45 | refs/heads/master | 2020-08-02T21:41:16.166154 | 2019-10-10T16:14:04 | 2019-10-10T16:14:04 | 199,885,243 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 862 | py | from django.urls import path
from . import views
# Imports Django's path() helper and all of the blog app's views.
urlpatterns = [
    path('', views.post_list, name='post_list'),
]
# The empty pattern matches the site root: the URL resolver strips the
# scheme/domain part (e.g. http://127.0.0.1:8000/) before matching, so a
# request for the root address is dispatched to views.post_list.
| [
"poao333@yahoo.co.jp"
] | poao333@yahoo.co.jp |
e50f135364772e42c6edaf110f4b2dd0b7d1d55c | 1fcbe1f4ec9f55b1c576e848e793228323ed2d61 | /order/migrations/0001_initial.py | 0e0281f1158c2d262d9b2bf6b5f4e14119c06c29 | [] | no_license | jzhang0228/coin_registeration | 48db7f697c0eedcaba6cbf2824f39334687a40d0 | fd6482c6390f6afac5b84a185f6fbb35a66e348e | refs/heads/master | 2021-01-10T09:30:57.131499 | 2016-04-19T06:37:43 | 2016-04-19T06:37:43 | 50,980,920 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,842 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-02-03 07:06
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the order app (auto-generated, Django 1.9).

    Creates three tables: business, order_detail and user_information,
    plus the order_detail -> user_information foreign key.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        # "business" table: a sellable item with price/profit bookkeeping.
        migrations.CreateModel(
            name='Business',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('price', models.FloatField(default=0, null=True)),
                ('profit', models.FloatField(default=0, null=True)),
                ('active', models.BooleanField(default=True)),
                ('created_time', models.DateTimeField(default=django.utils.timezone.now)),
            ],
            options={
                'db_table': 'business',
            },
        ),
        # "order_detail" table: per-order status/shipping tracking flags.
        migrations.CreateModel(
            name='OrderDetail',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('order_number', models.CharField(max_length=100, verbose_name='Order Number')),
                ('email', models.EmailField(max_length=100, verbose_name='Email')),
                ('order_status', models.CharField(blank=True, max_length=20, null=True)),
                ('shipping_status', models.CharField(blank=True, max_length=20, null=True)),
                ('tracking_number', models.CharField(blank=True, max_length=50, null=True)),
                ('ship_date', models.DateTimeField(null=True)),
                ('deliver_date', models.DateTimeField(null=True)),
                ('shipped', models.BooleanField(default=False)),
                ('processing', models.BooleanField(default=False)),
                ('on_hold', models.BooleanField(default=False)),
                ('not_found', models.BooleanField(default=False)),
                ('cancelled', models.BooleanField(default=False)),
            ],
            options={
                'db_table': 'order_detail',
            },
        ),
        # "user_information" table: registrant contact + payment details.
        # NOTE(review): the 'Pill Pay' labels below look like typos for
        # 'Bill Pay'; fixing them requires a follow-up migration, so they
        # are preserved here byte-for-byte.
        migrations.CreateModel(
            name='UserInformation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_time', models.DateTimeField(default=django.utils.timezone.now)),
                ('name', models.CharField(max_length=100, verbose_name='* Full Name')),
                ('mitbbs_id', models.CharField(blank=True, max_length=100, null=True, verbose_name='MITBBS ID')),
                ('endorse_mitbbs_id', models.CharField(blank=True, max_length=100, null=True, verbose_name='\u80cc\u4e66\u4ebaMITBBS ID')),
                ('email', models.EmailField(max_length=100, verbose_name='* Email')),
                ('phone', models.CharField(max_length=20, verbose_name='* Phone')),
                ('zip', models.CharField(blank=True, max_length=10, null=True, verbose_name='Zip Code')),
                ('billpay_number', models.CharField(max_length=100, verbose_name='* Pill Pay Number')),
                ('billpay_credit_card_type', models.CharField(max_length=50, verbose_name='* Pill Pay Credit Card Type')),
                ('paid', models.FloatField(default=0)),
                ('business', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='order.Business')),
            ],
            options={
                'db_table': 'user_information',
            },
        ),
        # Link each order row to the user who placed it.
        migrations.AddField(
            model_name='orderdetail',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='order.UserInformation'),
        ),
    ]
| [
"jzhang0228@gmail.com"
] | jzhang0228@gmail.com |
3585d6a65316de2774692932e0998c9bca0ae3af | ac53ed07a534db727e57714f5fdd07950d22c170 | /Python学习/day04/匿名函数.py | b55019111525f8b8a8cb09de9d5e4c6a19f97327 | [
"MIT"
] | permissive | taoyan/python | b8847911068083481e34567716710abaf5bbe4be | 3fbb1e534ec91b33cd2ac690966d7253c2f5d757 | refs/heads/master | 2022-12-11T17:15:59.411554 | 2019-10-28T03:50:31 | 2019-10-28T03:50:31 | 147,048,609 | 1 | 0 | MIT | 2022-12-10T14:53:29 | 2018-09-02T02:39:40 | Python | UTF-8 | Python | false | false | 839 | py | #匿名函数
#lambda修饰
result = (lambda x,y:x+y)(1,2)
print(result)
#使用场景:简化代码
#匿名函数的调用
func = lambda x,y:x*y
result = func(1,2)
print(result)
#判断是否是偶数
def is_os(num):
    """Return True if ``num`` is even, False otherwise.

    The original spelled this as an if/else returning literal True/False;
    the modulo comparison already produces exactly that boolean.
    """
    return num % 2 == 0
print(is_os(1))
# The same even/odd check written as a lambda
new_func = lambda num:True if num % 2 == 0 else False
print(new_func(1))
# Lambdas are handy as sort keys for lists of dicts
my_list = [2,8,7]
my_list.sort() # sorts in place; returns None
print(my_list)
my_list = [{"name":'zs',"age":19},{"name":'ls',"age":12}]
# Dicts have no natural ordering, so a plain sort() fails:
# my_list.sort()
# print(my_list)
# Sort with an anonymous key function instead
my_list.sort(key = lambda item:item["age"])
print(my_list)
# Sorting the dicts with a regular (named) key function:
def get_value(item):
    """Sort key: return the 'age' entry of a record dict."""
    age = item['age']
    return age
my_list.sort(key = get_value, reverse = True)  # descending by age
print(my_list)
| [
"1136523628@qq.com"
] | 1136523628@qq.com |
0fcbfd1b29cd78bfc26627d84f1dc484ef4cd791 | 1dbb922246b549d2721fb0bde46b49ee94d65789 | /tests/base.py | 5cbf76b6b7e17e1ed4243858d3e555b46362405d | [] | no_license | easyNav/easyNav-sensors-wifi | a9ae91cc5ab9ffb3113d081f8b43b9761942abe0 | 00acfd48473efb52b838379a25de516d52a222d4 | refs/heads/master | 2021-01-01T15:24:36.112766 | 2014-10-16T15:58:09 | 2014-10-16T15:58:09 | 25,121,604 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 360 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of easyNav-sensors-wifi.
# https://github.com/easyNav/easyNav-sensors-wifi
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT-license
# Copyright (c) 2014 Joel Tong me@joeltong.org
from unittest import TestCase as PythonTestCase
class TestCase(PythonTestCase):
    """Project-wide base test case.

    Currently just an alias for unittest's TestCase; shared fixtures or
    helpers for the easyNav test suite can be added here later.
    """
    pass
| [
"me@joeltong.org"
] | me@joeltong.org |
9517457d7e8d2371a55f8d478f0d120d0d465c5c | c92035d0192a9cc65d0319c454beca8974808f42 | /app/map/urls.py | 6a59b642c1e4fc612b5998ecf62122d1d139ae33 | [] | no_license | ueue511/django-celery | 0fd710e1326e0ac636a843d742b961971724b562 | 3422ba995adcba5561f14350c7dc3fcd4d9a6e1c | refs/heads/master | 2023-07-04T08:39:28.993106 | 2021-08-12T11:56:57 | 2021-08-12T11:56:57 | 395,297,670 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py | from django.urls import path
from . import views
urlpatterns = [
    # Class-based view serving the map page.
    path('map/', views.Make_Map.as_view(), name='make_map'),
    # Function view that builds the map -- presumably the form-submission
    # endpoint for the page above; confirm against views.make_map_post.
    path('make_map/', views.make_map_post, name='make_map_post'),
]
| [
"nexnex07@yahoo.co.jp"
] | nexnex07@yahoo.co.jp |
6ad2e977a128e69b0bb0e99f23b1c334efee2083 | a1107969d677024c15edd4808b7006f8421d64c2 | /venv/bin/pyqt5qmlscene | 4dfe09476a0e70d19b8e96594754030ba7e767d2 | [] | no_license | vlamik19/Server-Client-learn-socked | cafe6d6faff5b76b220b48a3d06bd24ee227c763 | 6bb1b31b3f85b1fbf29c7cb5438cbf449b2aaa93 | refs/heads/master | 2022-11-28T03:43:38.760346 | 2020-07-21T15:07:06 | 2020-07-21T15:07:06 | 278,643,809 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 279 | #!/home/vladmik19/PycharmProjects/server2/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pyqt5_tools.entrypoints import pyqt5qmlscene
if __name__ == '__main__':
    # Strip a trailing "-script.pyw"/".exe" suffix from argv[0] (setuptools
    # console-script wrapper convention) before delegating to the entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(pyqt5qmlscene())
| [
"madara020498@mail.ru"
] | madara020498@mail.ru | |
72873dffd27f14fc20be6c3a6a335451b79762f1 | 960cf810b9544128bc7eb32f25adafb7d668499e | /dd_using_vgg16.py | 5b1465fd407d653301d98efc80cdeb1aeb2cb39b | [] | no_license | janvi08/Distracted-driver-detection-system | 324c76296cdd584435144fe3a68afd23df3702c4 | 027a63b4d86efbd1efdf4b878b4f206694d1d9e3 | refs/heads/master | 2023-08-15T05:38:31.763741 | 2021-10-04T12:18:48 | 2021-10-04T12:18:48 | 413,400,961 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,398 | py | # -*- coding: utf-8 -*-
"""DD using VGG16.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1Elg597sYw5uSq8rGiDeoDOkORsZrbLVy
"""
from google.colab import drive
drive.mount('/content/gdrive')
import pandas as pd
df=pd.read_csv('gdrive/My Drive/inp/driver_imgs_list.csv')
df.head(5)
by_drivers = df.groupby('subject')
unique_drivers = by_drivers.groups.keys()
print(unique_drivers)
import tensorflow as tf
tf.test.gpu_device_name()
!ln -sf /opt/bin/nvidia-smi /usr/bin/nvidia-smi
!pip install gputil
!pip install psutil
!pip install humanize
import psutil
import humanize
import os
import GPUtil as GPU
GPUs = GPU.getGPUs()
gpu = GPUs[0]
def printm():
    """Print current host RAM usage for this process.

    NOTE(review): despite the GPU setup above, this only reports system
    RAM (free memory and process resident size), not GPU memory --
    confirm whether GPU stats were meant to be printed here too.
    """
    process = psutil.Process(os.getpid())
    print("Gen RAM Free: " + humanize.naturalsize( psutil.virtual_memory().available ), " | Proc size: " + humanize.naturalsize( process.memory_info().rss))
from keras.applications.vgg16 import VGG16
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout, BatchNormalization, GlobalAveragePooling2D
from keras.models import Sequential, Model
from keras.preprocessing.image import ImageDataGenerator
def vgg_std16_model(img_rows, img_cols, color_type=3):
    """Build a 10-class transfer-learning classifier on a frozen VGG16 base.

    Args:
        img_rows, img_cols: intended input size -- note they are not
            actually passed to VGG16 here (no input_shape), so the model
            accepts variable-size input; TODO confirm that is intended.
        color_type: channel count; unused in this function.

    Returns:
        An uncompiled Keras Model: frozen ImageNet VGG16 conv base +
        GlobalAveragePooling + Dense(1024, relu) + Dense(10, softmax).
    """
    nb_classes = 10
    vgg16_model = VGG16(weights="imagenet", include_top=False)
    # Freeze the convolutional base so only the new head is trained.
    for layer in vgg16_model.layers:
        layer.trainable = False
    x = vgg16_model.output
    x = GlobalAveragePooling2D()(x)
    x = Dense(1024, activation='relu')(x)
    predictions = Dense(nb_classes, activation = 'softmax')(x)
    model = Model(vgg16_model.input, predictions)
    return model
print("Loading network...")
img_rows = 64
img_cols = 64
color_type = 1
print("Loading network...")
model_vgg16 = vgg_std16_model(img_rows, img_cols)
model_vgg16.summary()
model_vgg16.compile(loss='categorical_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
train_datagen = ImageDataGenerator(rescale = 1.0/255,
shear_range = 0.2,
zoom_range = 0.2,
horizontal_flip = True,
validation_split = 0.2)
test_datagen = ImageDataGenerator(rescale=1.0/ 255, validation_split = 0.2)
img_rows = 64
img_cols = 64
color_type = 1
batch_size = 40
nb_epoch = 10
training_generator = train_datagen.flow_from_directory('gdrive/My Drive/inp/imgs/train',
target_size = (img_rows, img_cols),
batch_size = batch_size,
shuffle=True,
class_mode='categorical', subset="training")
validation_generator = test_datagen.flow_from_directory('gdrive/My Drive/inp/imgs/train',
target_size = (img_rows, img_cols),
batch_size = batch_size,
shuffle=False,
class_mode='categorical', subset="validation")
nb_train_samples = 17943
nb_validation_samples = 4481
from keras.callbacks import ModelCheckpoint, EarlyStopping
es = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=2)
checkpoint = ModelCheckpoint('gdrive/My Drive/saved_models/vgg16_model.hdf5', monitor='val_acc', verbose=1, save_best_only=True, mode='max')
history_v4 = model_vgg16.fit_generator(training_generator,
steps_per_epoch = nb_train_samples // batch_size,
epochs = 10,
callbacks=[es, checkpoint],
verbose = 1,
class_weight='auto',
validation_data = validation_generator,
validation_steps = nb_validation_samples // batch_size)
import h5py
model_vgg16.load_weights('gdrive/My Drive/saved_model/weights_best_vgg16.hdf5')
def plot_vgg16_test_class(model, test_files, image_number):
    """Display one test image and print the model's predicted class.

    Relies on module-level globals: img_rows, img_cols, batch_size and
    activity_map. NOTE(review): an identical definition appears again
    later in this file and shadows this one.
    """
    img_brute = test_files[image_number]
    # BGR -> RGB, resize to the network input size, scale to [0, 1].
    im = cv2.resize(cv2.cvtColor(img_brute, cv2.COLOR_BGR2RGB), (img_rows,img_cols)).astype(np.float32) / 255.0
    im = np.expand_dims(im, axis =0)
    img_display = cv2.resize(img_brute,(img_rows,img_cols))
    plt.imshow(img_display, cmap='gray')
    y_preds = model.predict(im, batch_size=batch_size, verbose=1)
    print(y_preds)
    y_prediction = np.argmax(y_preds)
    print('Y Prediction: {}'.format(y_prediction))
    print('Predicted as: {}'.format(activity_map.get('c{}'.format(y_prediction))))
    plt.show()
import numpy as np
from glob import glob
import os
import cv2
from tqdm import tqdm
NUMBER_CLASSES = 10
# Color type: 1 - grey, 3 - rgb
def get_cv2_image(path, img_rows, img_cols, color_type=3):
# Loading as Grayscale image
if color_type == 1:
img = cv2.imread(path, cv2.IMREAD_GRAYSCALE)
elif color_type == 3:
img = cv2.imread(path, cv2.IMREAD_COLOR)
# Reduce size
img = cv2.resize(img, (img_rows, img_cols))
return img
def load_test(size=12, img_rows=64, img_cols=64, color_type=3):
    """Load up to ``size`` test images from the hard-coded Drive folder.

    Files are taken in sorted filename order and resized via
    get_cv2_image.

    Returns:
        (images, ids): list of resized images and their base filenames.
    """
    path = os.path.join('/content/gdrive/My Drive/inp/imgs/ourtest', '*.jpg')
    print("Path: ", path)
    files = sorted(glob(path))
    print(files)
    X_test, X_test_id = [], []
    total = 0
    files_size = len(files)
    print(files_size)
    for file in tqdm(files):
        # Stop once the requested sample count (or the folder) is exhausted.
        if total >= size or total >= files_size:
            break
        file_base = os.path.basename(file)
        img = get_cv2_image(file, img_rows, img_cols, color_type)
        X_test.append(img)
        X_test_id.append(file_base)
        total += 1
    return X_test, X_test_id
def read_and_normalize_sampled_test_data(size, img_rows, img_cols, color_type=3):
    """Load ``size`` test images and reshape them to
    (n, img_rows, img_cols, color_type) as uint8.

    NOTE(review): despite the name, no pixel normalization happens here
    (values stay 0-255); scaling is done at prediction time -- confirm.
    """
    test_data, test_ids = load_test(size, img_rows, img_cols, color_type)
    test_data = np.array(test_data, dtype=np.uint8)
    test_data = test_data.reshape(-1,img_rows,img_cols,color_type)
    return test_data, test_ids
nb_test_samples = 12
test_files, test_targets = read_and_normalize_sampled_test_data(nb_test_samples, img_rows, img_cols, color_type)
print('Test shape:', test_files.shape)
print(test_files.shape[0], 'Test samples')
activity_map = {'c0': 'Safe driving',
'c1': 'Texting with right hand',
'c2': 'Talking on the phone with right hand',
'c3': 'Texting with left hand',
'c4': 'Talking on the phone with left hand',
'c5': 'Using radio',
'c6': 'Drinking',
'c7': 'Looking behind',
'c8': 'Makeup',
'c9': 'Talking to passenger'}
import matplotlib.pyplot as plt
batch_size = 40
def plot_vgg16_test_class(model, test_files, image_number):
    """Display one test image and print the model's predicted class.

    NOTE(review): this is an exact duplicate of the definition earlier in
    the file; being later, this one shadows it. One copy can be deleted.
    """
    img_brute = test_files[image_number]
    # BGR -> RGB, resize to the network input size, scale to [0, 1].
    im = cv2.resize(cv2.cvtColor(img_brute, cv2.COLOR_BGR2RGB), (img_rows,img_cols)).astype(np.float32) / 255.0
    im = np.expand_dims(im, axis =0)
    img_display = cv2.resize(img_brute,(img_rows,img_cols))
    plt.imshow(img_display, cmap='gray')
    y_preds = model.predict(im, batch_size=batch_size, verbose=1)
    print(y_preds)
    y_prediction = np.argmax(y_preds)
    print('Y Prediction: {}'.format(y_prediction))
    print('Predicted as: {}'.format(activity_map.get('c{}'.format(y_prediction))))
    plt.show()
plot_vgg16_test_class(model_vgg16, test_files, 1)
plot_vgg16_test_class(model_vgg16, test_files, 0)
plot_vgg16_test_class(model_vgg16, test_files, 2)
plot_vgg16_test_class(model_vgg16, test_files, 3)
plot_vgg16_test_class(model_vgg16, test_files, 4)
plot_vgg16_test_class(model_vgg16, test_files, 4)
plot_vgg16_test_class(model_vgg16, test_files, 6)
plot_vgg16_test_class(model_vgg16, test_files, 7)
plot_vgg16_test_class(model_vgg16, test_files, 8)
plot_vgg16_test_class(model_vgg16, test_files, 10)
plot_vgg16_test_class(model_vgg16, test_files, 11)
plot_vgg16_test_class(model_vgg16, test_files, 9)
import tensorflow as tf
import h5py
model = tf.keras.models.load_model('gdrive/My Drive/saved_model/weights_best_vgg16.hdf5')
converter = tf.lite.TFLiteConverter.from_keras_model(model)
tflite_model = converter.convert()
open("converted_model.tflite", "wb").write(tflite_model)
| [
"janviphadtare9@gmail.com"
] | janviphadtare9@gmail.com |
9b1ea4f67a3f044fbccede3769ea8b82e6e0b05a | 5534b6f65381357d60c1c8277e27e99a98def5f8 | /2022/LeetCode/2022-11/prefix_sum.py | be837dc1b64a8e1f6cf68cd1b1216d623d82fbb5 | [] | no_license | ootz0rz/tinkering-and-hacking | 61bc326f87bca57b2c2bc660885a75b21a7b0f55 | 874ee305e53998a89ced77111aaae45b60b418e1 | refs/heads/master | 2023-01-28T05:27:02.812394 | 2023-01-24T20:05:02 | 2023-01-24T20:05:02 | 8,169,514 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,023 | py | from typing import List, Optional, Dict
# stupid...but works
import sys, os
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.dirname(SCRIPT_DIR))
from TestHarness import *
'''
Prefix sums allow us to find the sum of any subarray in O(1)O(1).
If we want the sum of the subarray from i to j (inclusive), then the answer is prefix[j] - prefix[i - 1],
or prefix[j] - prefix[i] + nums[i] if you don't want to deal with the out of bounds case when i = 0.
Building a prefix sum is very simple. Here's some pseudocode:
Given an integer array nums,
prefix = [nums[0]]
for i in [1, len(nums) - 1]:
prefix.append(nums[i] + prefix[prefix.length - 1])
'''
'''
Given an integer array nums, an array queries where queries[i] = [x, y] and an integer limit, return a boolean array
that represents the answer to each query. A query is true if the sum of the subarray from x to y is less than limit,
or false otherwise.
For example, given nums = [1, 6, 3, 2, 7, 2] and queries = [[0, 3], [2, 5], [2, 4]] and limit = 13, the answer is
[true, false, true]. For each query, the subarray sums are [12, 14, 12].
'''
def answer_queries(nums, queries, limit):
    """For each query [x, y], report whether sum(nums[x..y]) < limit.

    Builds the prefix-sum array once (O(n)), then answers every query in
    O(1) as prefix[y] - prefix[x] + nums[x].
    """
    # prefix[k] == nums[0] + ... + nums[k]
    prefix = [nums[0]]
    for value in nums[1:]:
        prefix.append(prefix[-1] + value)
    results = []
    for start, end in queries:
        window_sum = prefix[end] - prefix[start] + nums[start]
        results.append(window_sum < limit)
    return results
'''
This is O(n+m), it takes O(n) to build the prefix array and then we can answer each query in O(1) time.
There are m queries, so O(m) to answer them all.
'''
# https://leetcode.com/problems/number-of-ways-to-split-array/
def waysToSplitArray(nums: List[int]) -> int:
    '''
    2270. Number of Ways to Split Array

    Count the indices i (0 <= i < len(nums) - 1) where the sum of the
    first section nums[0..i] is >= the sum of the second section
    nums[i+1..]; the second section must keep at least one element.

    Improvement over the original: the left sum is accumulated on the fly
    (as the original inline comment suggested), so no prefix array is
    materialized -- O(n) time, O(1) extra space.
    '''
    total = sum(nums)
    left_sum = 0
    num_ways = 0
    # Stop one short of the end so the right section is never empty.
    for i in range(len(nums) - 1):
        left_sum += nums[i]
        if left_sum >= total - left_sum:
            num_ways += 1
    return num_ways
# https://leetcode.com/explore/interview/card/leetcodes-interview-crash-course-data-structures-and-algorithms/703/arraystrings/4658/
def runningSum(self, nums: List[int]) -> List[int]:
'''
literally just the prefix sums?
'''
prefix = [nums[0]]
for i in range(1, len(nums)):
prefix.append(nums[i] + prefix[-1])
return prefix
# https://leetcode.com/explore/interview/card/leetcodes-interview-crash-course-data-structures-and-algorithms/703/arraystrings/4657/
def minStartValue(nums: List[int]) -> int:
# prefix sums
prefix = [nums[0]]
for i in range(1, len(nums)):
prefix.append(nums[i] + prefix[-1])
'''
O(n) to build
'''
# choose a value k starting at 1, make sure k + prefix[i] >= 1 for all i in prefix, find smallest k
k = 0
while True:
k = k + 1
i = len(prefix) - 1
while (k + prefix[i]) >= 1 and i >= 0:
i = i - 1
if i < 0: # if we've gone through all values of nums for this value of k, we're good to go
return k
return k
check_solution_simple(minStartValue, args=[[1,2]], expected=1)
check_solution_simple(minStartValue, args=[[-3,2,-3,4,2]], expected=5)
check_solution_simple(minStartValue, args=[[1,-2,-3]], expected=5) | [
"ootz0rz@gmail.com"
] | ootz0rz@gmail.com |
2f7161e1462efe4a1f7d4281499acae7de884531 | b7e9c505f3634f1ab0a28d4468fd7adb1d6cbf92 | /simple_linear_regression.py | 5a19f7618038446a71e18bd10034f3bc7fa36577 | [] | no_license | devarajnadiger/LinearRegression | a9cac70f606c27adc4e5a4f4a5baef1b8b6fd340 | b9a6c033e84b2ac4435f5e861db6b0f57f26a7b6 | refs/heads/master | 2020-04-23T06:49:31.640253 | 2019-02-23T04:52:05 | 2019-02-23T04:52:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,618 | py | '''
----------------------------------------------------------------------------------
simple linear regression python code without using pandas and scikit library
with only one feature that is x
output is y
hypothesis function(predicted/model function) is h
parameters/coefficients theta0 and theta1
-------------------------prepared by Devaraj Nadiger------------------------------
'''
import numpy as np
import matplotlib.pyplot as plt
LEARNING_RATE=0.01
ITERATIONS=10
#input x and output y with hypothesis function h.
x=np.array([1,2,4,3,5])
y=np.array([1,3,3,2,5])
h=np.zeros(5)   # predicted values, one per sample
m=len(x)        # number of training samples
theta0=1        # intercept
theta1=1        # slope
a=LEARNING_RATE
count=0
while(count<ITERATIONS):
    # NOTE(review): cost is re-created every outer iteration and only
    # indices 0..m-1 (m=5) are ever written below, yet it is plotted over
    # ITERATIONS (10) points -- the tail of the plotted curve is always 0.
    # Confirm whether cost was meant to accumulate one value per iteration.
    cost=[0]*ITERATIONS
    # Compute predictions with the current parameters.
    for i in range(0,m):
        h[i]=theta0+theta1*x[i]
    print('hypothesis function,h(theta)=',h)
    for i in range(0,m):
        # Per-sample squared error; dividing by 2*m mimics the batch cost
        # formula even though only one residual is summed here.
        s=np.sum((h[i]-y[i])**2)
        j=s/(2*m)
        #storing each iterated cost function
        cost[i]=j
        # Parameter update per sample (stochastic-style), although h was
        # computed once per outer pass -- NOTE(review): h is not refreshed
        # after each update, so these gradients use stale predictions.
        theta0=theta0-(a/m)*np.sum(h[i]-y[i])
        theta1=theta1-(a/m)*np.sum((h[i]-y[i])*x[i])
        print('cost function,j(theta)=',j)
        print('theta0=',theta0)
        print('theta1=',theta1)
        print('\n')
    count=count+1
# Plot the (last-iteration) cost values against iteration index.
k=list(range(0,ITERATIONS,1))
plt.scatter(k,cost,color='r')
plt.plot(k,cost)
plt.title('cost function vs iterations ')
plt.xlabel('iterations')
plt.ylabel('cost function')
plt.show()
# Plot the fitted line against the training points.
plt.scatter(x,y,color='r')
plt.plot(x,h)
plt.title('predicted(curve) and actual(dots) output ')
plt.xlabel('data set')
plt.ylabel('output')
plt.show()
| [
"noreply@github.com"
] | devarajnadiger.noreply@github.com |
4ad57e5623a534930577b2344a2f132f793c8bb5 | 7dc295d045982180f89e2bca204148c715dcdd8c | /using_context/using_redirection.py | b3b04bf12790a109758b99bd39c6bd769572946a | [] | no_license | onionmccabbage/AdvancedPythonMay2021 | 0c582e2502672c5d0974a46da1f689ac44c41728 | 5194fb191f8d01521d54d6867084ae6845a3726c | refs/heads/main | 2023-04-20T07:32:29.119310 | 2021-05-13T14:53:01 | 2021-05-13T14:53:01 | 366,090,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,117 | py | # we can write a class to redirect the context (simple solution)
import sys # sys is in control of inputs and outputs
class Redirect:
    """Context manager that temporarily redirects sys.stdout.

    Example:
        with Redirect(file_obj) as r:
            print('goes to file_obj')

    Any object with a write() method works (an open file, io.StringIO,
    ...). The previous stdout is restored on exit even if the body raises.
    Fix over the original: __enter__ now returns self, so the
    ``with ... as r`` form binds the Redirect instance instead of None.
    """
    def __init__(self, new_stdout):
        self.new_stdout = new_stdout
    def __enter__(self):
        '''Swap in the replacement stream and return self.'''
        # store the current stdout so it can be restored later
        self.save_stdout = sys.stdout
        sys.stdout = self.new_stdout  # we have redefined a member of sys!
        return self
    def __exit__(self, exc_type, exc_value, exc_traceback):
        '''Restore the original stdout; exceptions propagate (returns None).'''
        sys.stdout = self.save_stdout
if __name__ == '__main__':
    # print(sys.stdout)
    # Demo: everything printed inside the inner `with` goes to mylog.txt.
    with open('mylog.txt', 'a') as fobj: # open a file access object
        with Redirect(fobj):
            print('this gets printed to our log file') # look - no file reference
    print('this will print to the console') # back to stdout default
| [
"noreply@github.com"
] | onionmccabbage.noreply@github.com |
ba5bf4df83c1a5e401c6ac4d470108fae419940f | 25bb4e760769cc483a20f27b6312698891dce034 | /python/Closures and Decorators/decorators-2-name-directory-English.py | 529c7f5310dd4be88cfeca5669d97fbd3c92bd2b | [] | no_license | rangaeeeee/codes-hackerrank | e13d22adff1ef74974e34251d9bfac6cfd36f2b0 | ce7fdf7f336c10164fd2f779d4ed3713849d7c2b | refs/heads/master | 2021-01-19T17:07:28.451983 | 2017-09-01T18:05:33 | 2017-09-01T18:05:33 | 101,049,197 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 190 | py | from operator import itemgetter
def person_lister(f):
    """Decorator: format each person record with ``f``, in ascending
    order of age (field index 2, compared numerically)."""
    def inner(people):
        ordered = sorted(people, key=lambda person: int(person[2]))
        return [f(person) for person in ordered]
    return inner
"rrangarajan.85@gmail.com"
] | rrangarajan.85@gmail.com |
014b7ef0141fa10bec4ed60ff73f77ca036a6182 | b354c139723db370b69083071a7799052408fc24 | /lab4/mlutils.py | 45db00d5d60efdd882ef5d571fafa434b465448c | [] | no_license | kristijanbartol/Machine-Learning | b985ac070f0da7323931597c9bec8fc4d549cfb6 | da4df84b969900de7439a373e1962882cd9b44bf | refs/heads/master | 2021-09-23T20:22:04.572582 | 2018-09-27T11:52:07 | 2018-09-27T11:52:07 | 108,148,447 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,994 | py | import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.datasets import make_classification
from sklearn.neighbors import KNeighborsClassifier
def plot_2d_clf_problem(X, y, h=None):
    '''
    Plots a two-dimensional labeled dataset (X,y) and, if function h(x) is given,
    the decision surfaces.

    h may accept a whole (n, 2) matrix or only a single point; both cases
    are handled below. Fix over the original: ``h != None`` is replaced by
    ``h is not None`` -- identity comparison is the correct idiom and
    avoids invoking ``__eq__`` on an arbitrary callable.
    '''
    assert X.shape[1] == 2, "Dataset is not two-dimensional"
    if h is not None:
        # Create a mesh to plot in
        r = 0.02  # mesh resolution
        x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
        y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
        xx, yy = np.meshgrid(np.arange(x_min, x_max, r),
                             np.arange(y_min, y_max, r))
        XX=np.c_[xx.ravel(), yy.ravel()]
        try:
            Z_test = h(XX)
            if Z_test.shape == ():
                # h returns a scalar when applied to a matrix; map explicitly
                Z = np.array(list(map(h,XX)))
            else :
                Z = Z_test
        except ValueError:
            # can't apply to a matrix; map explicitly
            Z = np.array(list(map(h,XX)))
        # Put the result into a color plot
        Z = Z.reshape(xx.shape)
        plt.contourf(xx, yy, Z, cmap=plt.cm.Pastel1)
    # Plot the dataset
    plt.scatter(X[:,0],X[:,1], c=y, cmap=plt.cm.tab20b, marker='o', s=50)
    plt.show()
def plot_2d_svc_problem(X, y, svc=None):
    '''
    Plots a two-dimensional labeled dataset (X,y) and, if SVC object is given,
    the decision surfaces (with margin as well).

    Fix over the original (and consistent with plot_2d_clf_problem):
    ``svc != None`` replaced by ``svc is not None``.
    '''
    assert X.shape[1] == 2, "Dataset is not two-dimensional"
    if svc is not None:
        # Create a mesh to plot in
        r = 0.03  # mesh resolution
        x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
        y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
        xx, yy = np.meshgrid(np.arange(x_min, x_max, r),
                             np.arange(y_min, y_max, r))
        XX=np.c_[xx.ravel(), yy.ravel()]
        # Evaluate the margin-aware level for every mesh point.
        Z = np.array([svc_predict(svc, x) for x in XX])
        # Put the result into a color plot
        Z = Z.reshape(xx.shape)
        plt.contourf(xx, yy, Z, cmap=plt.cm.Pastel1)
    # Plot the dataset
    plt.scatter(X[:,0],X[:,1], c=y, cmap=plt.cm.Paired, marker='o', s=50)
    #plt.show()
def svc_predict(svc, x) :
    """Map an SVM decision value to a plotting level: 0.5 inside the
    margin (|h| <= 1), otherwise h clipped to [-1, 1].

    NOTE(review): decision_function returns an array; the comparisons and
    min/max below rely on it being size-1 -- confirm callers always pass
    a single sample (plot_2d_svc_problem does).
    """
    h = svc.decision_function([x])
    if h >= -1 and h <= 1:
        return 0.5
    else:
        return max(-1, min(1, h))
def plot_error_surface(err, c_range=(0,5), g_range=(0,5)):
    """Plot an error surface over a (C, gamma) grid.

    Args:
        err: 2D array of errors, rows indexed by C, columns by gamma.
        c_range, g_range: half-open (start, stop) ranges used only to
            label the axis ticks (every 5th value).
    """
    c1, c2 = c_range[0], c_range[1]
    g1, g2 = g_range[0], g_range[1]
    plt.xticks(range(0,g2-g1+1,5),range(g1,g2,5)); plt.xlabel("gamma")
    plt.yticks(range(0,c2-c1+1,5),range(c1,c2,5)); plt.ylabel("C")
    p = plt.contour(err);
    # 1-err so brighter pixels mean lower error.
    plt.imshow(1-err, interpolation='bilinear', origin='lower',cmap=plt.cm.gray)
    plt.clabel(p, inline=1, fontsize=10)
    plt.show()
def knn_eval(n_instances=100, n_features=2, n_classes=2, n_informative=2,
             test_size=0.3, k_range=(1, 20), n_trials=100):
    """Average kNN train/test error over random synthetic datasets.

    For each of ``n_trials`` trials, generate a classification dataset,
    split it, and fit a KNeighborsClassifier for every k in
    ``k_range`` (inclusive). Errors are averaged across trials.

    Returns:
        (ks, best_k, train_errors, test_errors) where best_k is the k
        with the lowest mean test error.
    """
    train_errors = []
    test_errors = []
    ks = list(range(k_range[0], k_range[1] + 1))
    for i in range(0, n_trials):
        X, y = make_classification(n_instances, n_features, n_classes=n_classes,
                                   n_informative=n_informative, n_redundant=0, n_clusters_per_class=1)
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=test_size)
        train = []
        test = []
        for k in ks:
            knn = KNeighborsClassifier(n_neighbors=k)
            knn.fit(X_train, y_train)
            # score() is accuracy, so 1 - score is the error rate.
            train.append(1 - knn.score(X_train, y_train))
            test.append(1 - knn.score(X_test, y_test))
        train_errors.append(train)
        test_errors.append(test)
    # Average over trials: one mean error per k.
    train_errors = np.mean(np.array(train_errors), axis=0)
    test_errors = np.mean(np.array(test_errors), axis=0)
    best_k = ks[np.argmin(test_errors)]
    return ks, best_k, train_errors, test_errors
"kristijan.bartol@gmail.com"
] | kristijan.bartol@gmail.com |
90a27c3b480f0cc80ed8ee801bdadd35c64a12c3 | 1a356165035504853ea8c6b721c547d2cea0c18d | /Player/greatest.py | c3d077633cbd75d104f4ac68e6d4f83944d47d7c | [] | no_license | Anjali1409/Python-Programming | 93f128c7b724486505a47e55bc71bdd9872b4a3e | 7af4bb946f17cb7f0ef1bb1c21fc149dc3af8285 | refs/heads/master | 2021-04-28T04:05:36.276091 | 2018-06-10T09:46:20 | 2018-06-10T09:46:20 | 122,153,429 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 117 | py | i=int(input())
j=int(input())
k=int(input())
if(i>j and i>k):
print(i)
elif(j>i and j>k):
print(j)
else:
print(k)
| [
"noreply@github.com"
] | Anjali1409.noreply@github.com |
7a65e54865e002c36b16fb0dc338699d283732c9 | 80b489a53f7f211a09920affa5998a0724d83e71 | /webapp/venv/lib/python2.7/site-packages/alembic/ddl/mysql.py | 96f42f382d97e602f22fcb1113185859ced6fda1 | [
"MIT"
] | permissive | mustafatasdemir/apkinspector | d9ec9d85da5e0014edaf0d98119561bf3f87dffc | 1bd0d044b7daef4efda21c985393f8d73722a074 | refs/heads/master | 2016-09-06T03:25:49.212735 | 2014-12-12T03:52:56 | 2014-12-12T03:52:56 | 24,392,752 | 3 | 0 | null | 2014-10-28T03:57:43 | 2014-09-23T23:10:55 | Java | UTF-8 | Python | false | false | 8,123 | py | from sqlalchemy.ext.compiler import compiles
from sqlalchemy import types as sqltypes
from sqlalchemy import schema
from ..compat import string_types
from .. import util
from .impl import DefaultImpl
from .base import ColumnNullable, ColumnName, ColumnDefault, \
ColumnType, AlterColumn, format_column_name, \
format_server_default
from .base import alter_table
class MySQLImpl(DefaultImpl):
    """Alembic DDL implementation for the MySQL dialect.

    MySQL has no per-attribute ALTER COLUMN forms, so alter_column()
    dispatches to combined CHANGE / MODIFY / ALTER ... DEFAULT statements
    instead of the generic one-attribute constructs.
    """
    __dialect__ = 'mysql'
    transactional_ddl = False
    def alter_column(self, table_name, column_name,
                     nullable=None,
                     server_default=False,
                     name=None,
                     type_=None,
                     schema=None,
                     autoincrement=None,
                     existing_type=None,
                     existing_server_default=None,
                     existing_nullable=None,
                     existing_autoincrement=None
                     ):
        # Renaming requires CHANGE, which must restate the full column
        # spec; fill unspecified attributes from the existing_* values.
        if name is not None:
            self._exec(
                MySQLChangeColumn(
                    table_name, column_name,
                    schema=schema,
                    newname=name,
                    nullable=nullable if nullable is not None else
                    existing_nullable
                    if existing_nullable is not None
                    else True,
                    type_=type_ if type_ is not None else existing_type,
                    default=server_default if server_default is not False
                    else existing_server_default,
                    autoincrement=autoincrement if autoincrement is not None
                    else existing_autoincrement
                )
            )
        # Any type / nullability / autoincrement change (no rename) uses
        # MODIFY, which likewise needs the full column spec.
        elif nullable is not None or \
                type_ is not None or \
                autoincrement is not None:
            self._exec(
                MySQLModifyColumn(
                    table_name, column_name,
                    schema=schema,
                    newname=name if name is not None else column_name,
                    nullable=nullable if nullable is not None else
                    existing_nullable
                    if existing_nullable is not None
                    else True,
                    type_=type_ if type_ is not None else existing_type,
                    default=server_default if server_default is not False
                    else existing_server_default,
                    autoincrement=autoincrement if autoincrement is not None
                    else existing_autoincrement
                )
            )
        # Only the default changed: ALTER COLUMN ... SET/DROP DEFAULT.
        elif server_default is not False:
            self._exec(
                MySQLAlterDefault(
                    table_name, column_name, server_default,
                    schema=schema,
                )
            )
    def correct_for_autogen_constraints(self, conn_unique_constraints,
                                        conn_indexes,
                                        metadata_unique_constraints,
                                        metadata_indexes):
        # Filter out indexes MySQL creates implicitly for FK columns so
        # autogenerate does not emit spurious drop/add operations.
        removed = set()
        for idx in list(conn_indexes):
            # MySQL puts implicit indexes on FK columns, even if
            # composite and even if MyISAM, so can't check this too easily
            if idx.name == idx.columns.keys()[0]:
                conn_indexes.remove(idx)
                removed.add(idx.name)
        # then remove indexes from the "metadata_indexes"
        # that we've removed from reflected, otherwise they come out
        # as adds (see #202)
        for idx in list(metadata_indexes):
            if idx.name in removed:
                metadata_indexes.remove(idx)
class MySQLAlterDefault(AlterColumn):
    """DDL element for ``ALTER TABLE .. ALTER COLUMN .. SET/DROP DEFAULT``.

    ``default`` of None means DROP DEFAULT (see _mysql_alter_default).
    """
    def __init__(self, name, column_name, default, schema=None):
        # super(AlterColumn, self) deliberately skips AlterColumn's own
        # __init__ and calls the class above it in the MRO.
        super(AlterColumn, self).__init__(name, schema=schema)
        self.column_name = column_name
        self.default = default
class MySQLChangeColumn(AlterColumn):
    """DDL element for ``ALTER TABLE .. CHANGE`` (rename + full respec).

    MySQL's CHANGE/MODIFY must restate the complete column definition,
    so the existing type is mandatory.
    """
    def __init__(self, name, column_name, schema=None,
                 newname=None,
                 type_=None,
                 nullable=None,
                 default=False,
                 autoincrement=None):
        # super(AlterColumn, self) deliberately skips AlterColumn's own
        # __init__ and calls the class above it in the MRO.
        super(AlterColumn, self).__init__(name, schema=schema)
        self.column_name = column_name
        self.nullable = nullable
        self.newname = newname
        self.default = default
        self.autoincrement = autoincrement
        if type_ is None:
            raise util.CommandError(
                "All MySQL CHANGE/MODIFY COLUMN operations "
                "require the existing type."
            )
        self.type_ = sqltypes.to_instance(type_)
class MySQLModifyColumn(MySQLChangeColumn):
    """Same payload as MySQLChangeColumn but rendered as MODIFY
    (attribute change without rename) -- see _mysql_modify_column."""
    pass
@compiles(ColumnNullable, 'mysql')
@compiles(ColumnName, 'mysql')
@compiles(ColumnDefault, 'mysql')
@compiles(ColumnType, 'mysql')
def _mysql_doesnt_support_individual(element, compiler, **kw):
    # MySQL can only alter these attributes through the combined
    # CHANGE/MODIFY statements, so the generic single-attribute
    # constructs are rejected outright.
    raise NotImplementedError(
        "Individual alter column constructs not supported by MySQL"
    )
@compiles(MySQLAlterDefault, "mysql")
def _mysql_alter_default(element, compiler, **kw):
    """Render ``ALTER TABLE .. ALTER COLUMN .. SET DEFAULT ..`` or
    ``.. DROP DEFAULT`` when the element's default is None."""
    return "%s ALTER COLUMN %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        format_column_name(compiler, element.column_name),
        "SET DEFAULT %s" % format_server_default(compiler, element.default)
        if element.default is not None
        else "DROP DEFAULT"
    )
@compiles(MySQLModifyColumn, "mysql")
def _mysql_modify_column(element, compiler, **kw):
    """Render ``ALTER TABLE .. MODIFY <col> <full column spec>``."""
    return "%s MODIFY %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        format_column_name(compiler, element.column_name),
        _mysql_colspec(
            compiler,
            nullable=element.nullable,
            server_default=element.default,
            type_=element.type_,
            autoincrement=element.autoincrement
        ),
    )
@compiles(MySQLChangeColumn, "mysql")
def _mysql_change_column(element, compiler, **kw):
    """Render ``ALTER TABLE .. CHANGE <old> <new> <full column spec>``
    (rename plus complete column re-specification)."""
    return "%s CHANGE %s %s %s" % (
        alter_table(compiler, element.table_name, element.schema),
        format_column_name(compiler, element.column_name),
        format_column_name(compiler, element.newname),
        _mysql_colspec(
            compiler,
            nullable=element.nullable,
            server_default=element.default,
            type_=element.type_,
            autoincrement=element.autoincrement
        ),
    )
def _render_value(compiler, expr):
    """Render a server default: quote plain strings, compile SQL expressions."""
    # NOTE(review): plain strings are single-quoted without escaping;
    # callers are expected to pass trusted literals.
    if not isinstance(expr, string_types):
        return compiler.sql_compiler.process(expr)
    return "'%s'" % expr
def _mysql_colspec(compiler, nullable, server_default, type_,
                   autoincrement):
    """Build the full MySQL column specification string."""
    parts = [
        compiler.dialect.type_compiler.process(type_),
        "NULL" if nullable else "NOT NULL",
    ]
    if autoincrement:
        parts.append("AUTO_INCREMENT")
    # False means "no default supplied"; None means "no default to render".
    if server_default is not False and server_default is not None:
        parts.append("DEFAULT %s" % _render_value(compiler, server_default))
    return " ".join(parts)
@compiles(schema.DropConstraint, "mysql")
def _mysql_drop_constraint(element, compiler, **kw):
    """Reject DROP CONSTRAINT forms that MySQL cannot execute.

    Only foreign key / primary key / unique constraints have a concrete
    DROP syntax in MySQL; anything else raises instead of emitting
    invalid SQL.
    """
    constraint = element.element
    supported_types = (schema.ForeignKeyConstraint,
                       schema.PrimaryKeyConstraint,
                       schema.UniqueConstraint)
    if isinstance(constraint, supported_types):
        return compiler.visit_drop_constraint(element, **kw)
    if isinstance(constraint, schema.CheckConstraint):
        raise NotImplementedError(
            "MySQL does not support CHECK constraints.")
    raise NotImplementedError(
        "No generic 'DROP CONSTRAINT' in MySQL - "
        "please specify constraint type")
| [
"mustafa.tasdemir@hotmail.com.tr"
] | mustafa.tasdemir@hotmail.com.tr |
b6a695509f4c932fce5594d2924313a6581f08bd | 8afb5afd38548c631f6f9536846039ef6cb297b9 | /_ORGS/NPM/node/deps/v8/tools/release/test_scripts.py | 7cf5d141301c575c3186e2488597b510374b586d | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause",
"SunPro"
] | permissive | bgoonz/UsefulResourceRepo2.0 | d87588ffd668bb498f7787b896cc7b20d83ce0ad | 2cb4b45dd14a230aa0e800042e893f8dfb23beda | refs/heads/master | 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 | MIT | 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null | UTF-8 | Python | false | false | 34,860 | py | #!/usr/bin/env python
# Copyright 2013 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# for py2/py3 compatibility
from __future__ import print_function
import os
import shutil
import tempfile
import traceback
import unittest
import auto_push
from auto_push import LastReleaseBailout
import auto_roll
import common_includes
from common_includes import *
import create_release
from create_release import *
import merge_to_branch
from merge_to_branch import MergeToBranch
from auto_tag import AutoTag
import roll_merge
from roll_merge import RollMerge
# Configuration handed to the scripts under test.  All paths point into
# /tmp or a per-test temp dir so no real checkout is modified.
# DEFAULT_CWD is filled in by ScriptTest.setUp.
TEST_CONFIG = {
  "DEFAULT_CWD": None,
  "BRANCHNAME": "test-prepare-push",
  "PERSISTFILE_BASENAME": "/tmp/test-create-releases-tempfile",
  "PATCH_FILE": "/tmp/test-v8-create-releases-tempfile-tempfile-patch",
  "COMMITMSG_FILE": "/tmp/test-v8-create-releases-tempfile-commitmsg",
  "CHROMIUM": "/tmp/test-create-releases-tempfile-chromium",
  "SETTINGS_LOCATION": None,
  "ALREADY_MERGING_SENTINEL_FILE":
      "/tmp/test-merge-to-branch-tempfile-already-merging",
  "TEMPORARY_PATCH_FILE": "/tmp/test-merge-to-branch-tempfile-temporary-patch",
}
# Author/reviewer flags shared by the auto_push / auto_roll invocations.
AUTO_PUSH_ARGS = [
  "-a", "author@chromium.org",
  "-r", "reviewer@chromium.org",
]
class ToplevelTest(unittest.TestCase):
  """Tests for pure version-tag helpers that need no git/filesystem mocks."""

  def testSaniniziteVersionTags(self):
    """SanitizeVersionTag strips a tags/ prefix and rejects non-versions."""
    # assertEqual: assertEquals is a deprecated unittest alias.
    self.assertEqual("4.8.230", SanitizeVersionTag("4.8.230"))
    self.assertEqual("4.8.230", SanitizeVersionTag("tags/4.8.230"))
    self.assertEqual(None, SanitizeVersionTag("candidate"))

  def testNormalizeVersionTags(self):
    """NormalizeVersionTags strips prefixes and drops non-version tags."""
    # Renamed from "input" so the builtin is not shadowed.
    raw_tags = ["4.8.230",
                "tags/4.8.230",
                "tags/4.8.224.1",
                "4.8.224.1",
                "4.8.223.1",
                "tags/4.8.223",
                "tags/4.8.231",
                "candidates"]
    expected = ["4.8.230",
                "4.8.230",
                "4.8.224.1",
                "4.8.224.1",
                "4.8.223.1",
                "4.8.223",
                "4.8.231",
                ]
    self.assertEqual(expected, NormalizeVersionTags(raw_tags))
def Cmd(*args, **kwargs):
  """Return a shell-command expectation for SimpleMock.

  The last positional argument is the canned return value; the earlier
  positional arguments form the expected command line.
  """
  expectation = dict(
      name="command",
      args=args,
      ret=args[-1],
      cb=kwargs.get("cb"),
      cwd=kwargs.get("cwd", TEST_CONFIG["DEFAULT_CWD"]),
  )
  return expectation
def RL(text, cb=None):
  """Return a readline (fake interactive input) test expectation."""
  expectation = dict(name="readline", args=[], ret=text, cb=cb, cwd=None)
  return expectation
def URL(*args, **kwargs):
  """Return a readurl test expectation.

  The last positional argument is the canned response; the earlier
  positional arguments are the expected url (and optional params).
  """
  expectation = dict(
      name="readurl",
      args=args[:-1],
      ret=args[-1],
      cb=kwargs.get("cb"),
      cwd=None,
  )
  return expectation
class SimpleMock(object):
  """Scripted fake for shell / readline / url side effects.

  A test installs a "recipe" (ordered list of expectation dicts built by
  Cmd/RL/URL) and every Call() consumes the next entry, verifying name,
  arguments and working directory before producing the canned result.
  """

  def __init__(self):
    self._recipe = []
    self._index = -1

  def Expect(self, recipe):
    """Install the ordered list of expected calls."""
    self._recipe = recipe

  def Call(self, name, *args, **kwargs):  # pragma: no cover
    """Match one call against the next recipe entry and return its value."""
    self._index += 1
    try:
      expected = self._recipe[self._index]
    except IndexError:
      raise NoRetryException("Calling %s %s" % (name, " ".join(args)))

    if not isinstance(expected, dict):
      raise NoRetryException("Found wrong expectation type for %s %s" %
                             (name, " ".join(args)))

    if expected["name"] != name:
      raise NoRetryException("Expected action: %s %s - Actual: %s" %
                             (expected["name"], expected["args"], name))

    # The working directory of the call must match the expected one.
    if expected["cwd"] != kwargs.get("cwd"):
      raise NoRetryException("Expected cwd: %s in %s %s - Actual: %s" %
                             (expected["cwd"],
                              expected["name"],
                              expected["args"],
                              kwargs.get("cwd")))

    # Fewer actual than expected arguments is fine (prefix match), but a
    # call may never supply more arguments than the expectation lists.
    if len(args) > len(expected['args']):
      raise NoRetryException("When calling %s with arguments, the "
          "expectations must consist of at least as many arguments." % name)

    # Compare expected and actual arguments pairwise.
    for expected_arg, actual_arg in zip(expected['args'], args):
      if expected_arg != actual_arg:
        raise NoRetryException("Expected: %s - Actual: %s" %
                               (expected_arg, actual_arg))

    # Run the optional callback used to inspect state at call time.
    if expected['cb']:
      try:
        expected['cb']()
      except:
        tb = traceback.format_exc()
        raise NoRetryException("Caught exception from callback: %s" % tb)

    # A canned exception is raised instead of returned.
    if isinstance(expected['ret'], Exception):
      raise expected['ret']
    return expected['ret']

  def AssertFinished(self):  # pragma: no cover
    """Fail if not every recipe entry was consumed."""
    if self._index < len(self._recipe) - 1:
      raise NoRetryException("Called mock too seldom: %d vs. %d" %
                             (self._index, len(self._recipe)))
class ScriptTest(unittest.TestCase):
  """Driver fixture running release scripts against a scripted SimpleMock."""
  def MakeEmptyTempFile(self):
    # Create an empty temp file, register it for tearDown cleanup,
    # and return its path.
    handle, name = tempfile.mkstemp()
    os.close(handle)
    self._tmp_files.append(name)
    return name
def MakeEmptyTempDirectory(self):
name = tempfile.mkdtemp()
self._tmp_files.append(name)
return name
def WriteFakeVersionFile(self, major=3, minor=22, build=4, patch=0):
version_file = os.path.join(TEST_CONFIG["DEFAULT_CWD"], VERSION_FILE)
if not os.path.exists(os.path.dirname(version_file)):
os.makedirs(os.path.dirname(version_file))
with open(version_file, "w") as f:
f.write(" // Some line...\n")
f.write("\n")
f.write("#define V8_MAJOR_VERSION %s\n" % major)
f.write("#define V8_MINOR_VERSION %s\n" % minor)
f.write("#define V8_BUILD_NUMBER %s\n" % build)
f.write("#define V8_PATCH_LEVEL %s\n" % patch)
f.write(" // Some line...\n")
f.write("#define V8_IS_CANDIDATE_VERSION 0\n")
  def WriteFakeWatchlistsFile(self):
    """Write a WATCHLISTS file whose v8-merges entry is commented out."""
    watchlists_file = os.path.join(TEST_CONFIG["DEFAULT_CWD"], WATCHLISTS_FILE)
    if not os.path.exists(os.path.dirname(watchlists_file)):
      os.makedirs(os.path.dirname(watchlists_file))
    with open(watchlists_file, "w") as f:
      # testCreateRelease verifies that the release script uncomments the
      # mailing-list line in this content.
      content = """
    'merges': [
      # Only enabled on branches created with tools/release/create_release.py
      # 'v8-merges@googlegroups.com',
    ],
"""
      f.write(content)
def MakeStep(self):
"""Convenience wrapper."""
options = ScriptsBase(TEST_CONFIG, self, self._state).MakeOptions([])
return MakeStep(step_class=Step, state=self._state,
config=TEST_CONFIG, side_effect_handler=self,
options=options)
def RunStep(self, script=CreateRelease, step_class=Step, args=None):
"""Convenience wrapper."""
args = args if args is not None else ["-m", "-a=author", "-r=reviewer", ]
return script(TEST_CONFIG, self, self._state).RunSteps([step_class], args)
def Call(self, fun, *args, **kwargs):
print("Calling %s with %s and %s" % (str(fun), str(args), str(kwargs)))
def Command(self, cmd, args="", prefix="", pipe=True, cwd=None):
print("%s %s" % (cmd, args))
print("in %s" % cwd)
return self._mock.Call("command", cmd + " " + args, cwd=cwd)
  def ReadLine(self):
    # Side-effect handler hook: scripted interactive user input.
    return self._mock.Call("readline")
def ReadURL(self, url, params):
if params is not None:
return self._mock.Call("readurl", url, params)
else:
return self._mock.Call("readurl", url)
  def Sleep(self, seconds):
    # Side-effect handler hook: skip real sleeping to keep tests fast.
    pass
  def GetUTCStamp(self):
    # Side-effect handler hook: fixed timestamp for deterministic tests.
    return "1000000"
  def Expect(self, *args):
    """Convenience wrapper forwarding a recipe to the SimpleMock."""
    self._mock.Expect(*args)
  def setUp(self):
    # Fresh mock, temp-file list, script state and working directory for
    # every test.
    self._mock = SimpleMock()
    self._tmp_files = []
    self._state = {}
    TEST_CONFIG["DEFAULT_CWD"] = self.MakeEmptyTempDirectory()
def tearDown(self):
if os.path.exists(TEST_CONFIG["PERSISTFILE_BASENAME"]):
shutil.rmtree(TEST_CONFIG["PERSISTFILE_BASENAME"])
# Clean up temps. Doesn't work automatically.
for name in self._tmp_files:
if os.path.isfile(name):
os.remove(name)
if os.path.isdir(name):
shutil.rmtree(name)
self._mock.AssertFinished()
  def testGitMock(self):
    """Sanity check: the mock matches scripted git invocations."""
    self.Expect([Cmd("git --version", "git version 1.2.3"),
                 Cmd("git dummy", "")])
    self.assertEquals("git version 1.2.3", self.MakeStep().Git("--version"))
    self.assertEquals("", self.MakeStep().Git("dummy"))
  def testCommonPrepareDefault(self):
    """Answering 'Y' deletes a stale work branch during PrepareBranch."""
    self.Expect([
      Cmd("git status -s -uno", ""),
      Cmd("git checkout -f origin/master", ""),
      Cmd("git fetch", ""),
      Cmd("git branch", " branch1\n* %s" % TEST_CONFIG["BRANCHNAME"]),
      RL("Y"),
      Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
    ])
    self.MakeStep().CommonPrepare()
    self.MakeStep().PrepareBranch()
  def testCommonPrepareNoConfirm(self):
    """Answering 'n' to branch deletion makes PrepareBranch raise."""
    self.Expect([
      Cmd("git status -s -uno", ""),
      Cmd("git checkout -f origin/master", ""),
      Cmd("git fetch", ""),
      Cmd("git branch", " branch1\n* %s" % TEST_CONFIG["BRANCHNAME"]),
      RL("n"),
    ])
    self.MakeStep().CommonPrepare()
    self.assertRaises(Exception, self.MakeStep().PrepareBranch)
  def testCommonPrepareDeleteBranchFailure(self):
    """A failing 'git branch -D' (ret None) propagates as an exception."""
    self.Expect([
      Cmd("git status -s -uno", ""),
      Cmd("git checkout -f origin/master", ""),
      Cmd("git fetch", ""),
      Cmd("git branch", " branch1\n* %s" % TEST_CONFIG["BRANCHNAME"]),
      RL("Y"),
      Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], None),
    ])
    self.MakeStep().CommonPrepare()
    self.assertRaises(Exception, self.MakeStep().PrepareBranch)
  def testInitialEnvironmentChecks(self):
    """Environment check passes when a .git dir and $EDITOR exist."""
    TextToFile("", os.path.join(TEST_CONFIG["DEFAULT_CWD"], ".git"))
    os.environ["EDITOR"] = "vi"
    self.Expect([
      Cmd("which vi", "/usr/bin/vi"),
    ])
    self.MakeStep().InitialEnvironmentChecks(TEST_CONFIG["DEFAULT_CWD"])
  def testTagTimeout(self):
    """Tagging gives up after repeated fetches find no tagged commit."""
    self.Expect([
      Cmd("git fetch", ""),
      Cmd("git log -1 --format=%H --grep=\"Title\" origin/tag_name", ""),
      Cmd("git fetch", ""),
      Cmd("git log -1 --format=%H --grep=\"Title\" origin/tag_name", ""),
      Cmd("git fetch", ""),
      Cmd("git log -1 --format=%H --grep=\"Title\" origin/tag_name", ""),
      Cmd("git fetch", ""),
      Cmd("git log -1 --format=%H --grep=\"Title\" origin/tag_name", ""),
    ])
    args = ["--branch", "candidates", "ab12345"]
    self._state["version"] = "tag_name"
    self._state["commit_title"] = "Title"
    self.assertRaises(Exception,
                      lambda: self.RunStep(RollMerge, TagRevision, args))
  def testReadAndPersistVersion(self):
    """ReadAndPersistVersion parses the fake version file into state."""
    self.WriteFakeVersionFile(build=5)
    step = self.MakeStep()
    step.ReadAndPersistVersion()
    self.assertEquals("3", step["major"])
    self.assertEquals("22", step["minor"])
    self.assertEquals("5", step["build"])
    self.assertEquals("0", step["patch"])
  def testRegex(self):
    """Spot checks for the regex substitutions used in message munging."""
    self.assertEqual("(issue 321)",
                     re.sub(r"BUG=v8:(.*)$", r"(issue \1)", "BUG=v8:321"))
    self.assertEqual("(Chromium issue 321)",
                     re.sub(r"BUG=(.*)$", r"(Chromium issue \1)", "BUG=321"))
    # Normalize indentation: tabs become spaces, too little/too much leading
    # whitespace is corrected, trailing whitespace is stripped.
    cl = " too little\n\ttab\ttab\n too much\n trailing "
    cl = MSub(r"\t", r" ", cl)
    cl = MSub(r"^ {1,7}([^ ])", r" \1", cl)
    cl = MSub(r"^ {9,80}([^ ])", r" \1", cl)
    cl = MSub(r" +$", r"", cl)
    self.assertEqual(" too little\n"
                     " tab tab\n"
                     " too much\n"
                     " trailing", cl)
    self.assertEqual("//\n#define V8_BUILD_NUMBER 3\n",
                     MSub(r"(?<=#define V8_BUILD_NUMBER)(?P<space>\s+)\d*$",
                          r"\g<space>3",
                          "//\n#define V8_BUILD_NUMBER 321\n"))
TAGS = """
4425.0
0.0.0.0
3.9.6
3.22.4
test_tag
"""
# Version as tag: 3.22.4.0. Version on master: 3.22.6.
# Make sure that the latest version is 3.22.6.0.
  def testIncrementVersion(self):
    """The new version is the successor of the max of tags and master."""
    self.Expect([
      Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
      Cmd("git tag", self.TAGS),
      Cmd("git checkout -f origin/master -- include/v8-version.h",
          "", cb=lambda: self.WriteFakeVersionFile(3, 22, 6)),
    ])
    self.RunStep(CreateRelease, IncrementVersion)
    self.assertEquals("3", self._state["new_major"])
    self.assertEquals("22", self._state["new_minor"])
    self.assertEquals("7", self._state["new_build"])
    self.assertEquals("0", self._state["new_patch"])
  def testBootstrapper(self):
    """A missing checkout is bootstrapped via "fetch v8"."""
    work_dir = self.MakeEmptyTempDirectory()
    class FakeScript(ScriptsBase):
      def _Steps(self):
        return []
    # Use the test configuration without the fake testing default work dir.
    fake_config = dict(TEST_CONFIG)
    del(fake_config["DEFAULT_CWD"])
    self.Expect([
      Cmd("fetch v8", "", cwd=work_dir),
    ])
    FakeScript(fake_config, self).Run(["--work-dir", work_dir])
  def testCreateRelease(self):
    """Full happy path of create_release.py against scripted git output."""
    TextToFile("", os.path.join(TEST_CONFIG["DEFAULT_CWD"], ".git"))
    # The version file on master has build level 5.
    self.WriteFakeVersionFile(build=5)
    commit_msg = """Version 3.22.5
TBR=reviewer@chromium.org"""
    def CheckVersionCommit():
      # Verify both the commit message and the rewritten version file.
      commit = FileToText(TEST_CONFIG["COMMITMSG_FILE"])
      self.assertEquals(commit_msg, commit)
      version = FileToText(
          os.path.join(TEST_CONFIG["DEFAULT_CWD"], VERSION_FILE))
      self.assertTrue(re.search(r"#define V8_MINOR_VERSION\s+22", version))
      self.assertTrue(re.search(r"#define V8_BUILD_NUMBER\s+5", version))
      self.assertFalse(re.search(r"#define V8_BUILD_NUMBER\s+6", version))
      self.assertTrue(re.search(r"#define V8_PATCH_LEVEL\s+0", version))
      self.assertTrue(
          re.search(r"#define V8_IS_CANDIDATE_VERSION\s+0", version))
    expectations = [
      Cmd("git fetch origin +refs/heads/*:refs/heads/*", ""),
      Cmd("git checkout -f origin/master", "", cb=self.WriteFakeWatchlistsFile),
      Cmd("git branch", ""),
      Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
      Cmd("git tag", self.TAGS),
      Cmd("git checkout -f origin/master -- include/v8-version.h",
          "", cb=self.WriteFakeVersionFile),
      Cmd("git log -1 --format=%H 3.22.4", "release_hash\n"),
      Cmd("git log -1 --format=%s release_hash", "Version 3.22.4\n"),
      Cmd("git log -1 --format=%H release_hash^", "abc3\n"),
      Cmd("git log --format=%H abc3..push_hash", "rev1\n"),
      Cmd("git push origin push_hash:refs/heads/3.22.5", ""),
      Cmd("git reset --hard origin/master", ""),
      Cmd("git new-branch work-branch --upstream origin/3.22.5", ""),
      Cmd("git checkout -f 3.22.4 -- include/v8-version.h", "",
          cb=self.WriteFakeVersionFile),
      Cmd("git commit -aF \"%s\"" % TEST_CONFIG["COMMITMSG_FILE"], "",
          cb=CheckVersionCommit),
      Cmd("git cl upload --send-mail "
          "-f --bypass-hooks --no-autocc --message-file "
          "\"%s\"" % TEST_CONFIG["COMMITMSG_FILE"], ""),
      Cmd("git cl land --bypass-hooks -f", ""),
      Cmd("git fetch", ""),
      Cmd("git log -1 --format=%H --grep="
          "\"Version 3.22.5\" origin/3.22.5", "hsh_to_tag"),
      Cmd("git tag 3.22.5 hsh_to_tag", ""),
      Cmd("git push origin refs/tags/3.22.5:refs/tags/3.22.5", ""),
      Cmd("git checkout -f origin/master", ""),
      Cmd("git branch", "* master\n work-branch\n"),
      Cmd("git branch -D work-branch", ""),
      Cmd("git gc", ""),
    ]
    self.Expect(expectations)
    args = ["-a", "author@chromium.org",
            "-r", "reviewer@chromium.org",
            "--revision", "push_hash"]
    CreateRelease(TEST_CONFIG, self).Run(args)
    # Note: The version file is on build number 5 again in the end of this test
    # since the git command that merges to master is mocked out.
    # Check for correct content of the WATCHLISTS file
    watchlists_content = FileToText(os.path.join(TEST_CONFIG["DEFAULT_CWD"],
                                    WATCHLISTS_FILE))
    expected_watchlists_content = """
    'merges': [
      # Only enabled on branches created with tools/release/create_release.py
      'v8-merges@googlegroups.com',
    ],
"""
    self.assertEqual(watchlists_content, expected_watchlists_content)
C_V8_22624_LOG = """V8 CL.
git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@22624 123
"""
C_V8_123455_LOG = """V8 CL.
git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@123455 123
"""
C_V8_123456_LOG = """V8 CL.
git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@123456 123
"""
ROLL_COMMIT_MSG = """Update V8 to version 3.22.4.
Summary of changes available at:
https://chromium.googlesource.com/v8/v8/+log/last_rol..roll_hsh
Please follow these instructions for assigning/CC'ing issues:
https://v8.dev/docs/triage-issues
Please close rolling in case of a roll revert:
https://v8-roll.appspot.com/
This only works with a Google account.
CQ_INCLUDE_TRYBOTS=luci.chromium.try:linux-blink-rel
CQ_INCLUDE_TRYBOTS=luci.chromium.try:linux_optional_gpu_tests_rel
CQ_INCLUDE_TRYBOTS=luci.chromium.try:mac_optional_gpu_tests_rel
CQ_INCLUDE_TRYBOTS=luci.chromium.try:win_optional_gpu_tests_rel
CQ_INCLUDE_TRYBOTS=luci.chromium.try:android_optional_gpu_tests_rel
R=reviewer@chromium.org"""
# Snippet from the original DEPS file.
FAKE_DEPS = """
vars = {
"v8_revision": "last_roll_hsh",
}
deps = {
"src/v8":
(Var("googlecode_url") % "v8") + "/" + Var("v8_branch") + "@" +
Var("v8_revision"),
}
"""
  def testChromiumRollUpToDate(self):
    """No roll CL is created when Chromium already has the newest V8."""
    TEST_CONFIG["CHROMIUM"] = self.MakeEmptyTempDirectory()
    json_output_file = os.path.join(TEST_CONFIG["CHROMIUM"], "out.json")
    TextToFile(self.FAKE_DEPS, os.path.join(TEST_CONFIG["CHROMIUM"], "DEPS"))
    chrome_dir = TEST_CONFIG["CHROMIUM"]
    self.Expect([
      Cmd("git fetch origin", ""),
      Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
      Cmd("gclient getdep -r src/v8", "last_roll_hsh", cwd=chrome_dir),
      Cmd("git describe --tags last_roll_hsh", "3.22.4"),
      Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
      Cmd("git rev-list --max-age=395200 --tags",
          "bad_tag\nroll_hsh\nhash_123"),
      Cmd("git describe --tags bad_tag", ""),
      Cmd("git describe --tags roll_hsh", "3.22.4"),
      Cmd("git describe --tags hash_123", "3.22.3"),
      Cmd("git describe --tags roll_hsh", "3.22.4"),
      Cmd("git describe --tags hash_123", "3.22.3"),
    ])
    result = auto_roll.AutoRoll(TEST_CONFIG, self).Run(
        AUTO_PUSH_ARGS + [
          "-c", TEST_CONFIG["CHROMIUM"],
          "--json-output", json_output_file])
    self.assertEquals(0, result)
    json_output = json.loads(FileToText(json_output_file))
    self.assertEquals("up_to_date", json_output["monitoring_state"])
  def testChromiumRoll(self):
    """A newer V8 tag produces a roll CL that updates Chromium's DEPS."""
    # Setup fake directory structures.
    TEST_CONFIG["CHROMIUM"] = self.MakeEmptyTempDirectory()
    json_output_file = os.path.join(TEST_CONFIG["CHROMIUM"], "out.json")
    TextToFile(self.FAKE_DEPS, os.path.join(TEST_CONFIG["CHROMIUM"], "DEPS"))
    TextToFile("", os.path.join(TEST_CONFIG["CHROMIUM"], ".git"))
    chrome_dir = TEST_CONFIG["CHROMIUM"]
    os.makedirs(os.path.join(chrome_dir, "v8"))
    def WriteDeps():
      # Simulates gclient setdep rewriting the pinned revision.
      TextToFile("Some line\n \"v8_revision\": \"22624\",\n some line",
                 os.path.join(chrome_dir, "DEPS"))
    expectations = [
      Cmd("git fetch origin", ""),
      Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
      Cmd("gclient getdep -r src/v8", "last_roll_hsh", cwd=chrome_dir),
      Cmd("git describe --tags last_roll_hsh", "3.22.3.1"),
      Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
      Cmd("git rev-list --max-age=395200 --tags",
          "bad_tag\nroll_hsh\nhash_123"),
      Cmd("git describe --tags bad_tag", ""),
      Cmd("git describe --tags roll_hsh", "3.22.4"),
      Cmd("git describe --tags hash_123", "3.22.3"),
      Cmd("git describe --tags roll_hsh", "3.22.4"),
      Cmd("git log -1 --format=%s roll_hsh", "Version 3.22.4\n"),
      Cmd("git describe --tags roll_hsh", "3.22.4"),
      Cmd("git describe --tags last_roll_hsh", "3.22.2.1"),
      Cmd("git status -s -uno", "", cwd=chrome_dir),
      Cmd("git checkout -f main", "", cwd=chrome_dir),
      Cmd("git branch", "", cwd=chrome_dir),
      Cmd("git pull", "", cwd=chrome_dir),
      Cmd("git fetch origin", ""),
      Cmd("git new-branch work-branch", "", cwd=chrome_dir),
      Cmd("gclient setdep -r src/v8@roll_hsh", "", cb=WriteDeps,
          cwd=chrome_dir),
      Cmd(("git commit -am \"%s\" "
           "--author \"author@chromium.org <author@chromium.org>\"" %
           self.ROLL_COMMIT_MSG),
          "", cwd=chrome_dir),
      Cmd("git cl upload --send-mail -f "
          "--cq-dry-run --set-bot-commit --bypass-hooks", "",
          cwd=chrome_dir),
      Cmd("git checkout -f main", "", cwd=chrome_dir),
      Cmd("git branch -D work-branch", "", cwd=chrome_dir),
    ]
    self.Expect(expectations)
    args = ["-a", "author@chromium.org", "-c", chrome_dir,
            "-r", "reviewer@chromium.org", "--json-output", json_output_file]
    auto_roll.AutoRoll(TEST_CONFIG, self).Run(args)
    deps = FileToText(os.path.join(chrome_dir, "DEPS"))
    self.assertTrue(re.search("\"v8_revision\": \"22624\"", deps))
    json_output = json.loads(FileToText(json_output_file))
    self.assertEquals("success", json_output["monitoring_state"])
  def testCheckLastPushRecently(self):
    """auto_push bails out when no commits landed since the last release."""
    self.Expect([
      Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
      Cmd("git tag", self.TAGS),
      Cmd("git log -1 --format=%H 3.22.4", "release_hash\n"),
      Cmd("git log -1 --format=%s release_hash",
          "Version 3.22.4 (based on abc3)\n"),
      Cmd("git log --format=%H abc3..abc123", "\n"),
    ])
    self._state["candidate"] = "abc123"
    self.assertEquals(0, self.RunStep(
        auto_push.AutoPush, LastReleaseBailout, AUTO_PUSH_ARGS))
  def testAutoPush(self):
    """auto_push records the lkgr revision as the next release candidate."""
    self.Expect([
      Cmd("git fetch", ""),
      Cmd("git fetch origin +refs/heads/lkgr:refs/heads/lkgr", ""),
      Cmd("git show-ref -s refs/heads/lkgr", "abc123\n"),
      Cmd("git fetch origin +refs/tags/*:refs/tags/*", ""),
      Cmd("git tag", self.TAGS),
      Cmd("git log -1 --format=%H 3.22.4", "release_hash\n"),
      Cmd("git log -1 --format=%s release_hash",
          "Version 3.22.4 (based on abc3)\n"),
      Cmd("git log --format=%H abc3..abc123", "some_stuff\n"),
    ])
    auto_push.AutoPush(TEST_CONFIG, self).Run(AUTO_PUSH_ARGS + ["--push"])
    state = json.loads(FileToText("%s-state.json"
                                  % TEST_CONFIG["PERSISTFILE_BASENAME"]))
    self.assertEquals("abc123", state["candidate"])
  def testRollMerge(self):
    """End-to-end roll_merge.py: cherry-picks, restart recovery, tagging."""
    TEST_CONFIG["ALREADY_MERGING_SENTINEL_FILE"] = self.MakeEmptyTempFile()
    TextToFile("", os.path.join(TEST_CONFIG["DEFAULT_CWD"], ".git"))
    self.WriteFakeVersionFile(build=5)
    os.environ["EDITOR"] = "vi"
    extra_patch = self.MakeEmptyTempFile()
    def VerifyPatch(patch):
      # Callback factory: asserts the temporary patch file holds |patch|.
      return lambda: self.assertEquals(patch,
          FileToText(TEST_CONFIG["TEMPORARY_PATCH_FILE"]))
    msg = """Version 3.22.5.1 (cherry-pick)
Merged ab12345
Merged ab23456
Merged ab34567
Merged ab45678
Merged ab56789
Title4
Title2
Title3
Title1
Revert "Something"
BUG=123,234,345,456,567,v8:123
"""
    def VerifyLand():
      # The landed commit must carry |msg| and a bumped patch level.
      commit = FileToText(TEST_CONFIG["COMMITMSG_FILE"])
      self.assertEquals(msg, commit)
      version = FileToText(
          os.path.join(TEST_CONFIG["DEFAULT_CWD"], VERSION_FILE))
      self.assertTrue(re.search(r"#define V8_MINOR_VERSION\s+22", version))
      self.assertTrue(re.search(r"#define V8_BUILD_NUMBER\s+5", version))
      self.assertTrue(re.search(r"#define V8_PATCH_LEVEL\s+1", version))
      self.assertTrue(
          re.search(r"#define V8_IS_CANDIDATE_VERSION\s+0", version))
    self.Expect([
      Cmd("git status -s -uno", ""),
      Cmd("git checkout -f origin/master", ""),
      Cmd("git fetch", ""),
      Cmd("git branch", " branch1\n* branch2\n"),
      Cmd("git new-branch %s --upstream refs/remotes/origin/candidates" %
          TEST_CONFIG["BRANCHNAME"], ""),
      Cmd(("git log --format=%H --grep=\"Port ab12345\" "
           "--reverse origin/master"),
          "ab45678\nab23456"),
      Cmd("git log -1 --format=%s ab45678", "Title1"),
      Cmd("git log -1 --format=%s ab23456", "Title2"),
      Cmd(("git log --format=%H --grep=\"Port ab23456\" "
           "--reverse origin/master"),
          ""),
      Cmd(("git log --format=%H --grep=\"Port ab34567\" "
           "--reverse origin/master"),
          "ab56789"),
      Cmd("git log -1 --format=%s ab56789", "Title3"),
      RL("Y"),  # Automatically add corresponding ports (ab34567, ab56789)?
      # Simulate git being down which stops the script.
      Cmd("git log -1 --format=%s ab12345", None),
      # Restart script in the failing step.
      Cmd("git log -1 --format=%s ab12345", "Title4"),
      Cmd("git log -1 --format=%s ab23456", "Title2"),
      Cmd("git log -1 --format=%s ab34567", "Title3"),
      Cmd("git log -1 --format=%s ab45678", "Title1"),
      Cmd("git log -1 --format=%s ab56789", "Revert \"Something\""),
      Cmd("git log -1 ab12345", "Title4\nBUG=123\nBUG=234"),
      Cmd("git log -1 ab23456", "Title2\n BUG = v8:123,345"),
      Cmd("git log -1 ab34567", "Title3\nBUG=567, 456"),
      Cmd("git log -1 ab45678", "Title1\nBUG="),
      Cmd("git log -1 ab56789", "Revert \"Something\"\nBUG=none"),
      Cmd("git log -1 -p ab12345", "patch4"),
      Cmd(("git apply --index --reject \"%s\"" %
           TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
          "", cb=VerifyPatch("patch4")),
      Cmd("git log -1 -p ab23456", "patch2"),
      Cmd(("git apply --index --reject \"%s\"" %
           TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
          "", cb=VerifyPatch("patch2")),
      Cmd("git log -1 -p ab34567", "patch3"),
      Cmd(("git apply --index --reject \"%s\"" %
           TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
          "", cb=VerifyPatch("patch3")),
      Cmd("git log -1 -p ab45678", "patch1"),
      Cmd(("git apply --index --reject \"%s\"" %
           TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
          "", cb=VerifyPatch("patch1")),
      Cmd("git log -1 -p ab56789", "patch5\n"),
      Cmd(("git apply --index --reject \"%s\"" %
           TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
          "", cb=VerifyPatch("patch5\n")),
      Cmd("git apply --index --reject \"%s\"" % extra_patch, ""),
      RL("Y"),  # Automatically increment patch level?
      Cmd("git commit -aF \"%s\"" % TEST_CONFIG["COMMITMSG_FILE"], ""),
      RL("reviewer@chromium.org"),  # V8 reviewer.
      Cmd("git cl upload --send-mail -r \"reviewer@chromium.org\" "
          "--bypass-hooks --cc \"ulan@chromium.org\"", ""),
      Cmd("git checkout -f %s" % TEST_CONFIG["BRANCHNAME"], ""),
      RL("LGTM"),  # Enter LGTM for V8 CL.
      Cmd("git cl presubmit", "Presubmit successfull\n"),
      Cmd("git cl land -f --bypass-hooks", "Closing issue\n",
          cb=VerifyLand),
      Cmd("git fetch", ""),
      Cmd("git log -1 --format=%H --grep=\""
          "Version 3.22.5.1 (cherry-pick)"
          "\" refs/remotes/origin/candidates",
          ""),
      Cmd("git fetch", ""),
      Cmd("git log -1 --format=%H --grep=\""
          "Version 3.22.5.1 (cherry-pick)"
          "\" refs/remotes/origin/candidates",
          "hsh_to_tag"),
      Cmd("git tag 3.22.5.1 hsh_to_tag", ""),
      Cmd("git push origin refs/tags/3.22.5.1:refs/tags/3.22.5.1", ""),
      Cmd("git checkout -f origin/master", ""),
      Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
    ])
    # ab12345 and ab34567 are patches. ab23456 (included) and ab45678 are the
    # MIPS ports of ab12345. ab56789 is the MIPS port of ab34567.
    args = ["-f", "-p", extra_patch, "--branch", "candidates",
            "ab12345", "ab23456", "ab34567"]
    # The first run of the script stops because of git being down.
    self.assertRaises(GitFailedException,
                      lambda: RollMerge(TEST_CONFIG, self).Run(args))
    # Test that state recovery after restarting the script works.
    args += ["-s", "4"]
    RollMerge(TEST_CONFIG, self).Run(args)
  def testMergeToBranch(self):
    """End-to-end merge_to_branch.py: squashed merge, no version bump."""
    TEST_CONFIG["ALREADY_MERGING_SENTINEL_FILE"] = self.MakeEmptyTempFile()
    TextToFile("", os.path.join(TEST_CONFIG["DEFAULT_CWD"], ".git"))
    self.WriteFakeVersionFile(build=5)
    os.environ["EDITOR"] = "vi"
    extra_patch = self.MakeEmptyTempFile()
    def VerifyPatch(patch):
      # Callback factory: asserts the temporary patch file holds |patch|.
      return lambda: self.assertEquals(patch,
          FileToText(TEST_CONFIG["TEMPORARY_PATCH_FILE"]))
    # NOTE(review): info_msg is assigned but never referenced in this test.
    info_msg = ("NOTE: This script will no longer automatically "
        "update include/v8-version.h "
        "and create a tag. This is done automatically by the autotag bot. "
        "Please call the merge_to_branch.py with --help for more information.")
    msg = """Merged: Squashed multiple commits.
Merged: Title4
Revision: ab12345
Merged: Title2
Revision: ab23456
Merged: Title3
Revision: ab34567
Merged: Title1
Revision: ab45678
Merged: Revert \"Something\"
Revision: ab56789
BUG=123,234,345,456,567,v8:123
NOTRY=true
NOPRESUBMIT=true
NOTREECHECKS=true
"""
    def VerifyLand():
      commit = FileToText(TEST_CONFIG["COMMITMSG_FILE"])
      self.assertEquals(msg, commit)
    self.Expect([
      Cmd("git status -s -uno", ""),
      Cmd("git checkout -f origin/master", ""),
      Cmd("git fetch", ""),
      Cmd("git branch", " branch1\n* branch2\n"),
      Cmd("git new-branch %s --upstream refs/remotes/origin/candidates" %
          TEST_CONFIG["BRANCHNAME"], ""),
      Cmd(("git log --format=%H --grep=\"^[Pp]ort ab12345\" "
           "--reverse origin/master"),
          "ab45678\nab23456"),
      Cmd("git log -1 --format=%s ab45678", "Title1"),
      Cmd("git log -1 --format=%s ab23456", "Title2"),
      Cmd(("git log --format=%H --grep=\"^[Pp]ort ab23456\" "
           "--reverse origin/master"),
          ""),
      Cmd(("git log --format=%H --grep=\"^[Pp]ort ab34567\" "
           "--reverse origin/master"),
          "ab56789"),
      Cmd("git log -1 --format=%s ab56789", "Title3"),
      RL("Y"),  # Automatically add corresponding ports (ab34567, ab56789)?
      # Simulate git being down which stops the script.
      Cmd("git log -1 --format=%s ab12345", None),
      # Restart script in the failing step.
      Cmd("git log -1 --format=%s ab12345", "Title4"),
      Cmd("git log -1 --format=%s ab23456", "Title2"),
      Cmd("git log -1 --format=%s ab34567", "Title3"),
      Cmd("git log -1 --format=%s ab45678", "Title1"),
      Cmd("git log -1 --format=%s ab56789", "Revert \"Something\""),
      Cmd("git log -1 ab12345", "Title4\nBUG=123\nBUG=234"),
      Cmd("git log -1 ab23456", "Title2\n BUG = v8:123,345"),
      Cmd("git log -1 ab34567", "Title3\nBug: 567, 456,345"),
      Cmd("git log -1 ab45678", "Title1\nBug:"),
      Cmd("git log -1 ab56789", "Revert \"Something\"\nBUG=none"),
      Cmd("git log -1 -p ab12345", "patch4"),
      Cmd(("git apply --index --reject \"%s\"" %
           TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
          "", cb=VerifyPatch("patch4")),
      Cmd("git log -1 -p ab23456", "patch2"),
      Cmd(("git apply --index --reject \"%s\"" %
           TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
          "", cb=VerifyPatch("patch2")),
      Cmd("git log -1 -p ab34567", "patch3"),
      Cmd(("git apply --index --reject \"%s\"" %
           TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
          "", cb=VerifyPatch("patch3")),
      Cmd("git log -1 -p ab45678", "patch1"),
      Cmd(("git apply --index --reject \"%s\"" %
           TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
          "", cb=VerifyPatch("patch1")),
      Cmd("git log -1 -p ab56789", "patch5\n"),
      Cmd(("git apply --index --reject \"%s\"" %
           TEST_CONFIG["TEMPORARY_PATCH_FILE"]),
          "", cb=VerifyPatch("patch5\n")),
      Cmd("git apply --index --reject \"%s\"" % extra_patch, ""),
      Cmd("git commit -aF \"%s\"" % TEST_CONFIG["COMMITMSG_FILE"], ""),
      RL("reviewer@chromium.org"),  # V8 reviewer.
      Cmd("git cl upload --send-mail -r \"reviewer@chromium.org\" "
          "--bypass-hooks --cc \"ulan@chromium.org\"", ""),
      Cmd("git checkout -f %s" % TEST_CONFIG["BRANCHNAME"], ""),
      RL("LGTM"),  # Enter LGTM for V8 CL.
      Cmd("git cl presubmit", "Presubmit successfull\n"),
      Cmd("git cl land -f --bypass-hooks", "Closing issue\n",
          cb=VerifyLand),
      Cmd("git checkout -f origin/master", ""),
      Cmd("git branch -D %s" % TEST_CONFIG["BRANCHNAME"], ""),
    ])
    # ab12345 and ab34567 are patches. ab23456 (included) and ab45678 are the
    # MIPS ports of ab12345. ab56789 is the MIPS port of ab34567.
    args = ["-f", "-p", extra_patch, "--branch", "candidates",
            "ab12345", "ab23456", "ab34567"]
    # The first run of the script stops because of git being down.
    self.assertRaises(GitFailedException,
                      lambda: MergeToBranch(TEST_CONFIG, self).Run(args))
    # Test that state recovery after restarting the script works.
    args += ["-s", "4"]
    MergeToBranch(TEST_CONFIG, self).Run(args)
# Allow running the suite directly: python test_scripts.py
if __name__ == '__main__':
  unittest.main()
| [
"bryan.guner@gmail.com"
] | bryan.guner@gmail.com |
97b645a76edb21baf3e7d7f565b58fdb42d9e961 | 48cbfd4c3634a45da8cd4616013d775ff00a39e5 | /20201001_1/task1.py | f27c17eeda5565a43f27ef096abd00126d5520e9 | [] | no_license | hakenlaken/pythonprac | c3393f0eb1d8a569c6ee853cf6da9fe959bbedf8 | 2b5fe81b728ac10e3fd25b7223f9df25b0155752 | refs/heads/master | 2023-06-05T22:32:36.248486 | 2021-06-25T08:16:10 | 2021-06-25T08:16:10 | 297,048,545 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 400 | py | # Задача_1: написать функцию вычитания двух объектов
def fun(a, b):
    """Subtract *b* from *a*, picking a strategy based on a's type.

    Strings keep the characters of ``a`` that do not occur in ``b``;
    types with a native ``-`` operator (numbers, sets, ...) use it;
    any other container is rebuilt without the elements found in ``b``.
    """
    if type(a) is str:
        # Preserve order while dropping every character present in b.
        return "".join(ch for ch in a if ch not in b)
    if hasattr(a, "__sub__"):
        # Native subtraction covers ints, floats, sets, etc.
        return a - b
    # Generic containers (list, tuple, ...): rebuild via the constructor.
    return type(a)(item for item in a if item not in b)
# Read a two-item sequence from stdin (e.g. "('abc', 'b')") and print the
# difference of its elements.
# NOTE(review): eval() executes arbitrary code from the input stream — fine
# for a trusted exercise harness, never for untrusted data.
inp = eval(input())
print(fun(inp[0], inp[1]))
| [
"lav@MacBook-LAV.local"
] | lav@MacBook-LAV.local |
26b18e37eff8d9418bc37752e4f8fe2f947df0b1 | 07ec5a0b3ba5e70a9e0fb65172ea6b13ef4115b8 | /lib/python3.6/site-packages/tensorflow/contrib/predictor/contrib_estimator_predictor.py | afeb0cc8d0fc8739a534d1ebdf77758c20ae8948 | [] | no_license | cronos91/ML-exercise | 39c5cd7f94bb90c57450f9a85d40c2f014900ea4 | 3b7afeeb6a7c87384049a9b87cac1fe4c294e415 | refs/heads/master | 2021-05-09T22:02:55.131977 | 2017-12-14T13:50:44 | 2017-12-14T13:50:44 | 118,736,043 | 0 | 0 | null | 2018-01-24T08:30:23 | 2018-01-24T08:30:22 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:f616a461aa1df558fae47ff2bc5c7d16ceb00620172d2724deccf893b3da6f46
size 3152
| [
"seokinj@jangseog-in-ui-MacBook-Pro.local"
] | seokinj@jangseog-in-ui-MacBook-Pro.local |
ca1b7ba1ced0801086175a3a04c5cadc63f7bb52 | c190336a28dc3a2a20e0448551ffcb3a24ebd08b | /app1.py | fb7ba06b7592d75084c4b15736d944ecdc8a5a84 | [] | no_license | ThomsonRen/flying-dog-beers | bf3af72f7e70f09b23a73ccb7fc18e6eb5048621 | dce2d96fb9fe8ffe8aec319fb8a37a146fb86685 | refs/heads/master | 2021-05-18T16:05:26.204152 | 2020-04-17T12:18:36 | 2020-04-17T12:18:36 | 251,309,362 | 0 | 0 | null | 2020-03-30T13:14:11 | 2020-03-30T13:14:11 | null | UTF-8 | Python | false | false | 2,620 | py | # -*- coding: utf-8 -*-
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
import pandas as pd
import plotly.express as px
# Gapminder demo dataset used by the year slider and the scatter callback.
# NOTE(review): this assignment was commented out in the original file, which
# made the `df` references in the layout and in update_figure raise NameError
# at import time — restored here.
df = px.data.gapminder()

external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']

# Kept for reference (unused): line chart of life expectancy for Asia only.
# df_asia = df[df['continent'] == 'Asia']
# fig = px.line(df_asia, x='year', y='lifeExp', color='country')

app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
server = app.server  # exposed for WSGI deployment (e.g. gunicorn)

app.layout = html.Div([
    # Free-text input echoed into the div below by update_output_div.
    dcc.Input(id='my-id', value='initial value', type='text'),
    html.Div(id='my-div'),
    # Scatter plot rebuilt by update_figure whenever the slider moves.
    dcc.Graph(id='graph-with-slider'),
    dcc.Slider(
        id='year-slider',           # selected year, fed to update_figure
        min=df['year'].min(),
        max=df['year'].max(),
        value=df['year'].min(),     # start at the earliest year
        marks={str(year): str(year) for year in df['year'].unique()},
        step=None,                  # restrict selection to the marked years
    ),
])
@app.callback(
    Output('my-div', 'children'),
    [Input('my-id', 'value')]
)
def update_output_div(input_value):
    """Mirror the text-box contents into the 'my-div' element."""
    return f'You\'ve entered "{input_value}"'
@app.callback(
    Output('graph-with-slider', 'figure'),
    [Input('year-slider', 'value')])
# Input: the slider's selected value; output: the figure dict for the graph.
def update_figure(selected_year):
    """Rebuild the GDP-vs-life-expectancy scatter for the selected year."""
    # NOTE(review): relies on a module-level DataFrame `df`; at review time
    # the line defining it was commented out — confirm it is restored.
    filtered_df = df[df.year == selected_year]
    traces = []
    # One scatter trace per continent present in the selected year.
    for i in filtered_df.continent.unique():
        df_by_continent = filtered_df[filtered_df['continent'] == i]
        traces.append(dict(
            x=df_by_continent['gdpPercap'],
            y=df_by_continent['lifeExp'],
            text=df_by_continent['country'],  # hover label per point
            mode='markers',
            opacity=0.7,
            marker={
                'size': 15,
                'line': {'width': 0.5, 'color': 'white'}
            },
            name=i
        ))
    return {
        'data': traces,
        'layout': dict(
            # Log-scale x axis; range is in log10 units (~200 to ~63k).
            xaxis={'type': 'log', 'title': 'GDP Per Capita',
                   'range':[2.3, 4.8]},
            yaxis={'title': 'Life Expectancy', 'range': [20, 90]},
            margin={'l': 40, 'b': 40, 't': 10, 'r': 10},
            legend={'x': 0, 'y': 1},
            hovermode='closest',
            # Animate between years over 1.5 s.
            transition = {'duration': 1500},
        )
    }
# Run the Dash development server when executed as a script.
if __name__ == '__main__':
    app.run_server()
"rentongxin@sjtu.edu.cn"
] | rentongxin@sjtu.edu.cn |
b327b6904a68a6fac9133923566f52491e3e7255 | 96db160b6075e49101686eb4947fefb2e0909985 | /Store/views.py | 3cba59847230429be847c64618fcdb291698a251 | [] | no_license | hdforoozan/Restaurant | 7c43b1c89e8edc504a27dac2515313b979069c88 | d9420dc5dcd42bcb6c5952474ef996845ec4381c | refs/heads/master | 2022-12-09T13:38:57.970747 | 2019-09-29T20:45:10 | 2019-09-29T20:45:10 | 208,814,583 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,784 | py | from datetime import datetime
from django.shortcuts import render
from .models import Store, Employee, Manager
from Food.models import Food
from django.urls import reverse_lazy
from django.views.generic import TemplateView,DetailView,ListView, CreateView,DeleteView,UpdateView
from django.contrib.auth.mixins import LoginRequiredMixin
from Cart.forms import CartAddFoodForm
from Order.models import Order
from Comment.forms import CommentForm
from Comment.models import Comment
from Food.forms import SearchForm
class HomePageView(TemplateView):
    """Landing page with featured food selections and a search form."""
    template_name = 'home.html'
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # NOTE(review): "most sold" is approximated by names containing 'p';
        # presumably a placeholder until real sales data exists — confirm.
        context['most_sell_foods'] = Food.objects.filter(name__icontains='p')
        # Foods priced at 10 or less are shown as the cheapest picks.
        context['cheapest_foods'] = Food.objects.filter(price__lte=10)
        context['search_form'] = SearchForm()
        return context
##############################################################
# Store Model Views
##############################################################
class StoreListView(LoginRequiredMixin, ListView):
    """List all stores for authenticated users (template var: 'stores')."""
    model = Store
    context_object_name = 'stores'
class StoreDetailView(LoginRequiredMixin, DetailView):
    """Store page: its in-stock foods, its staff, and its paid-order income."""
    model = Store
    context_object_name = 'store'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # DetailView has already fetched the store by pk; avoid a second query.
        store = self.object
        context['foods'] = Food.objects.filter(stores=store, run_out=False)
        context['employees'] = Employee.objects.filter(store=store)
        # Let the database narrow to this store's paid orders instead of
        # scanning every paid order in Python.  Filtering on the FK also
        # avoids the original str/int mismatch risk in
        # `item.store_id == self.kwargs['pk']` when the URL kwarg is a string.
        paid_orders = Order.objects.filter(paid=True, store=store)
        # NOTE(review): despite the name, this sums ALL paid orders, not just
        # the current month's — confirm intent against the template.
        context['monthly_income'] = sum(
            order.get_total_cost() for order in paid_orders
        )
        return context
class StoreCreateView(LoginRequiredMixin, CreateView):
    """Create a store via the default ModelForm over the listed fields."""
    model = Store
    fields = ['user','manager','foods','branch_num','image','pub_date','address']
class StoreUpdateView(LoginRequiredMixin, UpdateView):
    """Edit a store; owning user and pub_date are intentionally not editable."""
    model = Store
    fields = ['manager','foods','branch_num','image','address']
    context_object_name = 'store'
    template_name = 'Store/store_update_form.html'
class StoreDeleteView(LoginRequiredMixin, DeleteView):
    """Delete a store, then return to the store list."""
    model = Store
    success_url = reverse_lazy('store-list')
    context_object_name = 'store'
class StoreFoodDetailView(LoginRequiredMixin, DetailView):
    """Detail page for one food within a store.

    Provides add-to-cart and comment forms plus the food's five most
    recent comments, each paired with a coarse human-readable age.
    """
    model = Store
    context_object_name = 'store'
    template_name = 'Store/store_food_detail.html'

    @staticmethod
    def _age_label(created, now):
        """Return a coarse age string ('3 days ago', 'just now', ...) for
        *created* relative to *now*.  Months are approximated as 30 days,
        weeks as 7 days; sub-day ages fall through to hours/minutes.

        Replaces the original strftime/strptime round-trip (which only
        truncated microseconds) and the confusing instance-bound
        ``now.strptime`` call with direct timedelta arithmetic.
        """
        diff = now - created
        if diff.days > 0:
            months = diff.days // 30
            if months > 0:
                return '{} months ago'.format(months)
            weeks = diff.days // 7
            if weeks > 0:
                return '{} weeks ago'.format(weeks)
            return '{} days ago'.format(diff.days)
        hours = diff.seconds // 3600
        if hours > 0:
            return '{} hours ago'.format(hours)
        minutes = (diff.seconds % 3600) // 60
        if minutes > 0:
            return '{} minutes ago'.format(minutes)
        return 'just now'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # DetailView already fetched the store; avoid a duplicate query.
        store = self.object
        # Restrict lookup to foods actually sold at this store.
        food = Food.objects.filter(stores=store).get(id=self.kwargs['food_id'])
        context['food'] = food
        context['cart_food_form'] = CartAddFoodForm()
        context['comment_form'] = CommentForm()
        # NOTE(review): [:5] relies on the Comment model's default ordering
        # to mean "most recent" — confirm Meta.ordering.
        comments = Comment.objects.filter(food=food)[:5]
        now = datetime.now()
        # Pair each comment with its age label for the template.
        context['food_comments'] = zip(
            comments, [self._age_label(c.created, now) for c in comments]
        )
        # Remember which store the food was viewed in for the cart workflow.
        self.request.session['store_id'] = store.id
        return context
##############################################################
# Manager Model Views
###############################################################
class ManagerDetailView(LoginRequiredMixin, DetailView):
    """Show a single manager (template var: 'manager')."""
    model = Manager
    context_object_name = 'manager'
class ManagerUpdateView(LoginRequiredMixin, UpdateView):
    """Edit a manager's personal details."""
    model = Manager
    fields = ['name','address','phone_num','education_degree','image']
    context_object_name = 'manager'
    template_name = 'Store/manager_update_form.html'
class ManagerDeleteView(LoginRequiredMixin, DeleteView):
    """Delete a manager; redirects to the store list (no manager list view)."""
    model = Manager
    success_url = reverse_lazy('store-list')
    context_object_name = 'manager'
##############################################################
# Employee Model Views
###############################################################
class EmployeeDetailView(LoginRequiredMixin, DetailView):
    """Show one employee, but only if they belong to the store in the URL."""
    model = Employee
    context_object_name = 'employee'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # The URL carries both the store pk ('pk') and the employee id
        # ('employee_id'); expose the employee only when they actually
        # work at that store, otherwise hand the template None.
        candidate = Employee.objects.get(id=self.kwargs['employee_id'])
        colleagues = Employee.objects.filter(store_id=self.kwargs['pk'])
        context['employee'] = candidate if candidate in colleagues else None
        return context
class EmployeeCreateView(LoginRequiredMixin, CreateView):
    """Create an employee via the default ModelForm over the listed fields."""
    model = Employee
    fields = ['store','name','address','phone_num','pub_date','image','position','education_degree','monthly_salary']
class EmployeeUpdateView(LoginRequiredMixin, UpdateView):
    """Edit an employee; store, pub_date and salary are not editable here."""
    model = Employee
    fields = ['name','address','phone_num','image','education_degree','position']
    context_object_name = 'employee'
    template_name = 'Store/employee_update_form.html'
class EmployeeDeleteView(LoginRequiredMixin, DeleteView):
    """Delete an employee.

    NOTE(review): reverse_lazy('store-detail') is called without a pk
    argument; if that URL pattern requires one, this raises NoReverseMatch
    after deletion — verify against the URL conf.
    """
    model = Employee
    success_url = reverse_lazy('store-detail')
    context_object_name = 'employee'
| [
"hdforoozan@gmail.com"
] | hdforoozan@gmail.com |
5654cf482eb177451e9980b172ae3959c3598848 | c4249ce9e7cb26ae006bc9951ea676ae2250777b | /gamslib/tsp42/tsp42-scalar.py | 42958fd8ad72080b79cab8da32eb1cf7165217d6 | [] | no_license | vaidasj/alg-mod-rev | 79de3ef1e110f4bd07cbdef6951de2e4216f47f1 | a3ec6b5c21700a2f28ac6bf7db6aa22540748c6e | refs/heads/master | 2021-06-27T14:06:39.997411 | 2020-10-19T15:47:54 | 2020-10-19T15:47:54 | 180,074,989 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 80,771 | py | # MIP written by GAMS Convert at 12/13/18 10:32:27
#
# Equation counts
# Total E G L N X C B
# 43 43 0 0 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 862 1 861 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 2584 2584 0 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
# Binary variables b2..b439 of the GAMS-converted TSP model.
# The generated file declared each variable on its own line; generate them
# in a loop instead.  setattr on a Pyomo ConcreteModel is equivalent to
# direct attribute assignment (both go through the component-add machinery),
# so the resulting components m.b2 ... m.b439 are identical to the expanded
# form.  Variables from b440 onward remain declared individually below.
for _i in range(2, 440):
    setattr(m, 'b%d' % _i, Var(within=Binary, bounds=(0, 1), initialize=0))
m.b440 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b441 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b442 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b443 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b444 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b445 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b446 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b447 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b448 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b449 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b450 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b451 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b452 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b453 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b454 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b455 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b456 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b457 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b458 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b459 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b460 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b461 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b462 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b463 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b464 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b465 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b466 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b467 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b468 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b469 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b470 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b471 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b472 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b473 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b474 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b475 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b476 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b477 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b478 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b479 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b480 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b481 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b482 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b483 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b484 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b485 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b486 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b487 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b488 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b489 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b490 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b491 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b492 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b493 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b494 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b495 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b496 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b497 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b498 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b499 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b500 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b501 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b502 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b503 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b504 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b505 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b506 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b507 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b508 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b509 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b510 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b511 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b512 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b513 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b514 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b515 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b516 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b517 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b518 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b519 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b520 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b521 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b522 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b523 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b524 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b525 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b526 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b527 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b528 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b529 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b530 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b531 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b532 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b533 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b534 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b535 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b536 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b537 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b538 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b539 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b540 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b541 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b542 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b543 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b544 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b545 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b546 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b547 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b548 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b549 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b550 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b551 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b552 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b553 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b554 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b555 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b556 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b557 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b558 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b559 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b560 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b561 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b562 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b563 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b564 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b565 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b566 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b567 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b568 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b569 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b570 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b571 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b572 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b573 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b574 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b575 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b576 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b577 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b578 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b579 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b580 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b581 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b582 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b583 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b584 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b585 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b586 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b587 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b588 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b589 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b590 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b591 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b592 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b593 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b594 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b595 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b596 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b597 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b598 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b599 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b600 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b601 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b602 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b603 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b604 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b605 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b606 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b607 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b608 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b609 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b610 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b611 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b612 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b613 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b614 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b615 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b616 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b617 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b618 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b619 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b620 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b621 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b622 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b623 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b624 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b625 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b626 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b627 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b628 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b629 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b630 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b631 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b632 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b633 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b634 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b635 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b636 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b637 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b638 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b639 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b640 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b641 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b642 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b643 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b644 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b645 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b646 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b647 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b648 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b649 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b650 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b651 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b652 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b653 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b654 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b655 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b656 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b657 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b658 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b659 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b660 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b661 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b662 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b663 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b664 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b665 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b666 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b667 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b668 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b669 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b670 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b671 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b672 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b673 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b674 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b675 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b676 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b677 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b678 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b679 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b680 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b681 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b682 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b683 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b684 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b685 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b686 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b687 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b688 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b689 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b690 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b691 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b692 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b693 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b694 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b695 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b696 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b697 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b698 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b699 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b700 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b701 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b702 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b703 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b704 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b705 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b706 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b707 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b708 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b709 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b710 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b711 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b712 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b713 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b714 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b715 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b716 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b717 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b718 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b719 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b720 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b721 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b722 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b723 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b724 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b725 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b726 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b727 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b728 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b729 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b730 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b731 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b732 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b733 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b734 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b735 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b736 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b737 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b738 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b739 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b740 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b741 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b742 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b743 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b744 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b745 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b746 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b747 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b748 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b749 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b750 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b751 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b752 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b753 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b754 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b755 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b756 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b757 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b758 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b759 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b760 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b761 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b762 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b763 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b764 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b765 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b766 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b767 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b768 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b769 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b770 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b771 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b772 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b773 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b774 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b775 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b776 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b777 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b778 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b779 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b780 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b781 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b782 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b783 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b784 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b785 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b786 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b787 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b788 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b789 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b790 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b791 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b792 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b793 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b794 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b795 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b796 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b797 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b798 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b799 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b800 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b801 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b802 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b803 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b804 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b805 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b806 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b807 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b808 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b809 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b810 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b811 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b812 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b813 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b814 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b815 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b816 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b817 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b818 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b819 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b820 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b821 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b822 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b823 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b824 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b825 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b826 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b827 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b828 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b829 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b830 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b831 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b832 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b833 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b834 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b835 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b836 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b837 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b838 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b839 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b840 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b841 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b842 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b843 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b844 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b845 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b846 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b847 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b848 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b849 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b850 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b851 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b852 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b853 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b854 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b855 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b856 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b857 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b858 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b859 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b860 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b861 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b862 = Var(within=Binary,bounds=(0,1),initialize=0)
m.obj = Objective(expr= 8*m.b2 + 39*m.b3 + 45*m.b4 + 37*m.b5 + 47*m.b6 + 9*m.b7 + 50*m.b8 + 49*m.b9 + 21*m.b10
+ 15*m.b11 + 61*m.b12 + 62*m.b13 + 21*m.b14 + 20*m.b15 + 17*m.b16 + 58*m.b17 + 60*m.b18
+ 16*m.b19 + 17*m.b20 + 18*m.b21 + 6*m.b22 + 59*m.b23 + 60*m.b24 + 15*m.b25 + 20*m.b26
+ 26*m.b27 + 17*m.b28 + 10*m.b29 + 62*m.b30 + 66*m.b31 + 20*m.b32 + 25*m.b33 + 31*m.b34
+ 22*m.b35 + 15*m.b36 + 5*m.b37 + 81*m.b38 + 81*m.b39 + 40*m.b40 + 44*m.b41 + 50*m.b42
+ 41*m.b43 + 35*m.b44 + 24*m.b45 + 20*m.b46 + 103*m.b47 + 107*m.b48 + 62*m.b49 + 67*m.b50
+ 72*m.b51 + 63*m.b52 + 57*m.b53 + 46*m.b54 + 41*m.b55 + 23*m.b56 + 108*m.b57 + 117*m.b58
+ 66*m.b59 + 71*m.b60 + 77*m.b61 + 68*m.b62 + 61*m.b63 + 51*m.b64 + 46*m.b65 + 26*m.b66
+ 11*m.b67 + 145*m.b68 + 149*m.b69 + 104*m.b70 + 108*m.b71 + 114*m.b72 + 106*m.b73 + 99*m.b74
+ 88*m.b75 + 84*m.b76 + 63*m.b77 + 49*m.b78 + 40*m.b79 + 181*m.b80 + 185*m.b81 + 140*m.b82
+ 144*m.b83 + 150*m.b84 + 142*m.b85 + 135*m.b86 + 124*m.b87 + 120*m.b88 + 99*m.b89 + 85*m.b90
+ 76*m.b91 + 35*m.b92 + 187*m.b93 + 191*m.b94 + 146*m.b95 + 150*m.b96 + 156*m.b97 + 142*m.b98
+ 137*m.b99 + 130*m.b100 + 125*m.b101 + 105*m.b102 + 90*m.b103 + 81*m.b104 + 41*m.b105
+ 10*m.b106 + 161*m.b107 + 170*m.b108 + 120*m.b109 + 124*m.b110 + 130*m.b111 + 115*m.b112
+ 110*m.b113 + 104*m.b114 + 105*m.b115 + 90*m.b116 + 72*m.b117 + 62*m.b118 + 34*m.b119
+ 31*m.b120 + 27*m.b121 + 142*m.b122 + 146*m.b123 + 101*m.b124 + 104*m.b125 + 111*m.b126
+ 97*m.b127 + 91*m.b128 + 85*m.b129 + 86*m.b130 + 75*m.b131 + 51*m.b132 + 59*m.b133 + 29*m.b134
+ 53*m.b135 + 48*m.b136 + 21*m.b137 + 174*m.b138 + 178*m.b139 + 133*m.b140 + 138*m.b141
+ 143*m.b142 + 129*m.b143 + 123*m.b144 + 117*m.b145 + 118*m.b146 + 107*m.b147 + 83*m.b148
+ 84*m.b149 + 54*m.b150 + 46*m.b151 + 35*m.b152 + 26*m.b153 + 31*m.b154 + 185*m.b155
+ 186*m.b156 + 142*m.b157 + 143*m.b158 + 140*m.b159 + 130*m.b160 + 126*m.b161 + 124*m.b162
+ 128*m.b163 + 118*m.b164 + 93*m.b165 + 101*m.b166 + 72*m.b167 + 69*m.b168 + 58*m.b169
+ 58*m.b170 + 43*m.b171 + 26*m.b172 + 164*m.b173 + 165*m.b174 + 120*m.b175 + 123*m.b176
+ 124*m.b177 + 106*m.b178 + 106*m.b179 + 105*m.b180 + 110*m.b181 + 104*m.b182 + 86*m.b183
+ 97*m.b184 + 71*m.b185 + 93*m.b186 + 82*m.b187 + 62*m.b188 + 42*m.b189 + 45*m.b190 + 22*m.b191
+ 137*m.b192 + 139*m.b193 + 94*m.b194 + 96*m.b195 + 94*m.b196 + 80*m.b197 + 78*m.b198
+ 77*m.b199 + 84*m.b200 + 77*m.b201 + 56*m.b202 + 64*m.b203 + 65*m.b204 + 90*m.b205 + 87*m.b206
+ 58*m.b207 + 36*m.b208 + 68*m.b209 + 50*m.b210 + 30*m.b211 + 117*m.b212 + 122*m.b213
+ 77*m.b214 + 80*m.b215 + 83*m.b216 + 68*m.b217 + 62*m.b218 + 60*m.b219 + 61*m.b220 + 50*m.b221
+ 34*m.b222 + 42*m.b223 + 49*m.b224 + 82*m.b225 + 77*m.b226 + 60*m.b227 + 30*m.b228 + 62*m.b229
+ 70*m.b230 + 49*m.b231 + 21*m.b232 + 114*m.b233 + 118*m.b234 + 73*m.b235 + 78*m.b236
+ 84*m.b237 + 69*m.b238 + 63*m.b239 + 57*m.b240 + 59*m.b241 + 48*m.b242 + 28*m.b243 + 36*m.b244
+ 43*m.b245 + 77*m.b246 + 72*m.b247 + 45*m.b248 + 27*m.b249 + 59*m.b250 + 69*m.b251 + 55*m.b252
+ 27*m.b253 + 5*m.b254 + 85*m.b255 + 89*m.b256 + 44*m.b257 + 48*m.b258 + 53*m.b259 + 41*m.b260
+ 34*m.b261 + 28*m.b262 + 29*m.b263 + 22*m.b264 + 23*m.b265 + 35*m.b266 + 69*m.b267 + 105*m.b268
+ 102*m.b269 + 74*m.b270 + 56*m.b271 + 88*m.b272 + 99*m.b273 + 81*m.b274 + 54*m.b275 + 32*m.b276
+ 29*m.b277 + 77*m.b278 + 80*m.b279 + 36*m.b280 + 40*m.b281 + 46*m.b282 + 34*m.b283 + 27*m.b284
+ 19*m.b285 + 21*m.b286 + 14*m.b287 + 29*m.b288 + 40*m.b289 + 77*m.b290 + 114*m.b291
+ 111*m.b292 + 84*m.b293 + 64*m.b294 + 96*m.b295 + 107*m.b296 + 87*m.b297 + 60*m.b298
+ 40*m.b299 + 37*m.b300 + 8*m.b301 + 87*m.b302 + 89*m.b303 + 44*m.b304 + 46*m.b305 + 46*m.b306
+ 30*m.b307 + 28*m.b308 + 29*m.b309 + 32*m.b310 + 27*m.b311 + 36*m.b312 + 47*m.b313 + 78*m.b314
+ 116*m.b315 + 112*m.b316 + 84*m.b317 + 66*m.b318 + 98*m.b319 + 95*m.b320 + 75*m.b321
+ 47*m.b322 + 36*m.b323 + 39*m.b324 + 12*m.b325 + 11*m.b326 + 91*m.b327 + 93*m.b328 + 48*m.b329
+ 50*m.b330 + 48*m.b331 + 34*m.b332 + 32*m.b333 + 33*m.b334 + 36*m.b335 + 30*m.b336 + 34*m.b337
+ 45*m.b338 + 77*m.b339 + 115*m.b340 + 110*m.b341 + 83*m.b342 + 63*m.b343 + 97*m.b344
+ 91*m.b345 + 72*m.b346 + 44*m.b347 + 32*m.b348 + 36*m.b349 + 9*m.b350 + 15*m.b351 + 3*m.b352
+ 105*m.b353 + 106*m.b354 + 62*m.b355 + 63*m.b356 + 64*m.b357 + 47*m.b358 + 46*m.b359
+ 49*m.b360 + 54*m.b361 + 48*m.b362 + 46*m.b363 + 59*m.b364 + 85*m.b365 + 119*m.b366
+ 115*m.b367 + 88*m.b368 + 66*m.b369 + 98*m.b370 + 79*m.b371 + 59*m.b372 + 31*m.b373 + 36*m.b374
+ 42*m.b375 + 28*m.b376 + 33*m.b377 + 21*m.b378 + 20*m.b379 + 111*m.b380 + 113*m.b381
+ 69*m.b382 + 71*m.b383 + 66*m.b384 + 51*m.b385 + 53*m.b386 + 56*m.b387 + 61*m.b388 + 57*m.b389
+ 59*m.b390 + 71*m.b391 + 96*m.b392 + 130*m.b393 + 126*m.b394 + 98*m.b395 + 75*m.b396
+ 98*m.b397 + 85*m.b398 + 62*m.b399 + 38*m.b400 + 47*m.b401 + 53*m.b402 + 39*m.b403 + 42*m.b404
+ 29*m.b405 + 30*m.b406 + 12*m.b407 + 91*m.b408 + 92*m.b409 + 50*m.b410 + 51*m.b411 + 46*m.b412
+ 30*m.b413 + 34*m.b414 + 38*m.b415 + 43*m.b416 + 49*m.b417 + 60*m.b418 + 71*m.b419 + 103*m.b420
+ 141*m.b421 + 136*m.b422 + 109*m.b423 + 90*m.b424 + 115*m.b425 + 99*m.b426 + 81*m.b427
+ 53*m.b428 + 61*m.b429 + 62*m.b430 + 36*m.b431 + 34*m.b432 + 24*m.b433 + 28*m.b434 + 20*m.b435
+ 20*m.b436 + 83*m.b437 + 85*m.b438 + 42*m.b439 + 43*m.b440 + 38*m.b441 + 22*m.b442 + 26*m.b443
+ 32*m.b444 + 36*m.b445 + 51*m.b446 + 63*m.b447 + 75*m.b448 + 106*m.b449 + 142*m.b450
+ 140*m.b451 + 112*m.b452 + 93*m.b453 + 126*m.b454 + 108*m.b455 + 88*m.b456 + 60*m.b457
+ 64*m.b458 + 66*m.b459 + 39*m.b460 + 36*m.b461 + 27*m.b462 + 31*m.b463 + 28*m.b464 + 28*m.b465
+ 8*m.b466 + 89*m.b467 + 91*m.b468 + 55*m.b469 + 55*m.b470 + 50*m.b471 + 34*m.b472 + 39*m.b473
+ 44*m.b474 + 49*m.b475 + 63*m.b476 + 76*m.b477 + 87*m.b478 + 120*m.b479 + 155*m.b480
+ 150*m.b481 + 123*m.b482 + 100*m.b483 + 123*m.b484 + 109*m.b485 + 86*m.b486 + 62*m.b487
+ 71*m.b488 + 78*m.b489 + 52*m.b490 + 49*m.b491 + 39*m.b492 + 44*m.b493 + 35*m.b494 + 24*m.b495
+ 15*m.b496 + 12*m.b497 + 95*m.b498 + 97*m.b499 + 64*m.b500 + 63*m.b501 + 56*m.b502 + 42*m.b503
+ 49*m.b504 + 56*m.b505 + 60*m.b506 + 75*m.b507 + 86*m.b508 + 97*m.b509 + 126*m.b510
+ 160*m.b511 + 155*m.b512 + 128*m.b513 + 104*m.b514 + 128*m.b515 + 113*m.b516 + 90*m.b517
+ 67*m.b518 + 76*m.b519 + 82*m.b520 + 62*m.b521 + 59*m.b522 + 49*m.b523 + 53*m.b524 + 40*m.b525
+ 29*m.b526 + 25*m.b527 + 23*m.b528 + 11*m.b529 + 74*m.b530 + 81*m.b531 + 44*m.b532 + 43*m.b533
+ 35*m.b534 + 23*m.b535 + 30*m.b536 + 39*m.b537 + 44*m.b538 + 62*m.b539 + 78*m.b540 + 89*m.b541
+ 121*m.b542 + 159*m.b543 + 155*m.b544 + 127*m.b545 + 108*m.b546 + 136*m.b547 + 124*m.b548
+ 101*m.b549 + 75*m.b550 + 79*m.b551 + 81*m.b552 + 54*m.b553 + 50*m.b554 + 42*m.b555 + 46*m.b556
+ 43*m.b557 + 39*m.b558 + 23*m.b559 + 14*m.b560 + 14*m.b561 + 21*m.b562 + 67*m.b563 + 69*m.b564
+ 42*m.b565 + 41*m.b566 + 31*m.b567 + 25*m.b568 + 32*m.b569 + 41*m.b570 + 46*m.b571 + 64*m.b572
+ 83*m.b573 + 90*m.b574 + 130*m.b575 + 164*m.b576 + 160*m.b577 + 133*m.b578 + 114*m.b579
+ 146*m.b580 + 134*m.b581 + 111*m.b582 + 85*m.b583 + 84*m.b584 + 86*m.b585 + 59*m.b586
+ 52*m.b587 + 47*m.b588 + 51*m.b589 + 53*m.b590 + 49*m.b591 + 32*m.b592 + 24*m.b593 + 24*m.b594
+ 30*m.b595 + 9*m.b596 + 74*m.b597 + 76*m.b598 + 61*m.b599 + 60*m.b600 + 42*m.b601 + 44*m.b602
+ 51*m.b603 + 60*m.b604 + 66*m.b605 + 83*m.b606 + 102*m.b607 + 110*m.b608 + 147*m.b609
+ 185*m.b610 + 179*m.b611 + 155*m.b612 + 133*m.b613 + 159*m.b614 + 146*m.b615 + 122*m.b616
+ 98*m.b617 + 105*m.b618 + 107*m.b619 + 79*m.b620 + 71*m.b621 + 66*m.b622 + 70*m.b623
+ 70*m.b624 + 60*m.b625 + 48*m.b626 + 40*m.b627 + 36*m.b628 + 33*m.b629 + 25*m.b630 + 18*m.b631
+ 57*m.b632 + 59*m.b633 + 46*m.b634 + 41*m.b635 + 25*m.b636 + 30*m.b637 + 36*m.b638 + 47*m.b639
+ 52*m.b640 + 71*m.b641 + 93*m.b642 + 98*m.b643 + 136*m.b644 + 172*m.b645 + 172*m.b646
+ 148*m.b647 + 126*m.b648 + 158*m.b649 + 147*m.b650 + 124*m.b651 + 121*m.b652 + 97*m.b653
+ 99*m.b654 + 71*m.b655 + 65*m.b656 + 59*m.b657 + 63*m.b658 + 67*m.b659 + 62*m.b660 + 46*m.b661
+ 38*m.b662 + 37*m.b663 + 43*m.b664 + 23*m.b665 + 13*m.b666 + 17*m.b667 + 45*m.b668 + 46*m.b669
+ 41*m.b670 + 34*m.b671 + 20*m.b672 + 34*m.b673 + 38*m.b674 + 48*m.b675 + 53*m.b676 + 73*m.b677
+ 96*m.b678 + 99*m.b679 + 137*m.b680 + 176*m.b681 + 178*m.b682 + 151*m.b683 + 131*m.b684
+ 163*m.b685 + 159*m.b686 + 135*m.b687 + 108*m.b688 + 102*m.b689 + 103*m.b690 + 73*m.b691
+ 67*m.b692 + 64*m.b693 + 69*m.b694 + 75*m.b695 + 72*m.b696 + 54*m.b697 + 46*m.b698 + 49*m.b699
+ 54*m.b700 + 34*m.b701 + 24*m.b702 + 29*m.b703 + 12*m.b704 + 35*m.b705 + 37*m.b706 + 35*m.b707
+ 26*m.b708 + 18*m.b709 + 34*m.b710 + 36*m.b711 + 46*m.b712 + 51*m.b713 + 70*m.b714 + 93*m.b715
+ 97*m.b716 + 134*m.b717 + 171*m.b718 + 176*m.b719 + 151*m.b720 + 129*m.b721 + 161*m.b722
+ 163*m.b723 + 139*m.b724 + 118*m.b725 + 102*m.b726 + 101*m.b727 + 71*m.b728 + 65*m.b729
+ 65*m.b730 + 70*m.b731 + 84*m.b732 + 78*m.b733 + 58*m.b734 + 50*m.b735 + 56*m.b736 + 62*m.b737
+ 41*m.b738 + 32*m.b739 + 38*m.b740 + 21*m.b741 + 9*m.b742 + 29*m.b743 + 33*m.b744 + 30*m.b745
+ 21*m.b746 + 18*m.b747 + 35*m.b748 + 33*m.b749 + 40*m.b750 + 45*m.b751 + 65*m.b752 + 87*m.b753
+ 91*m.b754 + 117*m.b755 + 166*m.b756 + 171*m.b757 + 144*m.b758 + 125*m.b759 + 157*m.b760
+ 156*m.b761 + 139*m.b762 + 113*m.b763 + 95*m.b764 + 97*m.b765 + 67*m.b766 + 60*m.b767
+ 62*m.b768 + 67*m.b769 + 79*m.b770 + 82*m.b771 + 62*m.b772 + 53*m.b773 + 59*m.b774 + 66*m.b775
+ 45*m.b776 + 38*m.b777 + 45*m.b778 + 27*m.b779 + 15*m.b780 + 6*m.b781 + 3*m.b782 + 11*m.b783
+ 41*m.b784 + 37*m.b785 + 47*m.b786 + 57*m.b787 + 55*m.b788 + 58*m.b789 + 63*m.b790 + 83*m.b791
+ 105*m.b792 + 109*m.b793 + 147*m.b794 + 186*m.b795 + 188*m.b796 + 164*m.b797 + 144*m.b798
+ 176*m.b799 + 182*m.b800 + 161*m.b801 + 134*m.b802 + 119*m.b803 + 116*m.b804 + 86*m.b805
+ 78*m.b806 + 84*m.b807 + 88*m.b808 + 101*m.b809 + 108*m.b810 + 88*m.b811 + 80*m.b812
+ 86*m.b813 + 92*m.b814 + 71*m.b815 + 64*m.b816 + 71*m.b817 + 54*m.b818 + 41*m.b819 + 32*m.b820
+ 25*m.b821 + 5*m.b822 + 12*m.b823 + 55*m.b824 + 41*m.b825 + 53*m.b826 + 64*m.b827 + 61*m.b828
+ 61*m.b829 + 66*m.b830 + 84*m.b831 + 111*m.b832 + 113*m.b833 + 150*m.b834 + 186*m.b835
+ 192*m.b836 + 166*m.b837 + 147*m.b838 + 180*m.b839 + 188*m.b840 + 167*m.b841 + 140*m.b842
+ 124*m.b843 + 119*m.b844 + 90*m.b845 + 87*m.b846 + 90*m.b847 + 94*m.b848 + 107*m.b849
+ 114*m.b850 + 77*m.b851 + 86*m.b852 + 92*m.b853 + 98*m.b854 + 80*m.b855 + 74*m.b856 + 77*m.b857
+ 60*m.b858 + 48*m.b859 + 38*m.b860 + 32*m.b861 + 6*m.b862, sense=minimize)
# Degree constraint for one node: exactly two of the binary edge variables
# incident to it must be selected (== 2). Combined with the weighted
# minimize objective above, the model resembles a minimum-weight
# 2-matching / TSP-style formulation -- NOTE(review): inferred from the
# generated structure; confirm against the model generator.
m.c2 = Constraint(expr= m.b2 + m.b3 + m.b5 + m.b8 + m.b12 + m.b17 + m.b23 + m.b30 + m.b38 + m.b47 + m.b57 + m.b68
                 + m.b80 + m.b93 + m.b107 + m.b122 + m.b138 + m.b155 + m.b173 + m.b192 + m.b212 + m.b233 + m.b255
                 + m.b278 + m.b302 + m.b327 + m.b353 + m.b380 + m.b408 + m.b437 + m.b467 + m.b498 + m.b530
                 + m.b563 + m.b597 + m.b632 + m.b668 + m.b705 + m.b743 + m.b782 + m.b822 == 2)
m.c3 = Constraint(expr= m.b2 + m.b4 + m.b6 + m.b9 + m.b13 + m.b18 + m.b24 + m.b31 + m.b39 + m.b48 + m.b58 + m.b69
+ m.b81 + m.b94 + m.b108 + m.b123 + m.b139 + m.b156 + m.b174 + m.b193 + m.b213 + m.b234 + m.b256
+ m.b279 + m.b303 + m.b328 + m.b354 + m.b381 + m.b409 + m.b438 + m.b468 + m.b499 + m.b531
+ m.b564 + m.b598 + m.b633 + m.b669 + m.b706 + m.b744 + m.b783 + m.b823 == 2)
m.c4 = Constraint(expr= m.b3 + m.b4 + m.b7 + m.b10 + m.b14 + m.b19 + m.b25 + m.b32 + m.b40 + m.b49 + m.b59 + m.b70
+ m.b82 + m.b95 + m.b109 + m.b124 + m.b140 + m.b157 + m.b175 + m.b194 + m.b214 + m.b235 + m.b257
+ m.b280 + m.b304 + m.b329 + m.b355 + m.b382 + m.b410 + m.b439 + m.b469 + m.b500 + m.b532
+ m.b565 + m.b599 + m.b634 + m.b670 + m.b707 + m.b745 + m.b784 + m.b824 == 2)
m.c5 = Constraint(expr= m.b5 + m.b6 + m.b7 + m.b11 + m.b15 + m.b20 + m.b26 + m.b33 + m.b41 + m.b50 + m.b60 + m.b71
+ m.b83 + m.b96 + m.b110 + m.b125 + m.b141 + m.b158 + m.b176 + m.b195 + m.b215 + m.b236 + m.b258
+ m.b281 + m.b305 + m.b330 + m.b356 + m.b383 + m.b411 + m.b440 + m.b470 + m.b501 + m.b533
+ m.b566 + m.b600 + m.b635 + m.b671 + m.b708 + m.b746 + m.b785 + m.b825 == 2)
m.c6 = Constraint(expr= m.b8 + m.b9 + m.b10 + m.b11 + m.b16 + m.b21 + m.b27 + m.b34 + m.b42 + m.b51 + m.b61 + m.b72
+ m.b84 + m.b97 + m.b111 + m.b126 + m.b142 + m.b159 + m.b177 + m.b196 + m.b216 + m.b237 + m.b259
+ m.b282 + m.b306 + m.b331 + m.b357 + m.b384 + m.b412 + m.b441 + m.b471 + m.b502 + m.b534
+ m.b567 + m.b601 + m.b636 + m.b672 + m.b709 + m.b747 + m.b786 + m.b826 == 2)
m.c7 = Constraint(expr= m.b12 + m.b13 + m.b14 + m.b15 + m.b16 + m.b22 + m.b28 + m.b35 + m.b43 + m.b52 + m.b62 + m.b73
+ m.b85 + m.b98 + m.b112 + m.b127 + m.b143 + m.b160 + m.b178 + m.b197 + m.b217 + m.b238 + m.b260
+ m.b283 + m.b307 + m.b332 + m.b358 + m.b385 + m.b413 + m.b442 + m.b472 + m.b503 + m.b535
+ m.b568 + m.b602 + m.b637 + m.b673 + m.b710 + m.b748 + m.b787 + m.b827 == 2)
m.c8 = Constraint(expr= m.b17 + m.b18 + m.b19 + m.b20 + m.b21 + m.b22 + m.b29 + m.b36 + m.b44 + m.b53 + m.b63 + m.b74
+ m.b86 + m.b99 + m.b113 + m.b128 + m.b144 + m.b161 + m.b179 + m.b198 + m.b218 + m.b239 + m.b261
+ m.b284 + m.b308 + m.b333 + m.b359 + m.b386 + m.b414 + m.b443 + m.b473 + m.b504 + m.b536
+ m.b569 + m.b603 + m.b638 + m.b674 + m.b711 + m.b749 + m.b788 + m.b828 == 2)
m.c9 = Constraint(expr= m.b23 + m.b24 + m.b25 + m.b26 + m.b27 + m.b28 + m.b29 + m.b37 + m.b45 + m.b54 + m.b64 + m.b75
+ m.b87 + m.b100 + m.b114 + m.b129 + m.b145 + m.b162 + m.b180 + m.b199 + m.b219 + m.b240
+ m.b262 + m.b285 + m.b309 + m.b334 + m.b360 + m.b387 + m.b415 + m.b444 + m.b474 + m.b505
+ m.b537 + m.b570 + m.b604 + m.b639 + m.b675 + m.b712 + m.b750 + m.b789 + m.b829 == 2)
m.c10 = Constraint(expr= m.b30 + m.b31 + m.b32 + m.b33 + m.b34 + m.b35 + m.b36 + m.b37 + m.b46 + m.b55 + m.b65 + m.b76
+ m.b88 + m.b101 + m.b115 + m.b130 + m.b146 + m.b163 + m.b181 + m.b200 + m.b220 + m.b241
+ m.b263 + m.b286 + m.b310 + m.b335 + m.b361 + m.b388 + m.b416 + m.b445 + m.b475 + m.b506
+ m.b538 + m.b571 + m.b605 + m.b640 + m.b676 + m.b713 + m.b751 + m.b790 + m.b830 == 2)
m.c11 = Constraint(expr= m.b38 + m.b39 + m.b40 + m.b41 + m.b42 + m.b43 + m.b44 + m.b45 + m.b46 + m.b56 + m.b66 + m.b77
+ m.b89 + m.b102 + m.b116 + m.b131 + m.b147 + m.b164 + m.b182 + m.b201 + m.b221 + m.b242
+ m.b264 + m.b287 + m.b311 + m.b336 + m.b362 + m.b389 + m.b417 + m.b446 + m.b476 + m.b507
+ m.b539 + m.b572 + m.b606 + m.b641 + m.b677 + m.b714 + m.b752 + m.b791 + m.b831 == 2)
m.c12 = Constraint(expr= m.b47 + m.b48 + m.b49 + m.b50 + m.b51 + m.b52 + m.b53 + m.b54 + m.b55 + m.b56 + m.b67 + m.b78
+ m.b90 + m.b103 + m.b117 + m.b132 + m.b148 + m.b165 + m.b183 + m.b202 + m.b222 + m.b243
+ m.b265 + m.b288 + m.b312 + m.b337 + m.b363 + m.b390 + m.b418 + m.b447 + m.b477 + m.b508
+ m.b540 + m.b573 + m.b607 + m.b642 + m.b678 + m.b715 + m.b753 + m.b792 + m.b832 == 2)
m.c13 = Constraint(expr= m.b57 + m.b58 + m.b59 + m.b60 + m.b61 + m.b62 + m.b63 + m.b64 + m.b65 + m.b66 + m.b67 + m.b79
+ m.b91 + m.b104 + m.b118 + m.b133 + m.b149 + m.b166 + m.b184 + m.b203 + m.b223 + m.b244
+ m.b266 + m.b289 + m.b313 + m.b338 + m.b364 + m.b391 + m.b419 + m.b448 + m.b478 + m.b509
+ m.b541 + m.b574 + m.b608 + m.b643 + m.b679 + m.b716 + m.b754 + m.b793 + m.b833 == 2)
m.c14 = Constraint(expr= m.b68 + m.b69 + m.b70 + m.b71 + m.b72 + m.b73 + m.b74 + m.b75 + m.b76 + m.b77 + m.b78 + m.b79
+ m.b92 + m.b105 + m.b119 + m.b134 + m.b150 + m.b167 + m.b185 + m.b204 + m.b224 + m.b245
+ m.b267 + m.b290 + m.b314 + m.b339 + m.b365 + m.b392 + m.b420 + m.b449 + m.b479 + m.b510
+ m.b542 + m.b575 + m.b609 + m.b644 + m.b680 + m.b717 + m.b755 + m.b794 + m.b834 == 2)
m.c15 = Constraint(expr= m.b80 + m.b81 + m.b82 + m.b83 + m.b84 + m.b85 + m.b86 + m.b87 + m.b88 + m.b89 + m.b90 + m.b91
+ m.b92 + m.b106 + m.b120 + m.b135 + m.b151 + m.b168 + m.b186 + m.b205 + m.b225 + m.b246
+ m.b268 + m.b291 + m.b315 + m.b340 + m.b366 + m.b393 + m.b421 + m.b450 + m.b480 + m.b511
+ m.b543 + m.b576 + m.b610 + m.b645 + m.b681 + m.b718 + m.b756 + m.b795 + m.b835 == 2)
m.c16 = Constraint(expr= m.b93 + m.b94 + m.b95 + m.b96 + m.b97 + m.b98 + m.b99 + m.b100 + m.b101 + m.b102 + m.b103
+ m.b104 + m.b105 + m.b106 + m.b121 + m.b136 + m.b152 + m.b169 + m.b187 + m.b206 + m.b226
+ m.b247 + m.b269 + m.b292 + m.b316 + m.b341 + m.b367 + m.b394 + m.b422 + m.b451 + m.b481
+ m.b512 + m.b544 + m.b577 + m.b611 + m.b646 + m.b682 + m.b719 + m.b757 + m.b796 + m.b836 == 2)
m.c17 = Constraint(expr= m.b107 + m.b108 + m.b109 + m.b110 + m.b111 + m.b112 + m.b113 + m.b114 + m.b115 + m.b116
+ m.b117 + m.b118 + m.b119 + m.b120 + m.b121 + m.b137 + m.b153 + m.b170 + m.b188 + m.b207
+ m.b227 + m.b248 + m.b270 + m.b293 + m.b317 + m.b342 + m.b368 + m.b395 + m.b423 + m.b452
+ m.b482 + m.b513 + m.b545 + m.b578 + m.b612 + m.b647 + m.b683 + m.b720 + m.b758 + m.b797
+ m.b837 == 2)
m.c18 = Constraint(expr= m.b122 + m.b123 + m.b124 + m.b125 + m.b126 + m.b127 + m.b128 + m.b129 + m.b130 + m.b131
+ m.b132 + m.b133 + m.b134 + m.b135 + m.b136 + m.b137 + m.b154 + m.b171 + m.b189 + m.b208
+ m.b228 + m.b249 + m.b271 + m.b294 + m.b318 + m.b343 + m.b369 + m.b396 + m.b424 + m.b453
+ m.b483 + m.b514 + m.b546 + m.b579 + m.b613 + m.b648 + m.b684 + m.b721 + m.b759 + m.b798
+ m.b838 == 2)
m.c19 = Constraint(expr= m.b138 + m.b139 + m.b140 + m.b141 + m.b142 + m.b143 + m.b144 + m.b145 + m.b146 + m.b147
+ m.b148 + m.b149 + m.b150 + m.b151 + m.b152 + m.b153 + m.b154 + m.b172 + m.b190 + m.b209
+ m.b229 + m.b250 + m.b272 + m.b295 + m.b319 + m.b344 + m.b370 + m.b397 + m.b425 + m.b454
+ m.b484 + m.b515 + m.b547 + m.b580 + m.b614 + m.b649 + m.b685 + m.b722 + m.b760 + m.b799
+ m.b839 == 2)
m.c20 = Constraint(expr= m.b155 + m.b156 + m.b157 + m.b158 + m.b159 + m.b160 + m.b161 + m.b162 + m.b163 + m.b164
+ m.b165 + m.b166 + m.b167 + m.b168 + m.b169 + m.b170 + m.b171 + m.b172 + m.b191 + m.b210
+ m.b230 + m.b251 + m.b273 + m.b296 + m.b320 + m.b345 + m.b371 + m.b398 + m.b426 + m.b455
+ m.b485 + m.b516 + m.b548 + m.b581 + m.b615 + m.b650 + m.b686 + m.b723 + m.b761 + m.b800
+ m.b840 == 2)
m.c21 = Constraint(expr= m.b173 + m.b174 + m.b175 + m.b176 + m.b177 + m.b178 + m.b179 + m.b180 + m.b181 + m.b182
+ m.b183 + m.b184 + m.b185 + m.b186 + m.b187 + m.b188 + m.b189 + m.b190 + m.b191 + m.b211
+ m.b231 + m.b252 + m.b274 + m.b297 + m.b321 + m.b346 + m.b372 + m.b399 + m.b427 + m.b456
+ m.b486 + m.b517 + m.b549 + m.b582 + m.b616 + m.b651 + m.b687 + m.b724 + m.b762 + m.b801
+ m.b841 == 2)
m.c22 = Constraint(expr= m.b192 + m.b193 + m.b194 + m.b195 + m.b196 + m.b197 + m.b198 + m.b199 + m.b200 + m.b201
+ m.b202 + m.b203 + m.b204 + m.b205 + m.b206 + m.b207 + m.b208 + m.b209 + m.b210 + m.b211
+ m.b232 + m.b253 + m.b275 + m.b298 + m.b322 + m.b347 + m.b373 + m.b400 + m.b428 + m.b457
+ m.b487 + m.b518 + m.b550 + m.b583 + m.b617 + m.b652 + m.b688 + m.b725 + m.b763 + m.b802
+ m.b842 == 2)
m.c23 = Constraint(expr= m.b212 + m.b213 + m.b214 + m.b215 + m.b216 + m.b217 + m.b218 + m.b219 + m.b220 + m.b221
+ m.b222 + m.b223 + m.b224 + m.b225 + m.b226 + m.b227 + m.b228 + m.b229 + m.b230 + m.b231
+ m.b232 + m.b254 + m.b276 + m.b299 + m.b323 + m.b348 + m.b374 + m.b401 + m.b429 + m.b458
+ m.b488 + m.b519 + m.b551 + m.b584 + m.b618 + m.b653 + m.b689 + m.b726 + m.b764 + m.b803
+ m.b843 == 2)
m.c24 = Constraint(expr= m.b233 + m.b234 + m.b235 + m.b236 + m.b237 + m.b238 + m.b239 + m.b240 + m.b241 + m.b242
+ m.b243 + m.b244 + m.b245 + m.b246 + m.b247 + m.b248 + m.b249 + m.b250 + m.b251 + m.b252
+ m.b253 + m.b254 + m.b277 + m.b300 + m.b324 + m.b349 + m.b375 + m.b402 + m.b430 + m.b459
+ m.b489 + m.b520 + m.b552 + m.b585 + m.b619 + m.b654 + m.b690 + m.b727 + m.b765 + m.b804
+ m.b844 == 2)
m.c25 = Constraint(expr= m.b255 + m.b256 + m.b257 + m.b258 + m.b259 + m.b260 + m.b261 + m.b262 + m.b263 + m.b264
+ m.b265 + m.b266 + m.b267 + m.b268 + m.b269 + m.b270 + m.b271 + m.b272 + m.b273 + m.b274
+ m.b275 + m.b276 + m.b277 + m.b301 + m.b325 + m.b350 + m.b376 + m.b403 + m.b431 + m.b460
+ m.b490 + m.b521 + m.b553 + m.b586 + m.b620 + m.b655 + m.b691 + m.b728 + m.b766 + m.b805
+ m.b845 == 2)
m.c26 = Constraint(expr= m.b278 + m.b279 + m.b280 + m.b281 + m.b282 + m.b283 + m.b284 + m.b285 + m.b286 + m.b287
+ m.b288 + m.b289 + m.b290 + m.b291 + m.b292 + m.b293 + m.b294 + m.b295 + m.b296 + m.b297
+ m.b298 + m.b299 + m.b300 + m.b301 + m.b326 + m.b351 + m.b377 + m.b404 + m.b432 + m.b461
+ m.b491 + m.b522 + m.b554 + m.b587 + m.b621 + m.b656 + m.b692 + m.b729 + m.b767 + m.b806
+ m.b846 == 2)
m.c27 = Constraint(expr= m.b302 + m.b303 + m.b304 + m.b305 + m.b306 + m.b307 + m.b308 + m.b309 + m.b310 + m.b311
+ m.b312 + m.b313 + m.b314 + m.b315 + m.b316 + m.b317 + m.b318 + m.b319 + m.b320 + m.b321
+ m.b322 + m.b323 + m.b324 + m.b325 + m.b326 + m.b352 + m.b378 + m.b405 + m.b433 + m.b462
+ m.b492 + m.b523 + m.b555 + m.b588 + m.b622 + m.b657 + m.b693 + m.b730 + m.b768 + m.b807
+ m.b847 == 2)
m.c28 = Constraint(expr= m.b327 + m.b328 + m.b329 + m.b330 + m.b331 + m.b332 + m.b333 + m.b334 + m.b335 + m.b336
+ m.b337 + m.b338 + m.b339 + m.b340 + m.b341 + m.b342 + m.b343 + m.b344 + m.b345 + m.b346
+ m.b347 + m.b348 + m.b349 + m.b350 + m.b351 + m.b352 + m.b379 + m.b406 + m.b434 + m.b463
+ m.b493 + m.b524 + m.b556 + m.b589 + m.b623 + m.b658 + m.b694 + m.b731 + m.b769 + m.b808
+ m.b848 == 2)
m.c29 = Constraint(expr= m.b353 + m.b354 + m.b355 + m.b356 + m.b357 + m.b358 + m.b359 + m.b360 + m.b361 + m.b362
+ m.b363 + m.b364 + m.b365 + m.b366 + m.b367 + m.b368 + m.b369 + m.b370 + m.b371 + m.b372
+ m.b373 + m.b374 + m.b375 + m.b376 + m.b377 + m.b378 + m.b379 + m.b407 + m.b435 + m.b464
+ m.b494 + m.b525 + m.b557 + m.b590 + m.b624 + m.b659 + m.b695 + m.b732 + m.b770 + m.b809
+ m.b849 == 2)
m.c30 = Constraint(expr= m.b380 + m.b381 + m.b382 + m.b383 + m.b384 + m.b385 + m.b386 + m.b387 + m.b388 + m.b389
+ m.b390 + m.b391 + m.b392 + m.b393 + m.b394 + m.b395 + m.b396 + m.b397 + m.b398 + m.b399
+ m.b400 + m.b401 + m.b402 + m.b403 + m.b404 + m.b405 + m.b406 + m.b407 + m.b436 + m.b465
+ m.b495 + m.b526 + m.b558 + m.b591 + m.b625 + m.b660 + m.b696 + m.b733 + m.b771 + m.b810
+ m.b850 == 2)
m.c31 = Constraint(expr= m.b408 + m.b409 + m.b410 + m.b411 + m.b412 + m.b413 + m.b414 + m.b415 + m.b416 + m.b417
+ m.b418 + m.b419 + m.b420 + m.b421 + m.b422 + m.b423 + m.b424 + m.b425 + m.b426 + m.b427
+ m.b428 + m.b429 + m.b430 + m.b431 + m.b432 + m.b433 + m.b434 + m.b435 + m.b436 + m.b466
+ m.b496 + m.b527 + m.b559 + m.b592 + m.b626 + m.b661 + m.b697 + m.b734 + m.b772 + m.b811
+ m.b851 == 2)
m.c32 = Constraint(expr= m.b437 + m.b438 + m.b439 + m.b440 + m.b441 + m.b442 + m.b443 + m.b444 + m.b445 + m.b446
+ m.b447 + m.b448 + m.b449 + m.b450 + m.b451 + m.b452 + m.b453 + m.b454 + m.b455 + m.b456
+ m.b457 + m.b458 + m.b459 + m.b460 + m.b461 + m.b462 + m.b463 + m.b464 + m.b465 + m.b466
+ m.b497 + m.b528 + m.b560 + m.b593 + m.b627 + m.b662 + m.b698 + m.b735 + m.b773 + m.b812
+ m.b852 == 2)
m.c33 = Constraint(expr= m.b467 + m.b468 + m.b469 + m.b470 + m.b471 + m.b472 + m.b473 + m.b474 + m.b475 + m.b476
+ m.b477 + m.b478 + m.b479 + m.b480 + m.b481 + m.b482 + m.b483 + m.b484 + m.b485 + m.b486
+ m.b487 + m.b488 + m.b489 + m.b490 + m.b491 + m.b492 + m.b493 + m.b494 + m.b495 + m.b496
+ m.b497 + m.b529 + m.b561 + m.b594 + m.b628 + m.b663 + m.b699 + m.b736 + m.b774 + m.b813
+ m.b853 == 2)
m.c34 = Constraint(expr= m.b498 + m.b499 + m.b500 + m.b501 + m.b502 + m.b503 + m.b504 + m.b505 + m.b506 + m.b507
+ m.b508 + m.b509 + m.b510 + m.b511 + m.b512 + m.b513 + m.b514 + m.b515 + m.b516 + m.b517
+ m.b518 + m.b519 + m.b520 + m.b521 + m.b522 + m.b523 + m.b524 + m.b525 + m.b526 + m.b527
+ m.b528 + m.b529 + m.b562 + m.b595 + m.b629 + m.b664 + m.b700 + m.b737 + m.b775 + m.b814
+ m.b854 == 2)
m.c35 = Constraint(expr= m.b530 + m.b531 + m.b532 + m.b533 + m.b534 + m.b535 + m.b536 + m.b537 + m.b538 + m.b539
+ m.b540 + m.b541 + m.b542 + m.b543 + m.b544 + m.b545 + m.b546 + m.b547 + m.b548 + m.b549
+ m.b550 + m.b551 + m.b552 + m.b553 + m.b554 + m.b555 + m.b556 + m.b557 + m.b558 + m.b559
+ m.b560 + m.b561 + m.b562 + m.b596 + m.b630 + m.b665 + m.b701 + m.b738 + m.b776 + m.b815
+ m.b855 == 2)
m.c36 = Constraint(expr= m.b563 + m.b564 + m.b565 + m.b566 + m.b567 + m.b568 + m.b569 + m.b570 + m.b571 + m.b572
+ m.b573 + m.b574 + m.b575 + m.b576 + m.b577 + m.b578 + m.b579 + m.b580 + m.b581 + m.b582
+ m.b583 + m.b584 + m.b585 + m.b586 + m.b587 + m.b588 + m.b589 + m.b590 + m.b591 + m.b592
+ m.b593 + m.b594 + m.b595 + m.b596 + m.b631 + m.b666 + m.b702 + m.b739 + m.b777 + m.b816
+ m.b856 == 2)
m.c37 = Constraint(expr= m.b597 + m.b598 + m.b599 + m.b600 + m.b601 + m.b602 + m.b603 + m.b604 + m.b605 + m.b606
+ m.b607 + m.b608 + m.b609 + m.b610 + m.b611 + m.b612 + m.b613 + m.b614 + m.b615 + m.b616
+ m.b617 + m.b618 + m.b619 + m.b620 + m.b621 + m.b622 + m.b623 + m.b624 + m.b625 + m.b626
+ m.b627 + m.b628 + m.b629 + m.b630 + m.b631 + m.b667 + m.b703 + m.b740 + m.b778 + m.b817
+ m.b857 == 2)
m.c38 = Constraint(expr= m.b632 + m.b633 + m.b634 + m.b635 + m.b636 + m.b637 + m.b638 + m.b639 + m.b640 + m.b641
+ m.b642 + m.b643 + m.b644 + m.b645 + m.b646 + m.b647 + m.b648 + m.b649 + m.b650 + m.b651
+ m.b652 + m.b653 + m.b654 + m.b655 + m.b656 + m.b657 + m.b658 + m.b659 + m.b660 + m.b661
+ m.b662 + m.b663 + m.b664 + m.b665 + m.b666 + m.b667 + m.b704 + m.b741 + m.b779 + m.b818
+ m.b858 == 2)
m.c39 = Constraint(expr= m.b668 + m.b669 + m.b670 + m.b671 + m.b672 + m.b673 + m.b674 + m.b675 + m.b676 + m.b677
+ m.b678 + m.b679 + m.b680 + m.b681 + m.b682 + m.b683 + m.b684 + m.b685 + m.b686 + m.b687
+ m.b688 + m.b689 + m.b690 + m.b691 + m.b692 + m.b693 + m.b694 + m.b695 + m.b696 + m.b697
+ m.b698 + m.b699 + m.b700 + m.b701 + m.b702 + m.b703 + m.b704 + m.b742 + m.b780 + m.b819
+ m.b859 == 2)
m.c40 = Constraint(expr= m.b705 + m.b706 + m.b707 + m.b708 + m.b709 + m.b710 + m.b711 + m.b712 + m.b713 + m.b714
+ m.b715 + m.b716 + m.b717 + m.b718 + m.b719 + m.b720 + m.b721 + m.b722 + m.b723 + m.b724
+ m.b725 + m.b726 + m.b727 + m.b728 + m.b729 + m.b730 + m.b731 + m.b732 + m.b733 + m.b734
+ m.b735 + m.b736 + m.b737 + m.b738 + m.b739 + m.b740 + m.b741 + m.b742 + m.b781 + m.b820
+ m.b860 == 2)
m.c41 = Constraint(expr= m.b743 + m.b744 + m.b745 + m.b746 + m.b747 + m.b748 + m.b749 + m.b750 + m.b751 + m.b752
+ m.b753 + m.b754 + m.b755 + m.b756 + m.b757 + m.b758 + m.b759 + m.b760 + m.b761 + m.b762
+ m.b763 + m.b764 + m.b765 + m.b766 + m.b767 + m.b768 + m.b769 + m.b770 + m.b771 + m.b772
+ m.b773 + m.b774 + m.b775 + m.b776 + m.b777 + m.b778 + m.b779 + m.b780 + m.b781 + m.b821
+ m.b861 == 2)
m.c42 = Constraint(expr= m.b782 + m.b783 + m.b784 + m.b785 + m.b786 + m.b787 + m.b788 + m.b789 + m.b790 + m.b791
+ m.b792 + m.b793 + m.b794 + m.b795 + m.b796 + m.b797 + m.b798 + m.b799 + m.b800 + m.b801
+ m.b802 + m.b803 + m.b804 + m.b805 + m.b806 + m.b807 + m.b808 + m.b809 + m.b810 + m.b811
+ m.b812 + m.b813 + m.b814 + m.b815 + m.b816 + m.b817 + m.b818 + m.b819 + m.b820 + m.b821
+ m.b862 == 2)
m.c43 = Constraint(expr= m.b822 + m.b823 + m.b824 + m.b825 + m.b826 + m.b827 + m.b828 + m.b829 + m.b830 + m.b831
+ m.b832 + m.b833 + m.b834 + m.b835 + m.b836 + m.b837 + m.b838 + m.b839 + m.b840 + m.b841
+ m.b842 + m.b843 + m.b844 + m.b845 + m.b846 + m.b847 + m.b848 + m.b849 + m.b850 + m.b851
+ m.b852 + m.b853 + m.b854 + m.b855 + m.b856 + m.b857 + m.b858 + m.b859 + m.b860 + m.b861
+ m.b862 == 2)
| [
"v.jusevicius@gmail.com"
] | v.jusevicius@gmail.com |
6786a568c54a17e0b7e59fcb7c9bc5425a1874be | 1eccb884bb72a07b7d5417c523fa082c13933984 | /backend/backend/settings.py | cd9d385829fbfeb6b9aa667d5a22266fca66aeff | [] | no_license | sajal09/rainbow- | 5c3bc744c406f30a6516ce3c5449579f4ca3a1aa | 963ccaa446c642cd9fde5ccc262d4a54a9b80561 | refs/heads/master | 2023-01-24T02:14:58.470392 | 2020-07-15T13:10:57 | 2020-07-15T13:10:57 | 124,750,812 | 0 | 0 | null | 2023-01-05T22:27:00 | 2018-03-11T11:44:50 | JavaScript | UTF-8 | Python | false | false | 3,222 | py | """
Django settings for backend project.
Generated by 'django-admin startproject' using Django 3.0.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; for any real
# deployment it should be loaded from an environment variable or secret store.
SECRET_KEY = '+#4be8bqf5x(ddp@i7v830nkh!ug5qkk-4%7jq9fccq6hmp!q1'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# Empty list is only valid while DEBUG is True; production needs real hosts.
ALLOWED_HOSTS = []

# django-cors-headers: accept cross-origin requests from any origin.
# Convenient for local frontend development, too permissive for production.
CORS_ORIGIN_ALLOW_ALL = True
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'backendapi',
'corsheaders',
]
# Order matters here: django-cors-headers requires CorsMiddleware to be
# placed as high as possible -- in particular BEFORE CommonMiddleware and
# any other middleware that can generate responses -- otherwise the CORS
# headers are not added to those responses. It was previously listed last,
# which silently breaks CORS for redirects/errors produced by earlier
# middleware.
MIDDLEWARE = [
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'backend.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'backend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases

# Single SQLite file in the project root -- fine for development; swap the
# ENGINE/NAME for a server-backed database (e.g. PostgreSQL) in production.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
| [
"sajalchaurasia09@gmail.com"
] | sajalchaurasia09@gmail.com |
f8880d12b3954bf1f29a84e2fa0adf8ba9e779d6 | 52a4d869976a97498bdf56a8d0ff92cac138a136 | /Bioinformatics Textbook Track/Chapter 2/rosalind_ba2d.py | 238db719734990db59ec3be92ca629ff672af9ea | [] | no_license | aakibinesar/Rosalind | d726369a787d848cc378976b886189978a60a3a5 | 375bbdbfb16bf11b2f980701bbd0ba74a1605cdb | refs/heads/master | 2022-08-18T09:36:00.941080 | 2020-05-24T18:49:38 | 2020-05-24T18:49:38 | 264,722,651 | 0 | 0 | null | 2020-05-17T17:51:03 | 2020-05-17T17:40:59 | null | UTF-8 | Python | false | false | 1,561 | py | def greedymotifsearch(dna,k,t):
best = [s[:k] for s in dna]
for i in range(len(dna[0])-k+1):
tempbest = [dna[0][i:i+k]]
for m in range(1,t):
matrix = motifsToProfile(tempbest)
tempbest.append(profileMostProbablekmer(dna[m],k,matrix))
if score(tempbest) < score(best):
best = tempbest
return best
def score(motifs):
    """Total column-wise mismatch count of a motif collection.

    For each column, every character that differs from the most frequent
    base (A/C/G/T) in that column counts as one mismatch; lower is better.
    """
    total = 0
    for column in zip(*motifs):
        most_common = max(column.count(base) for base in 'ACGT')
        total += len(column) - most_common
    return total
def motifsToProfile(motifs):
    """Build a profile matrix (base -> per-column frequency list) from motifs.

    No pseudocounts are applied: a base absent from a column gets
    probability 0.0, exactly as the BA2D variant of the algorithm requires.
    """
    depth = float(len(motifs))
    profile = {'A': [], 'C': [], 'G': [], 'T': []}
    for column in zip(*motifs):
        for base in 'ACGT':
            profile[base].append(column.count(base) / depth)
    return profile
def profileMostProbablekmer(text, k, matrix):
    """Return the k-mer of `text` most probable under the profile `matrix`.

    `matrix` maps each base ('A'/'C'/'G'/'T') to a list of k per-position
    probabilities. Ties are broken toward the leftmost window: the first
    k-mer is always accepted (even with probability 0), matching the
    reference algorithm's behavior when no window has positive probability.

    Fixes: `maxp == None` replaced with the idiomatic `is None`
    comparison (PEP 8 / E711); behavior is unchanged.
    """
    best_p = None
    best_kmer = None
    for i in range(len(text) - k + 1):
        kmer = text[i:i + k]
        p = 1
        for j in range(k):
            p *= matrix[kmer[j]][j]
        # `best_p is None` guarantees the first window is always taken.
        if best_p is None or p > best_p:
            best_p = p
            best_kmer = kmer
    return best_kmer
# Driver: the first line of the input file holds "k t"; every remaining
# line is one DNA string. Prints the best motif set, one k-mer per line.
with open('rosalind_ba2d.txt') as f:
    k,t = map(int,f.readline().rstrip().split(' '))
    strings = [st.rstrip() for st in f.readlines()]
# NOTE(review): this greedy search is deterministic, so re-running it on
# the same input cannot change the answer despite the comment below.
print('\n'.join(greedymotifsearch(strings,k,t))) # bug: may be wrong , try several times
"noreply@github.com"
] | aakibinesar.noreply@github.com |
3fa74ef1b8d7691e99faafe2a76a31346ce5aa33 | 35747e9ab6b1b70720ec90efcfce3e72e792b28c | /DQNAgent.py | 48dbd05199dcdda8e32e0a2152f104462b4ac961 | [] | no_license | nayansinghal/Deep-Q-Learning | aa568ff2f0400247f10ed4acd9732e693092b6d1 | 95b706500fd9539b5818ca7b147f79a44472c210 | refs/heads/master | 2021-08-27T19:54:52.692100 | 2017-11-04T21:03:45 | 2017-11-04T21:03:45 | 108,695,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,911 | py | import numpy as np
from collections import deque
import random
from ModelBuilder import ModelBuilder
from AtariModel import AtariModel
from Processor import AtariProcessor
from Memory import Memory
class DQNAgent:
    """Deep Q-Network agent: epsilon-greedy action selection plus
    experience-replay training over an AtariModel Q-network.

    Collaborators (project classes, defined elsewhere):
      * Memory -- replay buffer that also assembles "recent state" windows.
      * AtariModel -- wrapper around a Keras-style model (create/compile/load,
        plus a `.model` attribute exposing predict/fit).
    """

    def __init__(self, processor, state_size, action_size, lr=0.001, epsilon=1.0, model_path=None):
        # `processor` (an AtariProcessor) is stored but not used inside this
        # class -- presumably callers pre-process frames with it.
        self.processor = processor
        self.state_size = state_size
        self.action_size = action_size
        self.memory = Memory()          # replay buffer + recent-frame window
        self.gamma = 0.99               # discount factor for future rewards
        self.epsilon = epsilon          # exploration rate for epsilon-greedy
        self.epsilon_min = 0.1          # floor for epsilon (decay currently disabled, see replay())
        self.epsilon_decay = 0.995
        # Build, compile, and optionally restore weights for the Q-network.
        self.model = AtariModel(state_size, action_size, lr)
        self.model.create_model()
        self.model.compile_model()
        self.model.load(model_path)

    def remember(self, state, action, reward, next_state, done):
        """Store one transition (s, a, r, s', done) in the replay memory.

        Single observations are expanded into stacked recent-state windows
        via the Memory helper before being stored.
        """
        self.memory.append(state)
        next_state = self.memory.get_recent_state(next_state)
        # NOTE(review): reaches into Memory internals (memory.memory and
        # memory.current_state); assumes append() refreshed current_state.
        self.memory.memory.append((self.memory.current_state, action, reward, next_state, done))

    def get_Act(self, state):
        """Epsilon-greedy policy: random action with probability epsilon,
        otherwise the argmax of the predicted Q-values."""
        if np.random.rand() <= self.epsilon:
            return random.randrange(self.action_size)
        state = self.memory.get_recent_state(state)
        state = np.expand_dims(state, axis=0)  # add the batch dimension
        return np.argmax(self.model.model.predict(state)[0])

    def replay(self, batch_size):
        """Train the Q-network on one minibatch sampled from replay memory.

        Uses the one-step Q-learning target r + gamma * max_a' Q(s', a');
        terminal transitions (done=True) use the raw reward only.
        """
        minibatch = random.sample(self.memory.memory, batch_size)
        state, action, reward, next_state, done = zip(*minibatch)
        next_state = np.array(next_state)
        state = np.array(state)
        # Bootstrapped target values, one per sample in the batch.
        target = reward + self.gamma * np.amax(self.model.model.predict(next_state), axis=1)
        target_f = self.model.model.predict(state)
        # Overwrite only the taken action's Q-value with its target.
        # NOTE(review): the loop deliberately re-binds `target`, `action`
        # and `done` to per-sample values, shadowing the batch tuples above.
        for idx, (target_, target, R, action, done) in enumerate(zip(target_f, target, reward, action, done)):
            if not done:
                target_[action] = target.astype('float32')
            else:
                target_[action] = R
        self.model.model.fit(state, target_f, verbose=0, epochs=1)
        # Since Training, not decreasing the exploration rate
        # if self.epsilon > self.epsilon_min:
        #     self.epsilon *= self.epsilon_decay
"singhalnayan91@gmail.com"
] | singhalnayan91@gmail.com |
007455755e4df954816604e5b84ea3665446662f | bc900ac54e870c5150f1cf64a1196037c54835ce | /Main.py | 9d440ad2e8b1d45884e8a288feb45b10fddf7d26 | [] | no_license | ZeCanelha/AED_PROJECTO1 | 2065c12beb5047e80025f710c05e58b5dac53985 | 051991e4b2779d816139f520d4798d34ca94ff27 | refs/heads/master | 2021-01-19T21:15:05.700968 | 2017-03-26T22:14:48 | 2017-03-26T22:14:48 | 82,474,935 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,443 | py | import csv
import DoubleNode as DoubleNode
import DoubleLinkedList as DLList
import Functions as F
import re as split
# TODO: build the dictionary of Country / Code (sigla) pairs.
# Module-level map country-name -> country-code, populated by read_csv_files().
_par_sigla_pais = {};
def create_ddlist(data):
    """Interactive sub-menu operating on a doubly linked list of countries.

    Each node holds one country's row from the CSV. Loops until the user
    picks option 5 (return to main menu).

    NOTE(review): Python-2 style I/O -- numeric choices are read with
    input() (evaluated) and free text with raw_input().
    """
    DList = DLList.DoubleLinkedList()
    Function = F.Functions(1,DList,_par_sigla_pais)
    """ Cada no vai ter informacao sobre um pais """
    # Each node will hold the information for one country.
    for i in range(len(data)-1):
        DList.add_end(data[i])
    while(1):
        print("\t\tDouble Linked Lists\n\n\t1.Search Element\n\t2.Insert Element\n\t3.Edit Element\n\t4.Remove Element\n\t5.Main Menu\n\t6.Print")
        opt = input()
        """ Search """
        # Option 1: look up a country by name or code.
        if ( opt == 1 ):
            print("Country Name or Country Code:")
            response = raw_input()
            print("Index: %s\n" % Function.search(response))
        """ Insert: Vai dar pra inserir tudo """
        # Option 2: insert either a whole new country or a data point.
        if ( opt == 2):
            # Inserting a new country implies inserting country + code (sigla)
            # and an array of empty years 1960-2016 | 46 positions.
            print("1. Insert new country\n")
            print("2. Insert new data on existent countries\n")
            new_opt = input()
            if ( new_opt == 1 ):
                print("Country name: ")
                country = raw_input()
                print("Sigla: ")
                sig = raw_input()
                print(Function.inser_new_country(country,sig))
            if ( new_opt == 2 ):
                print("Country: ")
                country = raw_input()
                print("Year: ")
                year = input()
                print("Data: ")
                # NOTE(review): this re-binds `data`, shadowing the function
                # parameter; harmless today (the parameter is only read once,
                # above), but fragile if the loop is ever changed.
                data = input()
                Function.insert(country,year,data)
        """ Edit: Penso que seja so editar funcoes em determinados anos """
        # Option 3: edit the value stored for one country/year.
        if ( opt == 3):
            print("Country: ")
            country = raw_input()
            print("Year: ")
            year = input()
            print("Data: ")
            data = input()
            print(Function.edit(country,year,data))
        """ Remove: Remove deve ser como insert, remover tudo """
        # Option 4: remove a whole country or a single year's data.
        if ( opt == 4):
            print("1.Remove Country")
            print("2.Remove information")
            opt1 = input()
            if ( opt1 == 1 ):
                print("Country: ")
                country = raw_input()
                print(Function.remove_country(country))
                DList.print_list()
            if ( opt1 == 2 ):
                print("Country: ")
                country = raw_input()
                print("Year: ")
                year = input()
                print(Function.remove(country,year))
        # Option 6: dump the current list contents.
        if opt == 6:
            DList.print_list()
        # Option 5: leave this sub-menu and return to the caller.
        if ( opt == 5):
            break
def read_csv_files():
    """Read 'dados.csv' (';'-separated), returning every row as a list.

    Side effect: fills the module-level _par_sigla_pais map with
    country-name -> country-code pairs taken from the first two fields.
    """
    data_list = []
    with open('dados.csv','r') as fp:
        for row in csv.reader(fp, delimiter=";", quotechar='"'):
            fields = ('|'.join(row)).split('|')
            data_list.append(fields)
            # First two fields of each row: country name and its sigla.
            _par_sigla_pais[fields[0]] = fields[1]
    # All of the file's information, one list per row.
    return data_list
def init():
    """Entry point: load the CSV data once, then loop on the top-level menu."""
    data = read_csv_files()
    running = True
    while running:
        print("\t\tMenu\n\n\t1.Double Linked Lists\n\t2.Exit")
        choice = input()
        if choice == 1:
            create_ddlist(data)
        if choice == 2:
            running = False
if __name__ == '__main__':
init()
| [
"josemc@student.dei.uc.pt"
] | josemc@student.dei.uc.pt |
7c9e10c88fe57e659271b8670b20e26f8d2ecefe | 911e7c7ba3ff999eb58275595d934dee968d8f80 | /trunk/agent/src/main/python/ambari_agent/ZooKeeperCommunicator.py | 576a4a4b458c22c4da1e845e50d351cc59c69c8a | [
"Apache-2.0"
] | permissive | sreev/ambari | 1bce266ed0d318af3ebe50a7bd6b378083612330 | 48b92a5aa58debe7824f4337a97dfa8a8bb07f71 | refs/heads/master | 2021-01-10T21:44:10.844859 | 2012-04-19T19:20:28 | 2012-04-19T19:20:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,890 | py | #!/usr/bin/env python2.6
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import logging
import logging.handlers
import signal
import simplejson
import socket
import sys, traceback
import time
import threading
import zookeeper
from optparse import OptionParser
from Runner import Runner
logger = logging.getLogger()
options = None  # populated by parser.parse_args() in main()
args = None     # populated by parser.parse_args() in main()
# World-scheme ACL: full permissions (0x1f) for anyone -- hence "UNSAFE".
ZOOKEEPER_OPEN_ACL_UNSAFE = {"perms":0x1f, "scheme":"world", "id" :"anyone"}
timeout = 10  # seconds to sleep between retries while in safe mode
connected = False
conn_cv = threading.Condition()  # signals completion of connect/auth callbacks
session_time = 100000  # ZooKeeper session timeout passed to zookeeper.init
class ZooKeeperCommunicator(threading.Thread):
hmsZkPrefix = "/clusters"
def __init__(self, zkservers, credential):
threading.Thread.__init__(self)
logger.debug('Initializing ZooKeeperCommunicator thread.')
zookeeper.set_debug_level(zookeeper.LOG_LEVEL_DEBUG)
self.zh = None
self.zkservers = zkservers
self.lock = threading.Lock()
self.acl = [ZOOKEEPER_OPEN_ACL_UNSAFE]
self.safeMode = True
self.credential = credential
def auth_callback(self, zh, result_code):
conn_cv.acquire()
conn_cv.notify()
conn_cv.release()
def conn_callback(self, zh, *args):
conn_cv.acquire()
conn_cv.notify()
conn_cv.release()
def start(self):
conn_cv.acquire()
self.zh = zookeeper.init(self.zkservers, self.conn_callback, session_time)
conn_cv.wait()
conn_cv.release()
if self.credential!=None:
print "credential: "+self.credential
conn_cv.acquire()
zookeeper.add_auth(self.zh, "digest", self.credential, self.auth_callback)
conn_cv.wait()
conn_cv.release()
logger.info("ZooKeeper connection established.")
def __del__(self):
zookeeper.close(self.zh)
def locate(self):
hostname = socket.gethostname()
try:
children = sorted(zookeeper.get_children(self.zh, self.hmsZkPrefix))
for child in children:
znode = self.hmsZkPrefix + '/' + child + '/' + hostname
if zookeeper.exists(self.zh, znode, None)!=None:
self.znode = znode
self.actionNode = znode + '/action'
self.statusNode = '/status'
stat, acl = zookeeper.get_acl(self.zh, self.statusNode)
self.acl = acl
if zookeeper.OK == self.aget():
self.safeMode = False
break
except:
self.safeMode = True
if self.safeMode != False:
logger.warn("Can not locate " + hostname + " in zookeeper, sleep " + str(timeout) + " seconds.")
if self.lock.locked():
self.lock.release()
def update(self, zh, node, object):
buffer = simplejson.dumps(object)
if zookeeper.exists(zh, node, None) != None:
zookeeper.delete(zh, node, 0)
zookeeper.create(zh, node, buffer, self.acl, 0)
def enqueue(self, zh, node, object):
buffer = simplejson.dumps(object)
zookeeper.create(zh, node, buffer, self.acl, zookeeper.SEQUENCE)
def launch(self, zh, workLogNode, actionNode, statusNode):
state = {}
data = zookeeper.get(zh, actionNode, 0)
jsonp = simplejson.loads(data[0])
state['cmdPath'] = jsonp['cmdPath']
state['actionPath'] = actionNode
state['actionId'] = jsonp['actionId']
state['host'] = self.znode
state['status']='STARTING'
self.update(zh, workLogNode, state)
logger.info("Launch: "+simplejson.dumps(jsonp))
dispatcher = Runner()
try:
result = dispatcher.run(jsonp)
logger.info("Result: "+simplejson.dumps(result))
if "exit_code" in result and result['exit_code']==0:
state['status']='SUCCEEDED'
else:
state['status']='FAILED'
except:
logger.exception('Execution error: '+actionNode)
state['status']='FAILED'
self.update(zh, workLogNode, state)
self.enqueue(zh, statusNode, state)
def aget(self):
return zookeeper.aget_children(self.zh, self.actionNode, self.queue_watcher, self.queue_callback)
def queue_watcher(self, zh, event, state, path):
if zookeeper.OK != self.aget():
logger.error('Fail to monitor action queue for: '+self.actionNode)
self.safeMode = True
def queue_callback(self, zh, rc, data):
if zookeeper.OK == rc:
try:
for child in sorted(data):
action = self.actionNode + '/' + child
workLog = self.actionNode + '/' + child + '/worklog'
statusLog = self.statusNode + '/status-'
""" Launch the task if the task has not been executed """
if zookeeper.exists(zh, workLog, None) == None:
self.launch(zh, workLog, action, statusLog)
else:
""" If task has been previous launched, check for partial execution """
buffer = zookeeper.get(zh, workLog, 0)
state = simplejson.loads(buffer[0])
""" If task is incompleted in execution, launch again """
if 'status' in state and state['status'] == 'STARTING':
logger.info('Relaunch '+child)
self.launch(zh, workLog, action, statusLog)
else:
""" If the task has been launched, and completed, update status queue """
if zookeeper.exists(zh, statusLog, None) == None:
logger.info('Update status.')
self.update(zh, statusLog, state)
except NoNodeException, err:
""" Skip no node exception """
except Exception, err:
logger.exception(err)
else:
if zookeeper.NONODE == rc:
self.safeMode = True
if self.lock.locked():
self.lock.release()
def run(self):
self.locate()
while True:
try:
self.lock.acquire()
if self.safeMode == True:
time.sleep(timeout)
zookeeper.close(self.zh)
conn_cv.acquire()
self.zh = zookeeper.init(self.zkservers, self.conn_callback, session_time)
conn_cv.wait()
conn_cv.release()
self.locate()
if self.safeMode == False:
if zookeeper.OK != zookeeper.aget_children(self.zh, self.actionNode, self.queue_watcher, None):
logger.error('Fail to monitor action queue for: '+self.actionNode+', activate safe mode.')
self.safeMode = True
except Exception, err:
logger.exception(err)
def main(argv=None):
  """Parse command-line flags, configure logging and run the communicator.

  Fixes over the original: it hard-coded "localhost:2181" instead of
  honoring --zkservers, and it omitted the required `credential` argument
  to ZooKeeperCommunicator (a TypeError at runtime).
  """
  # Allow Ctrl-C
  signal.signal(signal.SIGINT, signal.SIG_DFL)
  parser = OptionParser()
  parser.add_option('-v', '--verbose',
    dest='verbose',
    default=False,
    action='store_true',
    help='Verbose logging. (default: %default)')
  parser.add_option('--zkservers',
    dest='zkservers',
    default='localhost:2181',
    help='Comma-separated list of host:port pairs. (default: %default)')
  global options
  global args
  (options, args) = parser.parse_args()
  if options.verbose:
    logger.setLevel(logging.DEBUG)
  else:
    logger.setLevel(logging.INFO)
  formatter = logging.Formatter("%(asctime)s %(filename)s:%(lineno)d - %(message)s")
  stream_handler = logging.StreamHandler()
  stream_handler.setFormatter(formatter)
  logger.addHandler(stream_handler)
  logger.info('Starting Zookeeper python example: %s' % ' '.join(sys.argv))
  # No digest credential by default; pass the servers the user asked for.
  zc = ZooKeeperCommunicator(options.zkservers, None)
  zc.start()
  zc.join()
# Script entry point.
if __name__ == '__main__':
  main()
| [
"sree_at_chess@yahoo.com"
] | sree_at_chess@yahoo.com |
077b5d30d14d285cab485c2c132910e626e14fc2 | dac8ec1bca7b390dac4aaedf94be0445cec03b66 | /lab04/run_NB.py | 53834b60d91845a6d924d88d71b14cd56d826d5d | [] | no_license | ldakir/Machine-Learning | 49333f641f745bcc523d6842fdc5e1c5db449d16 | 033a38459cac0d0c212f88a84ddcc6b772ec13dc | refs/heads/master | 2020-09-02T12:10:02.310934 | 2019-11-02T21:50:19 | 2019-11-02T21:50:19 | 219,218,176 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 906 | py | """
Top level comment: be sure to include the purpose/contents of this file
as well as the author(s)
"""
import util
from Partition import *
from NaiveBayes import *
import numpy as np
def main():
    """Train Naive Bayes on the training ARFF file, classify the test set,
    and print the accuracy plus the confusion matrix."""
    opts = util.parse_args()
    train_partition = util.read_arff(opts.train_filename)
    test_partition = util.read_arff(opts.test_filename)

    # Fit the model on the training partition.
    model = NaiveBayes(train_partition)

    # NOTE(review): the matrix is sized from len(test_partition.labels) --
    # presumably the number of classes; confirm against Partition's API.
    num_labels = len(test_partition.labels)
    confusion_matrix = np.zeros((num_labels, num_labels))

    num_correct = 0
    for example in test_partition.data:
        predicted = model.classify(example.features)
        actual = example.label
        confusion_matrix[actual][predicted] += 1
        if actual == predicted:
            num_correct += 1

    fraction = round(num_correct / test_partition.n, 6)
    print(f"Accuracy: {fraction} ({num_correct} out of {test_partition.n} correct)")
    print(confusion_matrix)
# Executed at import time -- the original file has no __main__ guard.
main()
| [
"39743074+ldakir@users.noreply.github.com"
] | 39743074+ldakir@users.noreply.github.com |
8d1dcda3139a9d6e5d1dcd75a2e85017e18a0a4a | 78c3082e9082b5b50435805723ae00a58ca88e30 | /03.AI알고리즘 소스코드/venv/Lib/site-packages/caffe2/python/operator_test/flatten_op_test.py | ba5fce81296a516900f9cabf049c0c697338ce54 | [] | no_license | jinStar-kimmy/algorithm | 26c1bc456d5319578110f3d56f8bd19122356603 | 59ae8afd8d133f59a6b8d8cee76790fd9dfe1ff7 | refs/heads/master | 2023-08-28T13:16:45.690232 | 2021-10-20T08:23:46 | 2021-10-20T08:23:46 | 419,217,105 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 960 | py |
from hypothesis import given
import numpy as np
from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
class TestFlatten(hu.HypothesisTestCase):
    """Checks the caffe2 Flatten operator against a numpy reference."""

    @given(X=hu.tensor(min_dim=2, max_dim=4),
           **hu.gcs)
    def test_flatten(self, X, gc, dc):
        # Flatten is defined for every axis in [0, ndim].
        for axis in range(X.ndim + 1):
            op = core.CreateOperator("Flatten", ["X"], ["Y"], axis=axis)

            def flatten_ref(X):
                # Collapse dims before `axis` into rows, the rest into cols.
                outer = int(np.prod(X.shape[:axis]))
                inner = int(np.prod(X.shape[axis:]))
                return (np.copy(X).reshape(outer, inner),)

            self.assertReferenceChecks(gc, op, [X], flatten_ref)
            # Check over multiple devices
            self.assertDeviceChecks(dc, op, [X], [0])
# Allow running this test module directly via unittest.
if __name__ == "__main__":
    import unittest
    unittest.main()
| [
"gudwls3126@gmail.com"
] | gudwls3126@gmail.com |
a4d9cd67be8d8087ca60582c8e2dacbdbc3aac6e | 54e0c677471942aa35386e810e7fa54753bbecce | /Assignment 3 submission/preprocess.py | 23b0a97ab08a87dcc6522ff1fa19a31f8c302d1f | [] | no_license | itsuncheng/COMP4901I_Assignments | b27dd86d4e5a438c54c1298478bcc07efe2ea879 | 3bbe115056900515a4fa8fee1973219f9efdf334 | refs/heads/master | 2021-05-16T21:39:59.170470 | 2020-03-27T08:43:26 | 2020-03-27T08:43:26 | 250,479,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,657 | py | import pandas as pd
import re
import numpy as np
import pickle
from collections import Counter
import torch
import torch.utils.data as data
# Reserved vocabulary indices: 0 pads sequences, 1 marks unknown words.
PAD_INDEX = 0
UNK_INDEX = 1
def clean(sent):
# clean the data
############################################################
# TO DO
############################################################
sent = re.sub(r"[^A-Za-z0-9(),!?\'\`]", " ", sent)
sent = re.sub(r"\'s", " \'s", sent)
sent = re.sub(r"\'ve", " \'ve", sent)
sent = re.sub(r"n\'t", " n\'t", sent)
sent = re.sub(r"\'re", " \'re", sent)
sent = re.sub(r"\'d", " \'d", sent)
sent = re.sub(r"\'ll", " \'ll", sent)
sent = re.sub(r",", " , ", sent)
sent = re.sub(r"!", " ! ", sent)
sent = re.sub(r"\(", " \( ", sent)
sent = re.sub(r"\)", " \) ", sent)
sent = re.sub(r"\?", " \? ", sent)
sent = sent.strip().lower()
return sent
class Vocab():
    """Bidirectional word/index mapping with token counts.

    Indices 0 and 1 are reserved for the PAD and UNK tokens; new words are
    assigned consecutive indices in first-seen order.
    """

    def __init__(self):
        self.word2index = {"PAD": PAD_INDEX, "UNK": UNK_INDEX}
        self.word2count = {}
        self.index2word = {PAD_INDEX: "PAD", UNK_INDEX: "UNK"}
        self.n_words = 2   # vocabulary size, including the two reserved tokens
        self.word_num = 0  # running total of tokens seen (with repeats)

    def index_words(self, sentence):
        """Register every token of `sentence`, assigning indices as needed."""
        for token in sentence:
            self.word_num += 1
            if token in self.word2index:
                self.word2count[token] += 1
            else:
                new_index = self.n_words
                self.word2index[token] = new_index
                self.index2word[new_index] = token
                self.word2count[token] = 1
                self.n_words = new_index + 1
def Lang(vocab, file_name):
    """Build `vocab` from a review CSV and gather corpus statistics.

    Reads `file_name` (columns 'content' and 'rating'), feeds every
    whitespace token into `vocab`, and returns (vocab, statistic) where
    statistic holds sentence/word counts, vocabulary size, the ten most
    frequent words, length stats and the class distribution.
    """
    df = pd.read_csv(file_name)

    lengths = []
    for sent in df["content"]:
        tokens = str(sent).strip().split()
        vocab.index_words(tokens)
        lengths.append(len(tokens))

    statistic = {
        "sent_num": len(df),
        "word_num": vocab.word_num,
        "vocab_size": vocab.n_words,
        "top_ten_words": [w for w, _ in Counter(vocab.word2count).most_common(10)],
        "max_len": max(lengths),
        "avg_len": sum(lengths) / len(lengths),
        "len_std": np.std(lengths),
        "class_distribution": dict(Counter(df["rating"])),
    }
    return vocab, statistic
class Dataset(data.Dataset):
    """Custom data.Dataset compatible with data.DataLoader.

    Wraps a (ids, sentences, labels) triple; `labels` may be None for the
    unlabeled test split, in which case __getitem__ yields only the token
    tensor and the example id.
    """

    def __init__(self, data, vocab):
        # `data` is (ids, token lists, labels-or-None).
        self.id, self.X, self.y = data
        self.vocab = vocab
        self.num_total_seqs = len(self.X)
        self.id = torch.LongTensor(self.id)
        if self.y is not None:
            self.y = torch.LongTensor(self.y)

    def __getitem__(self, index):
        """Return (token-id tensor[, label], example id) for one review."""
        ind = self.id[index]
        tokens = torch.LongTensor(self.tokenize(self.X[index]))
        if self.y is None:
            return tokens, ind
        return tokens, self.y[index], ind

    def __len__(self):
        return self.num_total_seqs

    def tokenize(self, sentence):
        # Map each word to its vocabulary index, falling back to UNK.
        lookup = self.vocab.word2index
        return [lookup[word] if word in lookup else UNK_INDEX for word in sentence]
def preprocess(filename, max_len=200, test=False):
    """Read a review CSV and return (ids, padded token lists, ratings).

    Args:
        filename: CSV with 'id', 'content' and (unless test=True) 'rating'.
        max_len: reviews longer than this are truncated; shorter ones are
            right-padded with the literal token "PAD".
        test: when True the split has no labels; the third element is None.

    Returns:
        (list of ids, list of length-max_len token lists, ratings or None)

    Bug fix: the original test-branch check was the bare expression
    `len(id_) == len(content)` with the `assert` keyword missing, so the
    sanity check was a silent no-op.
    """
    df = pd.read_csv(filename)
    id_ = []      # review ids
    rating = []   # labels (left empty when test=True)
    content = []  # padded/truncated token lists
    for i in range(len(df)):
        id_.append(int(df['id'][i]))
        if not test:
            rating.append(int(df['rating'][i]))
        sentence = clean(str(df['content'][i]).strip()).split()
        sent_len = len(sentence)
        # Static padding over the whole set; dynamic per-batch padding would
        # require a custom collate_fn instead.
        if sent_len > max_len:
            content.append(sentence[:max_len])
        else:
            content.append(sentence + ["PAD"] * (max_len - sent_len))
    if test:
        assert len(id_) == len(content)
        return (id_, content, None)
    assert len(id_) == len(content) == len(rating)
    return (id_, content, rating)
def get_dataloaders(batch_size, max_len):
    """Build train/dev/test DataLoaders from the fixed CSV files.

    Reads train.csv/dev.csv/test.csv from the working directory, builds the
    vocabulary (and prints corpus statistics), and returns the three
    loaders plus the vocabulary size.  Only the training loader shuffles.
    """
    vocab = Vocab()
    vocab, statistic = Lang(vocab, "train.csv")

    # Tokenize and pad each split; test.csv carries no labels.
    train_set = Dataset(preprocess("train.csv", max_len), vocab)
    dev_set = Dataset(preprocess("dev.csv", max_len), vocab)
    test_set = Dataset(preprocess("test.csv", max_len, test=True), vocab)

    print(statistic)

    make_loader = torch.utils.data.DataLoader
    data_loader_tr = make_loader(dataset=train_set, batch_size=batch_size, shuffle=True)
    data_loader_dev = make_loader(dataset=dev_set, batch_size=batch_size, shuffle=False)
    data_loader_test = make_loader(dataset=test_set, batch_size=batch_size, shuffle=False)

    return data_loader_tr, data_loader_dev, data_loader_test, statistic["vocab_size"]
| [
"itsuncheng2000@gmail.com"
] | itsuncheng2000@gmail.com |
08e27a11dfdd901859ea13372fe5cc91ccfe4f91 | 19a55c290de75ef32f790b96091eeb7a96703ec7 | /tasks/urls.py | 288e95106c5a36f4b38c17b300077d10576d3c58 | [] | no_license | vyshak-sukumaran/ToDo_Project | 49e7603d938d0401fc2d6c6caff8bd6bcf1f4ac1 | 7d368322d59793e42bcf5c00484d36fbf12240d0 | refs/heads/master | 2023-06-28T23:07:13.783844 | 2021-07-31T07:43:58 | 2021-07-31T07:43:58 | 391,287,194 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 237 | py | from django.urls import path, include
from . import views
urlpatterns = [
path('', views.index, name="index"),
path('update/<str:pk>/', views.update, name='update'),
path('delete/<str:pk>/', views.delete, name='delete'),
]
| [
"vyshakvyshu791@gmail.com"
] | vyshakvyshu791@gmail.com |
86438607b68f0230a38d395d4d3e9ad3da451323 | d68c6fd11a0a034348b5ec4cfd3304048a01e0a8 | /python/docop-hello-world/main.py | c44270d657fe9c00768af56e3c9630cd0e9279c6 | [] | no_license | wfelipe3/KnowBag | 043570ea1fad101d416b49bca04f621661ed159b | 0cf10e8a57076b68dc3ce9327869496027285c96 | refs/heads/master | 2021-01-24T08:15:30.437703 | 2018-02-12T03:32:49 | 2018-02-12T03:32:49 | 26,071,840 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,088 | py | """Naval Fate.
Usage:
naval_fate.py ship new <name>...
naval_fate.py ship <name> move <x> <y> [--speed=<kn>]
naval_fate.py ship shoot <x> <y>
naval_fate.py mine (set|remove) <x> <y> [--moored | --drifting]
naval_fate.py (-h | --help)
naval_fate.py --version
Options:
-h --help Show this screen.
--version Show version.
--speed=<kn> Speed in knots [default: 10].
--moored Moored (anchored) mine.
--drifting Drifting mine.
"""
from docopt import docopt
from subprocess import call
import subprocess
def execute(command):
    """Run `command` (an argv list) and return its captured stdout as bytes.

    Bug fix: the original drained stdout with readlines() before calling
    communicate(), so communicate() always returned b'' and so did this
    function.  communicate() alone reads all output and waits for exit,
    avoiding the pipe deadlock risk of manual reads.  The stray debug
    prints ("this is a super test", the raw line list) are removed.
    """
    process = subprocess.Popen(command, stdout=subprocess.PIPE)
    out, _err = process.communicate()
    return out
def test_method():
    """Print a fixed demonstration message (kept for tutorial parity)."""
    demo_message = "this is a test"
    print(demo_message)
# Script entry point: parse the CLI per the module docstring via docopt,
# show the parsed arguments, run `ls -la` through execute(), then the demo.
if __name__ == '__main__':
    arguments = docopt(__doc__, version='Naval Fate 2.0')
    print(arguments)
    res = execute(['ls', '-la'])
    print("{res} is res".format(res = res))
    test_method()
| [
"feliperojas@iMac-de-Felipe.local"
] | feliperojas@iMac-de-Felipe.local |
2432a572bd371ad31357a644beb118c7a6652907 | 9c3584757cda097128e6916a5490056263d038a1 | /cv/urls.py | b4a4aa8a59791c0d56489205762ada1f5cf5f7b1 | [] | no_license | jackcorsi/bridging-coursework | 0921a5e2b7dbba304895022e95707475d176fb36 | a1e5e2e42b3cdcc241356fefc2a53d47320c7e04 | refs/heads/master | 2022-12-12T11:35:28.713567 | 2020-08-31T22:06:28 | 2020-08-31T22:06:28 | 290,881,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 102 | py | from django.urls import path
from . import views
urlpatterns = [
path('', views.cv, name='cv'),
] | [
"jackcorsiwarren@gmail.com"
] | jackcorsiwarren@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.