| file_name (large_string, lengths 4 to 140) | prefix (large_string, lengths 0 to 39k) | suffix (large_string, lengths 0 to 36.1k) | middle (large_string, lengths 0 to 29.4k) | fim_type (large_string, 4 classes) |
|---|---|---|---|---|
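Each row below is one fill-in-the-middle (FIM) sample: concatenating `prefix + middle + suffix` reconstructs the original source span, and `fim_type` records how the middle was carved out (the four classes appearing in these rows are `identifier_name`, `identifier_body`, `conditional_block`, and `random_line_split`). A minimal loading sketch follows; the file name and Parquet format are illustrative assumptions, and only the column schema comes from the header above.

```python
# Minimal sketch, assuming the rows are available as a Parquet file.
# The path "fim_samples.parquet" is hypothetical; the columns match
# the schema header above.
import pandas as pd

df = pd.read_parquet("fim_samples.parquet")
assert list(df.columns) == ["file_name", "prefix", "suffix", "middle", "fim_type"]

# A FIM sample reconstructs its source span as prefix + middle + suffix.
df["reconstructed"] = df["prefix"] + df["middle"] + df["suffix"]

# Count samples per fim_type class.
print(df["fim_type"].value_counts())
```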
infer_lst.py | # ------------------------------------------------------------------------
# Deformable DETR
# Copyright (c) 2020 SenseTime. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 [see LICENSE for details]
# ------------------------------------------------------------------------
# Modified from DETR (ht... | (args):
utils.init_distributed_mode(args)
print("git:\n {}\n".format(utils.get_sha()))
if args.frozen_weights is not None:
assert args.masks, "Frozen training is meant for segmentation only"
print(args)
device = torch.device(args.device)
# fix the seed for reproducibility
seed = ... | main | identifier_name |
server.rs | // use std::{
// hash::Hash,
// str,
// io::Write,
// net::{SocketAddr, IpAddr, Ipv4Addr},
// sync::Mutex,
// time::{Instant}
// };
// use actix_http::{
// body::Body,
// http::{
// header::{CONTENT_TYPE, SERVER},
// HeaderValue, | // NewService,
// Service,
// };
// use actix_server::{ServerConfig};
// use actix_web::dev::Server
use actix::prelude::*;
// use bytes::{BytesMut, Bytes};
// use futures::{
// future::{
// ok,
// join_all,
// Future,
// },
// Async, Poll,
// };
// use serde_json::to_writer;
// use actix_web::{
// App,
// web,
// middl... | // StatusCode,
// },
// Error, Request, Response,
// };
// use actix_service::{ | random_line_split |
server.rs | // use std::{
// hash::Hash,
// str,
// io::Write,
// net::{SocketAddr, IpAddr, Ipv4Addr},
// sync::Mutex,
// time::{Instant}
// };
// use actix_http::{
// body::Body,
// http::{
// header::{CONTENT_TYPE, SERVER},
// HeaderValue,
// StatusCode,
// },
// Error, Request, Response,
// };
// use actix_service::{
// NewServ... |
}
if f {
ss.remove(s);
println!(
"a websocket session removed from server : {} sockets opened",
ss.len()
);
}
}
}
/// request to close all other connections
#[derive(Message)]
pub struct CloseAll;
impl Handler<CloseAll> for... | {
// if ss[i] == msg.addr {
// if v == msg.addr {
s = i;
f = true;
break;
// }
} | conditional_block |
server.rs | // use std::{
// hash::Hash,
// str,
// io::Write,
// net::{SocketAddr, IpAddr, Ipv4Addr},
// sync::Mutex,
// time::{Instant}
// };
// use actix_http::{
// body::Body,
// http::{
// header::{CONTENT_TYPE, SERVER},
// HeaderValue,
// StatusCode,
// },
// Error, Request, Response,
// };
// use actix_service::{
// NewServ... |
}
/// websocket session disconnected
#[derive(Message)]
pub struct Disconnect {
pub addr: Addr<WsSession>,
// pub id : usize,
}
impl Handler<Disconnect> for WsServer {
type Result = ();
fn handle(&mut self, msg: Disconnect, _ctx: &mut Self::Context) -> Self::Result {
println!("a websocket sess... | {
// println!("{:?} joined wsserver", msg.addr);
// let mut s = &mut *self.sessions.get_mut().unwrap();
let s = &mut self.sessions;
s.push(msg.addr); //.downgrade());
println!(
"new web socket added to server : {} sockets opened",
s.len()
);
} | identifier_body |
server.rs | // use std::{
// hash::Hash,
// str,
// io::Write,
// net::{SocketAddr, IpAddr, Ipv4Addr},
// sync::Mutex,
// time::{Instant}
// };
// use actix_http::{
// body::Body,
// http::{
// header::{CONTENT_TYPE, SERVER},
// HeaderValue,
// StatusCode,
// },
// Error, Request, Response,
// };
// use actix_service::{
// NewServ... | (&mut self, msg: Disconnect, _ctx: &mut Self::Context) -> Self::Result {
println!("a websocket session requested disconnect");
let mut s = 0;
let mut f = false;
// let mut ss = &mut *self.sessions.get_mut().unwrap();
let ss = &mut self.sessions;
for i in 0..ss.len() {
... | handle | identifier_name |
rt_network_v3_2_x.py | # -*- coding: utf-8 -*-
"""RT-Network-v3.2.x.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1MfD_C225OafgIsQKVy76p3E2Qam6kXDo
"""
# Commented out IPython magic to ensure Python compatibility.
# %tensorflow_version 2.x
# %load_ext tensorboard
im... | def upsample_minority_TOTs(X_train, y_train, tot_labels, random_state=27):
# contat the training data together.
X = pd.concat([X_train, y_train], axis=1)
# separate majority and minority classes
buckets = {l: X[X.TOT == l] for l in tot_labels}
maj_label, majority = reduce(lambda a,b: b if b[1].shape... | all_users = df.copy()
df = df.loc[df['Name'].isin(chunk_users)]
| random_line_split |
rt_network_v3_2_x.py | # -*- coding: utf-8 -*-
"""RT-Network-v3.2.x.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1MfD_C225OafgIsQKVy76p3E2Qam6kXDo
"""
# Commented out IPython magic to ensure Python compatibility.
# %tensorflow_version 2.x
# %load_ext tensorboard
im... |
# delete sample data
!rm -rf sample_data
# setup nnet tools (for converting model to Stanford's nnet format)
setup_nnet_tools(nnet_tools_path)
# used for conversion to NNet format
from NNet.utils.writeNNet import writeNNet
"""## Download Dataset"""
# GDrive ID of csv file (AllData_ReactionTime.csv)
# https://drive... | if not os.path.exists(nnet_tools_path):
install_nnet_tools(nnet_tools_path)
# add folder to PYTHONPATH & JUPYTER_PATH
update_path_vars([nnet_tools_path]) | identifier_body |
rt_network_v3_2_x.py | # -*- coding: utf-8 -*-
"""RT-Network-v3.2.x.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1MfD_C225OafgIsQKVy76p3E2Qam6kXDo
"""
# Commented out IPython magic to ensure Python compatibility.
# %tensorflow_version 2.x
# %load_ext tensorboard
im... | (scaler, onehot, output_dir):
pkl.dump(scaler, open(f'{output_dir}/scaler.pkl', 'wb'))
pkl.dump(onehot, open(f'{output_dir}/onehot.pkl', 'wb'))
def save_verification_data(modelpath, df, onehot, scaler, tot_bins, tot_labels, outpath='artifacts/verification.csv'):
m = load_model(modelpath)
df['TOT'] = pd... | save_encoders | identifier_name |
rt_network_v3_2_x.py | # -*- coding: utf-8 -*-
"""RT-Network-v3.2.x.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1MfD_C225OafgIsQKVy76p3E2Qam6kXDo
"""
# Commented out IPython magic to ensure Python compatibility.
# %tensorflow_version 2.x
# %load_ext tensorboard
im... |
if encoded:
enc_tot_labels = onehot.get_feature_names(input_features=['TOT'])
print_heading('Encoded Data')
display(pd.concat([pd.DataFrame(X_train_enc, columns=feature_names),
pd.DataFrame(y_train_enc, columns=enc_tot_labels)],
... | print_heading('Unencoded Data')
display(pd.concat([X_train, y_train], axis=1).describe()) | conditional_block |
cdb.py | # CDB implemented in python ... with a pythonic interface!
# Starting point provided by Yusuke Shinyama
# Eric Ritezel -- February 17, 2007
#
# 20070218 - longstream optimization started
# there's something slow about this. low memory usage, though.
# 20070219 - had dream that led to increased performance.
... | a['meat'] = "mew"
a['meat'] = "ouch"
# close the builder
a.close()
print "TEST: %fs to run build" % (time.time() - starttime)
def test_read(fname="test.cdb"):
print "TEST: Doing read of",fname
cdb = reader(fname)
print 'TEST: Should be False: cdb["not a key"] =', cdb.has_key("not a key")
if cdb.h... | random_line_split | |
cdb.py | # CDB implemented in python ... with a pythonic interface!
# Starting point provided by Yusuke Shinyama
# Eric Ritezel -- February 17, 2007
#
# 20070218 - longstream optimization started
# there's something slow about this. low memory usage, though.
# 20070219 - had dream that led to increased performance.
... |
##############################################
###############TESTSUITEBLOCK#################
##############################################
data = make_data(1000)
test_massive(data, massive=10000)
del(data)
test_read(fname='stress.cdb')
exit(1)
# launch profiler test suite
prof = hotshot.Profile("pycdb.prof... | starttime = time.time()
print "TEST: Massive stress test for large databases (%d entries)" % massive
a = builder(fname)
for i in xrange(massive):
a[testlist[i%len(testlist)][0]] = testlist[i%len(testlist)][1]
if not i % (massive / 37): print '.', #print "%3.1f%% complete" % (float(i) / (5*(10**6))*100)
a.... | identifier_body |
cdb.py | # CDB implemented in python ... with a pythonic interface!
# Starting point provided by Yusuke Shinyama
# Eric Ritezel -- February 17, 2007
#
# 20070218 - longstream optimization started
# there's something slow about this. low memory usage, though.
# 20070219 - had dream that led to increased performance.
... | (self,index,single=True):
return_value = []
hash_prime = calc_hash(index)
# pull data from the cached header
headhash = hash_prime % 256
pos_bucket= self.header[headhash + headhash]
ncells = self.header[headhash + headhash + 1]
# since the 256*8 bytes are all zeroed, this means the hash
# was invalid ... | __get | identifier_name |
cdb.py | # CDB implemented in python ... with a pythonic interface!
# Starting point provided by Yusuke Shinyama
# Eric Ritezel -- February 17, 2007
#
# 20070218 - longstream optimization started
# there's something slow about this. low memory usage, though.
# 20070219 - had dream that led to increased performance.
... |
else: self.filep = infile
# attempt to read file from the start
self.filep.seek(0)
self.start = self.filep.tell()
# track pointers and hash table data
self.hashbucket = [ array.array('L') for i in range(256) ]
# skip past header storage (file header + 2048)
self.position_hash = self.start + 2048
s... | self.filep = open(infile, "w+b") | conditional_block |
gimli.rs | //! Support for symbolication using the `gimli` crate on crates.io
//!
//! This implementation is largely a work in progress and is off by default for
//! all platforms, but it's hoped to be developed over time! Long-term this is
//! intended to wholesale replace the `libbacktrace.rs` implementation.
use self::gimli::... |
fn avma_to_svma(&self, addr: *const u8) -> Option<(usize, *const u8)> {
self.libraries
.iter()
.enumerate()
.filter_map(|(i, lib)| {
// First up, test if this `lib` has any segment containing the
// `addr` (handling relocation). If this c... | {
// A very small, very simple LRU cache for debug info mappings.
//
// The hit rate should be very high, since the typical stack doesn't cross
// between many shared libraries.
//
// The `addr2line::Context` structures are pretty expensive to create. Its
// cost ... | identifier_body |
gimli.rs | //! Support for symbolication using the `gimli` crate on crates.io
//!
//! This implementation is largely a work in progress and is off by default for
//! all platforms, but it's hoped to be developed over time! Long-term this is
//! intended to wholesale replace the `libbacktrace.rs` implementation.
use self::gimli::... | }
struct Library {
name: OsString,
/// Segments of this library loaded into memory, and where they're loaded.
segments: Vec<LibrarySegment>,
/// The "bias" of this library, typically where it's loaded into memory.
/// This value is added to each segment's stated address to get the actual
/// vi... | /// Note that this is basically an LRU cache and we'll be shifting things
/// around in here as we symbolize addresses.
mappings: Vec<(usize, Mapping)>, | random_line_split |
gimli.rs | //! Support for symbolication using the `gimli` crate on crates.io
//!
//! This implementation is largely a work in progress and is off by default for
//! all platforms, but it's hoped to be developed over time! Long-term this is
//! intended to wholesale replace the `libbacktrace.rs` implementation.
use self::gimli::... | <'a>(&'a mut self, lib: usize) -> Option<&'a Context<'a>> {
let idx = self.mappings.iter().position(|(idx, _)| *idx == lib);
// Invariant: after this conditional completes without early returning
// from an error, the cache entry for this path is at index 0.
if let Some(idx) = idx {
... | mapping_for_lib | identifier_name |
main.py | # By Monsterovich
# This script reposts user's track from the comments
from soundcloud import Client as Soundcloud
from requests import HTTPError
from time import strftime, time, gmtime
import logging
import os
import sys
import imp
from scgb.database import Database
BOT_VERSION = '1.3.3'
banlist = {
'user': {... |
def process_comment(comment):
"""Process a single comment."""
if not comment.body:
logging.info('Empty URL detected.')
return 'Your comment is empty.'
if comment.user_id in banlist['user']:
logging.info('Banned user id: %d', comment.user_id)
return 'You... |
if config.use_advanced_description and should_update_description:
update_description() | random_line_split |
main.py | # By Monsterovich
# This script reposts user's track from the comments
from soundcloud import Client as Soundcloud
from requests import HTTPError
from time import strftime, time, gmtime
import logging
import os
import sys
import imp
from scgb.database import Database
BOT_VERSION = '1.3.3'
banlist = {
'user': {... | (user_id, resource_type, resource_id):
"""Repost a resource into the group and update the database."""
logging.info('Reposting %s %d...', resource_type, resource_id)
soundcloud.put('/e1/me/{}_reposts/{}'.format(resource_type, resource_id))
db.record_repost(user_id, resource_type, resource_id)
db.com... | group_repost | identifier_name |
main.py | # By Monsterovich
# This script reposts user's track from the comments
from soundcloud import Client as Soundcloud
from requests import HTTPError
from time import strftime, time, gmtime
import logging
import os
import sys
import imp
from scgb.database import Database
BOT_VERSION = '1.3.3'
banlist = {
'user': {... |
def init_api():
"""Authenticate with SoundCloud API.
Cache access token in the secrets file."""
global soundcloud
import json
SECRETS_VERSION = 1
# Load secrets file
if os.path.exists(config.token_cache):
with open(config.token_cache, 'r', encoding='utf-8') as f:
... | global db
global config
# Init log
logging.basicConfig(stream=sys.stdout, level=logging.INFO, datefmt='[%Y-%m-%d %H:%M:%S]', format='%(asctime)s %(levelname)s %(message)s')
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
# ... | identifier_body |
main.py | # By Monsterovich
# This script reposts user's track from the comments
from soundcloud import Client as Soundcloud
from requests import HTTPError
from time import strftime, time, gmtime
import logging
import os
import sys
import imp
from scgb.database import Database
BOT_VERSION = '1.3.3'
banlist = {
'user': {... |
def process_comment(comment):
"""Process a single comment."""
if not comment.body:
logging.info('Empty URL detected.')
return 'Your comment is empty.'
if comment.user_id in banlist['user']:
logging.info('Banned user id: %d', comment.user_id)
return 'Yo... | update_description() | conditional_block |
sigma-form-upload-file.component.ts | import { Component, Input, OnInit, ChangeDetectionStrategy, Optional, Self, ViewChild, ViewEncapsulation, ChangeDetectorRef, ElementRef } from '@angular/core';
import { ControlValueAccessor, FormControl, NgControl, FormControlName, AbstractControl } from '@angular/forms';
import { CONST_SHARED } from '../../constantes-... | *
* Method that validates the file type against the allowed types
*
* @param control Form control to which the failure or success message will be attached
*/
acceptValid(control: FormControl) {
if (this.accept) {
let validate = this.setValidateFile(control.validator, InputFileAcceptsValidat... | equired = true;
}
}
}
/* | conditional_block |
sigma-form-upload-file.component.ts | import { Component, Input, OnInit, ChangeDetectionStrategy, Optional, Self, ViewChild, ViewEncapsulation, ChangeDetectorRef, ElementRef } from '@angular/core';
import { ControlValueAccessor, FormControl, NgControl, FormControlName, AbstractControl } from '@angular/forms';
import { CONST_SHARED } from '../../constantes-... | addErrors() {
if (this.errors.length > 0) {
this.errors.map(item => {
this.basicErrors.push(item);
});
}
}
validateShowAttachFile(): Boolean {
if (this.control.disabled) {
return false;
}
if (this.maxUpload === 0 ) {
return true;
}
if ( this.files && (... | */ | random_line_split |
sigma-form-upload-file.component.ts | import { Component, Input, OnInit, ChangeDetectionStrategy, Optional, Self, ViewChild, ViewEncapsulation, ChangeDetectorRef, ElementRef } from '@angular/core';
import { ControlValueAccessor, FormControl, NgControl, FormControlName, AbstractControl } from '@angular/forms';
import { CONST_SHARED } from '../../constantes-... | l.disabled) {
return false;
}
if (this.control.errors) {
if (Object.keys(this.control.errors).length > 0) {
return false;
}
}
if (!this.autoUpdate) {
return false;
}
return true;
}
/**
* Method in charge of adding the files uploaded by the client to... | (this.contro | identifier_name
sigma-form-upload-file.component.ts | import { Component, Input, OnInit, ChangeDetectionStrategy, Optional, Self, ViewChild, ViewEncapsulation, ChangeDetectorRef, ElementRef } from '@angular/core';
import { ControlValueAccessor, FormControl, NgControl, FormControlName, AbstractControl } from '@angular/forms';
import { CONST_SHARED } from '../../constantes-... | is.onTouch();
this.response();
this.status = 'reseting';
this.detectChange();
this.action = 'edit';
this.ngOnInit();
}
/**
* Method that assigns file-type errors
* to the error section of the form
*/
setMensajeErrorTipoArchivo() {
this.errorTi... | .response();
this.status = 'rewrite';
this.detectChange();
this.clearInputHiden();
}
resetFormConditions() {
th | identifier_body |
types.rs | use javascriptcore_sys::*;
use std::convert::TryFrom; | macro_rules! retain_release {
($name:ident, $ffi_ref:ty, $retain_fn:tt, $drop_fn:tt) => {
impl Drop for $name {
fn drop(&mut self) {
unsafe { $drop_fn(self.0) };
}
}
impl Clone for $name {
fn clone(&self) -> $name {
let x =... | use std::ffi::CString;
use std::ops::Deref;
use std::ptr::{null, null_mut};
| random_line_split |
types.rs | use javascriptcore_sys::*;
use std::convert::TryFrom;
use std::ffi::CString;
use std::ops::Deref;
use std::ptr::{null, null_mut};
macro_rules! retain_release {
($name:ident, $ffi_ref:ty, $retain_fn:tt, $drop_fn:tt) => {
impl Drop for $name {
fn drop(&mut self) {
unsafe { $drop_f... |
}
impl fmt::Debug for Exception {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Exception")
.field("stack", &self.stack())
.field("message", &self.message())
.finish()
}
}
impl fmt::Display for Exception {
fn fmt(&self, f: &mut fmt::For... | {
let mut s = f.debug_struct("Object");
unsafe {
let array = JSObjectCopyPropertyNames(*self.0, self.1);
let size = JSPropertyNameArrayGetCount(array);
for i in 0..size {
let js_ref = JSPropertyNameArrayGetNameAtIndex(array, i);
let pr... | identifier_body |
types.rs | use javascriptcore_sys::*;
use std::convert::TryFrom;
use std::ffi::CString;
use std::ops::Deref;
use std::ptr::{null, null_mut};
macro_rules! retain_release {
($name:ident, $ffi_ref:ty, $retain_fn:tt, $drop_fn:tt) => {
impl Drop for $name {
fn drop(&mut self) {
unsafe { $drop_f... | <'a>(&'a self, script: &'a String) -> Result<Value, Exception> {
self.evaluate_script_sync(script)
}
pub fn add_function(
&self,
name: &str,
callback: JsCallback,
) -> Result<(), Box<dyn std::error::Error>> {
let name = String::new(name).unwrap();
let obj = s... | evaluate_script | identifier_name |
setting.go | package handle
import (
"encoding/json"
"fmt"
"itflow/cache"
"itflow/db"
"itflow/encrypt"
"itflow/internal/response"
"itflow/internal/role"
"itflow/internal/user"
"net/http"
"strings"
"time"
"github.com/hyahm/golog"
"github.com/hyahm/xmux"
)
func CreateUser(w http.ResponseWriter, r *http.Request) {
er... | db.Mconn.GetRows(getallsql, uid)
if err != nil {
golog.Error(err)
w.Write(errorcode.ErrorE(err))
return
}
for adminrows.Next() {
ul := &user.User{}
err = adminrows.Scan(&ul.Id, &ul.Createtime, &ul.Realname, &ul.Nickname, &ul.Email,
&ul.Disable, &ul.RoleGroup, &ul.StatusGroup, &ul.Position)
... | rite(send)
return
} else {
getallsql := `select u.id,createtime,realname,nickname,email,disable, j.name from
user as u join jobs as j
on u.jid in (select id from jobs where hypo=(select jid from user where id=?))`
adminrows, err := | conditional_block |
setting.go | package handle
import (
"encoding/json"
"fmt"
"itflow/cache"
"itflow/db"
"itflow/encrypt"
"itflow/internal/response"
"itflow/internal/role"
"itflow/internal/user"
"net/http"
"strings"
"time"
"github.com/hyahm/golog"
"github.com/hyahm/xmux"
)
func CreateUser(w http.ResponseWriter, r *http.Request) {
er... | r))
return
}
_, err = db.Mconn.Update("update bugs set dustbin=ABS(dustbin-1) where uid=?", id)
if err != nil {
golog.Error(err)
w.Write(errorcode.ErrorE(err))
return
}
send, _ := json.Marshal(errorcode)
w.Write(send)
return
}
// Show the permissions this user can manage, but not the user's own
func UserList(w http.ResponseWriter, r *http.Request) ... | e.ErrorE(er | identifier_name |
setting.go | package handle
import (
"encoding/json"
"fmt"
"itflow/cache"
"itflow/db"
"itflow/encrypt"
"itflow/internal/response"
"itflow/internal/role"
"itflow/internal/user"
"net/http"
"strings"
"time"
"github.com/hyahm/golog"
"github.com/hyahm/xmux"
)
func CreateUser(w http.ResponseWriter, r *http.Request) {
er... | te(errorcode.ErrorE(err))
return
}
send, _ := json.Marshal(errorcode)
w.Write(send)
return
}
| Marshal(ts)
w.Write(send)
return
}
func ResetPwd(w http.ResponseWriter, r *http.Request) {
errorcode := &response.Response{}
rp := xmux.GetInstance(r).Data.(*user.ResetPassword)
newpassword := encrypt.PwdEncrypt(rp.Password, cache.Salt)
updatepwdsql := "update user set password=? where id=?"
_, err := db.M... | identifier_body |
setting.go | package handle
import (
"encoding/json"
"fmt"
"itflow/cache"
"itflow/db"
"itflow/encrypt"
"itflow/internal/response"
"itflow/internal/role" | "github.com/hyahm/golog"
"github.com/hyahm/xmux"
)
func CreateUser(w http.ResponseWriter, r *http.Request) {
errorcode := &response.Response{}
// nickname := xmux.GetInstance(r).Get("nickname").(string)
uid := xmux.GetInstance(r).Get("uid").(int64)
createTime := time.Now().Unix()
getuser := xmux.GetInstance(r... | "itflow/internal/user"
"net/http"
"strings"
"time"
| random_line_split |
util.js | // import {openLoadingCommon,closeLoadingCommon,notice,alert_util,alertNotice,openView,dataToForm,cleanForm} from './common'
import Vue from "vue";
/**
* Common methods
* @constructor
*/
var Utils;
Utils = {
utilSubString: function (val, length) {
if (val.length <= length) {
return val
} else if (val.length ... | aram dictMap data dictionary Map
*/
initConditionData(arr, dictMap) {
for (let i = 0; i < arr.length; i++) {
var key = arr[i].dict || arr[i].key;
if (key && dictMap[key]) {
this.attachDataMap(arr[i], dictMap[key]);
}
}
},
/**
* Get the dictionary table Map.
* @param callback callback executed after the dictionary is fetched
*
*/
... | 数组
* @p | identifier_name |
util.js | // import {openLoadingCommon,closeLoadingCommon,notice,alert_util,alertNotice,openView,dataToForm,cleanForm} from './common'
import Vue from "vue";
/**
* Common methods
* @constructor
*/
var Utils;
Utils = {
utilSubString: function (val, length) {
if (val.length <= length) {
return val
} else if (val.length ... | _time"))*/
};
export default Utils
| (tDSL[k]));
} else {
ret = !(v === tDSL[k]);
}
if (ret) {
break;
}
}
return ret;
}
/* function getInervalHour(startDate, endDate) {
var ms = endDate.getTime() - startDate.getTime();
if (ms < 0) return 0;
return Math.floor(ms/1000/60/60);
}
console.log("... | identifier_body |
util.js | // import {openLoadingCommon,closeLoadingCommon,notice,alert_util,alertNotice,openView,dataToForm,cleanForm} from './common'
import Vue from "vue";
/**
* Common methods
* @constructor
*/
var Utils;
Utils = {
utilSubString: function (val, length) {
if (val.length <= length) {
return val
} else if (val.length ... | query-condition option labels the corresponding value mapping and key.
* @param arr array of query conditions
* @param dictMap data dictionary Map
*/
initConditionData(arr, dictMap) {
for (let i = 0; i < arr.length; i++) {
var key = arr[i].dict || arr[i].key;
if (key && dictMap[key]) {
this.attachDataMap(arr[i], dictMap[key]);
}
}
},
/**
* Get the dictionary table M... | ion (item, index) {
map[item[key]] = item[value]
})
}
return map
},
/**
* Add to multiple | conditional_block
util.js | // import {openLoadingCommon,closeLoadingCommon,notice,alert_util,alertNotice,openView,dataToForm,cleanForm} from './common'
import Vue from "vue";
/**
* Common methods
* @constructor
*/
var Utils;
Utils = {
utilSubString: function (val, length) {
if (val.length <= length) {
return val
} else if (val.length ... | var decryptedStr = decrypt.toString(CryptoJS.enc.Utf8);
return decryptedStr.toString();
},
/* Sort an array of objects: array - the object array, key - the sort field, status=0 - ascending, status=1 - descending */
sortByKey(array, key, status) {
return array.sort(function (a, b) {
var x = a[key];
var y = b[key];
if (status === 0) {
return ... | });
| random_line_split |
Assembler.py | import sys
import os
""" CONSTANTS """
# A constant representing the width of a word in RAM
CMD_LEN = 16
# Predefined symbols. These are the constants that will be used in assembly
SP = 0
LCL = 1
ARG = 2
THIS = 3
THAT = 4
SCREEN = 16384
KBD = 24576
# The last number of RAM to be reserved
RAM_RESERVE_E... |
def assemble_file(assembly_file_name, hack_file_name):
"""
A function that receives names of an .asm file and a .hack file.
The function will create the specified .hack file, and using helper
functions will write to it hack machine code, line by line, respective to
the supplied assembly c... | """
This function process an assembly file before it's translation to machine
code. It creates a dictionary, and places into it all markers in the code,
and assigns each one of them it's location in code, allowing to use it as
a reference in future. While doing so, it deletes each marker's
decl... | identifier_body |
Assembler.py | import sys
import os
""" CONSTANTS """
# A constant representing the width of a word in RAM
CMD_LEN = 16
# Predefined symbols. These are the constants that will be used in assembly
SP = 0
LCL = 1
ARG = 2
THIS = 3
THAT = 4
| RAM_RESERVE_END = 16
# A constant representing the first place in RAM available for variables
VAR_FIRST_MEM = 16
""" Global variables"""
# A global variable representing the number of variables created in the
# supplied assembly code. When translating multiple files, this variable is
# set to 0 at the beginning... | SCREEN = 16384
KBD = 24576
# The last number of RAM to be reserved
| random_line_split |
Assembler.py | import sys
import os
""" CONSTANTS """
# A constant representing the width of a word in RAM
CMD_LEN = 16
# Predefined symbols. These are the constants that will be used in assembly
SP = 0
LCL = 1
ARG = 2
THIS = 3
THAT = 4
SCREEN = 16384
KBD = 24576
# The last number of RAM to be reserved
RAM_RESERVE_E... |
# A only commands
elif compute_command == "0":
suffix = "101010"
elif compute_command == "1":
suffix = "111111"
elif compute_command == "-1":
suffix = "111010"
elif compute_command == "D":
suffix = "001100"
elif compute_command == "!D":
suffi... | suffix = "010101" | conditional_block |
Assembler.py | import sys
import os
""" CONSTANTS """
# A constant representing the width of a word in RAM
CMD_LEN = 16
# Predefined symbols. These are the constants that will be used in assembly
SP = 0
LCL = 1
ARG = 2
THIS = 3
THAT = 4
SCREEN = 16384
KBD = 24576
# The last number of RAM to be reserved
RAM_RESERVE_E... | (hack_file, marker_dictionary, cmd):
"""
This function writes a translated assembly name as hack machine code into
the supplied .hack file. The function uses helper functions to translate
code according to the type of code.
:param hack_file: a .hack file (destination for hack machine code)
... | write_cmd | identifier_name |
unqfy.js | const picklify = require('picklify') // to load/save unqfy
const fs = require('fs') // to load/save unqfy
const lyricFinderModule = require('../musicMatch') // contains the request to MusicMatch
const populatorModule = require('../spotify')// contains the Spotify request function
const LyricFinder = requ... | () { return this._nextId }
/////////////////////
addUser({name, email}) {
const newUser = new UserCreation(this, {name, email}).handle()
this._entitiesRepository.add('user', newUser)
return newUser
}
verifyId(id){
return this._nextId >= id
}
registerListening(userId, trackId) {
... | id | identifier_name |
unqfy.js | const picklify = require('picklify') // to load/save unqfy
const fs = require('fs') // to load/save unqfy
const lyricFinderModule = require('../musicMatch') // contains the request to MusicMatch
const populatorModule = require('../spotify')// contains the Spotify request function
const LyricFinder = requ... |
getPlaylistById(id) { return this._entitiesRepository.findBy('playlist', {prop: 'id', value: id}) }
getUserById(id) { return this._entitiesRepository.findBy('user' , {prop: 'id', value: id}) }
getArtistByName(aName) { return this._entitiesRepository.findBy('artist', { prop: 'name', value: aName }) ... | { return this._entitiesRepository.findBy('track' , {prop: 'id', value: id}) } | identifier_body |
unqfy.js | const picklify = require('picklify') // to load/save unqfy
const fs = require('fs') // to load/save unqfy
const lyricFinderModule = require('../musicMatch') // contains the request to MusicMatch
const populatorModule = require('../spotify')// contains the Spotify request function
const LyricFinder = requ... | const artist = this._getAuthorOfAlbum(album)
this._removeFromAllPlaylists(album.tracks)
artist.removeAlbum(album)
}
/* TRACK */
addTrack(albumId, {name, duration, genres}) {
const lyricsProvider = this.lyricsProvider;
const newTrack = new TrackCreation(this, {name, duration, genres, lyricsPro... | removeAlbum(albumId) {
const album = this.getAlbumById(albumId) | random_line_split |
unqfy.js | const picklify = require('picklify') // to load/save unqfy
const fs = require('fs') // to load/save unqfy
const lyricFinderModule = require('../musicMatch') // contains the request to MusicMatch
const populatorModule = require('../spotify')// contains the Spotify request function
const LyricFinder = requ... |
}
updateArtist(artistId, artistData) {
const artist = this.getArtistById(artistId)
artist.update(artistData)
return artist
}
updateAlbum(albumId, { year }) {
const album = this.getAlbumById(albumId)
album.update({year})
return album
}
async populateAlbumsForArtist(artistName) {... | {
throw new ArtistNotFound(artistName)
} | conditional_block |
tag.rs | //! A dictionary-based tagger. The raw format is tuples of the form `(word, lemma, part-of-speech)`
//! where each word typically has multiple entries with different part-of-speech tags.
use crate::types::*;
use bimap::BiMap;
use fs_err::File;
use fst::{IntoStreamer, Map, Streamer};
use indexmap::IndexMap;
use log::er... | (
&self,
word: &str,
add_lower: bool,
add_lower_if_empty: bool,
) -> Vec<WordData> {
let mut tags = self.get_raw(&word);
let lower = word.to_lowercase();
if (add_lower || (add_lower_if_empty && tags.is_empty()))
&& (word != lower
&... | get_strict_tags | identifier_name |
tag.rs | //! A dictionary-based tagger. The raw format is tuples of the form `(word, lemma, part-of-speech)`
//! where each word typically has multiple entries with different part-of-speech tags.
use crate::types::*;
use bimap::BiMap;
use fs_err::File;
use fst::{IntoStreamer, Map, Streamer};
use indexmap::IndexMap;
use log::er... |
}
tags
}
/// Get the words with the same lemma as the given lemma.
pub fn get_group_members(&self, lemma: &str) -> Vec<&str> {
self.word_store
.get_by_left(lemma)
.and_then(|x| self.groups.get(x))
.map(|vec| vec.iter().map(|x| self.str_for_word_... | {
let indices = word
.char_indices()
.take(std::cmp::max(n_chars - 4, 0) as usize)
.skip(1)
.map(|x| x.0);
// the word always has at least one char if the above condition is satisfied
// but s... | conditional_block |
tag.rs | //! A dictionary-based tagger. The raw format is tuples of the form `(word, lemma, part-of-speech)`
//! where each word typically has multiple entries with different part-of-speech tags.
use crate::types::*;
use bimap::BiMap;
use fs_err::File;
use fst::{IntoStreamer, Map, Streamer};
use indexmap::IndexMap;
use log::er... | let reader = std::io::BufReader::new(file);
for line in reader.lines() {
let line = line?;
if line.starts_with('#') {
continue;
}
if disallowed.contains(&line) {
continue;
}
... | random_line_split | |
tag.rs | //! A dictionary-based tagger. The raw format is tuples of the form `(word, lemma, part-of-speech)`
//! where each word typically has multiple entries with different part-of-speech tags.
use crate::types::*;
use bimap::BiMap;
use fs_err::File;
use fst::{IntoStreamer, Map, Streamer};
use indexmap::IndexMap;
use log::er... |
}
| {
self.word_store
.get_by_left(lemma)
.and_then(|x| self.groups.get(x))
.map(|vec| vec.iter().map(|x| self.str_for_word_id(x)).collect())
.unwrap_or_else(Vec::new)
} | identifier_body |
base.py | ################################################################################
# MIT License
#
# Copyright (c) 2017 Jean-Charles Fosse & Johann Bigler
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in t... |
def _subscribe(self):
self._meta.database.connection.subscribe(self.propagate_update, u"wamp.postgresql.propagadate.{0}".format(self._meta.name))
def propagate_update(self, dictValues):
if dictValues["id"] == self.id:
for field, value in dictValues.iteritems():
sel... | """
Save a row
"""
# For each field get the value to insert
values = {key : self._meta.fields[key].insert_format(value) for key, value in self.dictValues.items()}
if self._meta.primary_key:
# If an id exist then we should update
if self.id:
... | identifier_body |
base.py | ################################################################################
# MIT License
#
# Copyright (c) 2017 Jean-Charles Fosse & Johann Bigler
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in t... |
else:
yield self.insert(values)
def _subscribe(self):
self._meta.database.connection.subscribe(self.propagate_update, u"wamp.postgresql.propagadate.{0}".format(self._meta.name))
def propagate_update(self, dictValues):
if dictValues["id"] == self.id:
for field, ... | if self.id:
pk = yield self.update(values)
if self._meta.propagate:
self._meta.database.propagate(self)
# Else it means we should create the row
else:
# XXX To Do: What happen if insert failed. What should we return
... | conditional_block |
base.py | ################################################################################
# MIT License
#
# Copyright (c) 2017 Jean-Charles Fosse & Johann Bigler
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in t... | (self, name, value):
"""
Overide __setattr__ to update dict value and field value at once
"""
object.__setattr__(self, name, value)
if name in self.dictValues: # If updating a field value
if self._meta.fields[name].salt: # field is salt
# If field is ... | __setattr__ | identifier_name |
base.py | ################################################################################
# MIT License
#
# Copyright (c) 2017 Jean-Charles Fosse & Johann Bigler
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in t... | if dictValues["id"] == self.id:
for field, value in dictValues.iteritems():
self.__setattr__(field, value) |
def propagate_update(self, dictValues): | random_line_split |
rca.rs | // Optimization for RCA
// Ordinarily, just a, b, c, and d are scanned separately and then combined by joins.
// a: (each product, each city) // can be cut on drill 1
// b: (all products, each city)
// c: (each product, all cities) // can be cut on drill 1
// d: (all products, all cities)
//
// Note that external cuts ... | (
table: &TableSql,
cuts: &[CutSql],
drills: &[DrilldownSql],
meas: &[MeasureSql],
rca: &RcaSql,
) -> (String, String)
{
// append the correct rca drill to drilldowns
// for a, both
// for b, d2
// for c, d1
// for d, none
let mut a_drills = drills.to_vec();
let mut b... | calculate | identifier_name |
rca.rs | // Optimization for RCA
// Ordinarily, just a, b, c, and d are scanned separately and then combined by joins.
// a: (each product, each city) // can be cut on drill 1
// b: (all products, each city)
// c: (each product, all cities) // can be cut on drill 1
// d: (all products, all cities)
//
// Note that external cuts ... | else {
format!("groupArray({col}_{alias_postfix}) as {col}_{alias_postfix}_s", col=l.key_column, alias_postfix=alias_postfix)
}
})
});
let group_array_rca_drill_2 = join(group_array_rca_drill_2, ", ");
let join_array_rca_drill_2 = rca.drill_2.iter()
... | {
format!("groupArray({key_col}_{alias_postfix}) as {key_col}_{alias_postfix}_s, groupArray({name_col}_{alias_postfix}) as {name_col}_{alias_postfix}_s", key_col=l.key_column, name_col=name_col, alias_postfix=alias_postfix)
} | conditional_block |
rca.rs | // Optimization for RCA
// Ordinarily, just a, b, c, and d are scanned separately and then combined by joins.
// a: (each product, each city) // can be cut on drill 1
// b: (all products, each city)
// c: (each product, all cities) // can be cut on drill 1
// d: (all products, all cities)
//
// Note that external cuts ... | //
// The optimization is to derive the c and d aggregates from a and b. Since cuts are allowed on the
// first drill in the rca, both a and b have to be scanned (b cannot be cut on the first drill).
//
// In clickhouse there is no partition, so it's trickier to do what looks like two different group
// by.
//
// The g... | // drill dim). | random_line_split |
rca.rs | // Optimization for RCA
// Ordinarily, just a, b, c, and d are scanned separately and then combined by joins.
// a: (each product, each city) // can be cut on drill 1
// b: (all products, each city)
// c: (each product, all cities) // can be cut on drill 1
// d: (all products, all cities)
//
// Note that external cuts ... | {
// append the correct rca drill to drilldowns
// for a, both
// for b, d2
// for c, d1
// for d, none
let mut a_drills = drills.to_vec();
let mut b_drills = drills.to_vec();
let mut c_drills = drills.to_vec();
let d_drills = drills.to_vec();
a_drills.extend_from_slice(&rca... | identifier_body | |
acss.go | // Copyright 2020 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
// package acss implements "Asynchronous Complete Secret Sharing" as described in
//
// https://iotaledger.github.io/crypto-tss/talks/async-dkg/slides-async-dkg.html#/5/6
//
// Here is a copy of the pseudo code from the slide mentioned above (just ... | nc (a *acssImpl) peerIndex(peer gpa.NodeID) int {
for i := range a.peerIdx {
if a.peerIdx[i] == peer {
return i
}
}
return -1
}
func (a *acssImpl) Output() gpa.Output {
if a.output {
return &Output{
PriShare: a.outS,
Commits: a.rbcOut.Commits,
}
}
return nil
}
func (a *acssImpl) StatusString()... |
}
}
fu | identifier_name |
acss.go | // Copyright 2020 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
// package acss implements "Asynchronous Complete Secret Sharing" as described in
//
// https://iotaledger.github.io/crypto-tss/talks/async-dkg/slides-async-dkg.html#/5/6
//
// Here is a copy of the pseudo code from the slide mentioned above (just ... | a.log.Warnf("implicate received from unknown peer: %v", msg.sender)
return nil
}
//
// Check message duplicates.
if _, ok := a.implicateRecv[msg.sender]; ok {
// Received the implicate before, just ignore it.
return nil
}
a.implicateRecv[msg.sender] = true
//
// Check implicate.
secret, err := crypto.C... | // NOTE: We assume `if out == true:` stands for a wait for such condition.
func (a *acssImpl) handleImplicate(msg *msgImplicateRecover) gpa.OutMessages {
peerIndex := a.peerIndex(msg.sender)
if peerIndex == -1 { | random_line_split |
acss.go | // Copyright 2020 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
// package acss implements "Asynchronous Complete Secret Sharing" as described in
//
// https://iotaledger.github.io/crypto-tss/talks/async-dkg/slides-async-dkg.html#/5/6
//
// Here is a copy of the pseudo code from the slide mentioned above (just ... | > to all parties
// >
// > on receiving <READY> from n-f parties:
// > if sᵢ is valid:
// > out = true
// > output sᵢ
func (a *acssImpl) handleVoteREADY(msg *msgVote) gpa.OutMessages {
a.voteREADYRecv[msg.Sender()] = true
count := len(a.voteREADYRecv)
msgs := gpa.NoMessages()
if !a.voteREADYSent && count ... |
if !a.voteREADYSent && count >= (a.n-a.f) {
a.voteREADYSent = true
return a.broadcastVote(msgVoteREADY, gpa.NoMessages())
}
return nil
}
// > on receiving <READY> from f+1 parties:
// > send <READY | identifier_body |
acss.go | // Copyright 2020 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
// package acss implements "Asynchronous Complete Secret Sharing" as described in
//
// https://iotaledger.github.io/crypto-tss/talks/async-dkg/slides-async-dkg.html#/5/6
//
// Here is a copy of the pseudo code from the slide mentioned above (just ... | r the algorithm is the secret to share.
// It can be provided by the dealer only.
func (a *acssImpl) Input(input gpa.Input) gpa.OutMessages {
if a.me != a.dealer {
panic(errors.New("only dealer can initiate the sharing"))
}
if input == nil {
panic(errors.New("we expect kyber.Scalar as input"))
}
return a.handl... | gpa.NewOwnHandler(me, &a)
}
// Input fo | conditional_block |
discord.go | package dolphin
import (
"errors"
"fmt"
"regexp"
"strings"
"github.com/diamondburned/arikawa/api"
"github.com/diamondburned/arikawa/discord"
"github.com/diamondburned/arikawa/gateway"
"github.com/diamondburned/arikawa/state"
"github.com/diamondburned/arikawa/webhook"
"gitlab.com/EbonJaeger/dolphin/rcon"
)
... |
bot.channel = discord.ChannelID(snowflake)
} else {
return nil, errors.New("no channel ID configured")
}
bot.watcher = NewWatcher(self.Username, *Config.Minecraft.CustomDeathKeywords)
return bot, discordErr
}
// Close cleans up the watcher and closes the Discord session.
func (bot *DiscordBot) Close() error... | {
return nil, discordErr
} | conditional_block |
discord.go | package dolphin
import (
"errors"
"fmt"
"regexp"
"strings"
"github.com/diamondburned/arikawa/api"
"github.com/diamondburned/arikawa/discord"
"github.com/diamondburned/arikawa/gateway"
"github.com/diamondburned/arikawa/state"
"github.com/diamondburned/arikawa/webhook"
"gitlab.com/EbonJaeger/dolphin/rcon"
)
... | () {
// Make our messages channel
mc := make(chan *MinecraftMessage)
// Start our Minecraft watcher
go bot.watcher.Watch(mc)
for {
// Read message from the channel
msg := <-mc
Log.Debugf("Received a line from Minecraft: Username='%s', Text='%s'\n", msg.Username, msg.Message)
// Don't send messages that a... | WaitForMessages | identifier_name |
discord.go | package dolphin
import (
"errors"
"fmt"
"regexp"
"strings"
"github.com/diamondburned/arikawa/api"
"github.com/diamondburned/arikawa/discord"
"github.com/diamondburned/arikawa/gateway"
"github.com/diamondburned/arikawa/state"
"github.com/diamondburned/arikawa/webhook"
"gitlab.com/EbonJaeger/dolphin/rcon"
)
... | {
// Format command to send to the Minecraft server
command := fmt.Sprintf("tellraw @a %s", Config.Minecraft.TellrawTemplate)
command = strings.Replace(command, "%username%", username, -1)
command = strings.Replace(command, "%message%", content, -1)
// Create RCON connection
conn, err := rcon.Dial(Config.Minecra... | identifier_body | |
discord.go | package dolphin
import (
"errors"
"fmt"
"regexp"
"strings"
"github.com/diamondburned/arikawa/api"
"github.com/diamondburned/arikawa/discord"
"github.com/diamondburned/arikawa/gateway"
"github.com/diamondburned/arikawa/state"
"github.com/diamondburned/arikawa/webhook"
"gitlab.com/EbonJaeger/dolphin/rcon"
)
... | }
for _, member := range message.Mentions {
content = strings.Replace(content, fmt.Sprintf("<@!%s>", member.ID), fmt.Sprintf("@%s", member.Username), -1)
}
// Escape quote characters
content = strings.Replace(content, "\"", "\\\"", -1)
return content
}
func sendToMinecraft(content, username string) error {
... |
content = strings.Replace(content, fmt.Sprintf("<#%s>", id), fmt.Sprintf("#%s", channel.Name), -1)
} | random_line_split |
mod.rs | use std::cell::RefCell;
use std::collections::HashMap;
use std::fs::{self, File};
use std::path::Path;
use glium;
use glium::backend::Facade;
use image::{self, DynamicImage, GenericImage, Rgba};
use texture_packer::Rect;
use texture_packer::SkylinePacker;
use texture_packer::{TexturePacker, TexturePackerConfig};
use t... |
fn texture_size(&self, texture_idx: usize) -> (u32, u32) {
self.textures[texture_idx].dimensions()
}
fn get_frame(&self, tile_type: &str) -> &AtlasFrame {
let tex_name = &self.config.locations[tile_type];
&self.config.frames[tex_name]
}
pub fn get_tile_texture_idx(&self, ... | {
self.texture_size(frame.texture_idx)
} | identifier_body |
mod.rs | use std::cell::RefCell;
use std::collections::HashMap;
use std::fs::{self, File};
use std::path::Path;
use glium;
use glium::backend::Facade;
use image::{self, DynamicImage, GenericImage, Rgba};
use texture_packer::Rect;
use texture_packer::SkylinePacker;
use texture_packer::{TexturePacker, TexturePackerConfig};
use t... | (&self, tile_idx: usize, msecs: u64) -> (f32, f32) {
let kind = self.get_tile_kind_indexed(tile_idx);
self.get_texture_offset(kind, msecs)
}
pub fn get_texture(&self, idx: usize) -> &Texture2d {
&self.textures[idx]
}
pub fn passes(&self) -> usize {
self.textures.len()
... | get_texture_offset_indexed | identifier_name |
mod.rs | use std::cell::RefCell;
use std::collections::HashMap;
use std::fs::{self, File};
use std::path::Path;
use glium;
use glium::backend::Facade;
use image::{self, DynamicImage, GenericImage, Rgba};
use texture_packer::Rect;
use texture_packer::SkylinePacker;
use texture_packer::{TexturePacker, TexturePackerConfig};
use t... | pub fn new(texture_idx: usize, rect: Rect, tile_size: (u32, u32)) -> Self {
AtlasFrame {
tile_size: tile_size,
texture_idx: texture_idx,
rect: AtlasRect::from(rect),
offsets: HashMap::new(),
}
}
}
pub type TilePacker<'a> = TexturePacker<'a, Dynami... | offsets: HashMap<String, AtlasTile>,
}
impl AtlasFrame { | random_line_split |
compensation_disp.py | from Propagation import selectMode
from Animation import Anim_dyspersji
import matplotlib.pyplot as plt
import numpy as np
def find_accurate_len(actual_len, factor=8):
min_len = factor * actual_len
estimated_len = 256
while estimated_len < min_len:
estimated_len *= 2
return estimated_len
def p... | w, freq_sampling_kHz, omega):
value = -1
for ind in range(len(freq_sampling_kHz)-1):
if freq_sampling_kHz[ind] == omega:
value = G_w[ind]
break
elif freq_sampling_kHz[ind] < omega and freq_sampling_kHz[ind + 1] > omega:
a = (G_w[ind] - G_w[ind+1])/(freq_sampli... | ct[ind] < temp_k and k_vect[ind + 1] > temp_k:
omega = mode.findPointWithGivenK_rad_s([mode.points[ind], mode.points[ind+1]], temp_k)
break
return omega
def find_value_by_omega_in_G_w(G_ | conditional_block |
compensation_disp.py | from Propagation import selectMode
from Animation import Anim_dyspersji
import matplotlib.pyplot as plt
import numpy as np
def find_accurate_len(actual_len, factor=8):
min_len = factor * actual_len
estimated_len = 256
while estimated_len < min_len:
estimated_len *= 2
return estimated_len
def p... | edMode('../../../Dane/25mm_stal/Node4_25_8_8/kvect', '../../../Dane/25mm_stal/Node4_25_8_8/omega')
KD3.selectMode()
KD3.plot_modes(30)
dist = 2 # in meters
signal_array3, time_x_freq3 = Anim_dyspersji.get_chirp()
signal3 = wave_length_propagation([time_x_freq3[0], signal_array3[3]], [1, 2, 3], KD3... | [0]
dt = time[-1]/len(time)
frequency_from_numpy = np.fft.rfftfreq(len(signal[1]), d=dt)*1e-3
G_w = np.sqrt(signal_after_fft.real**2 + signal_after_fft.imag**2)
# find the strongest/mean omega
max_g = G_w[0]
max_ind = 0
for ind, g in enumerate(G_w):
if g>max_g:
... | identifier_body |
compensation_disp.py | from Propagation import selectMode
from Animation import Anim_dyspersji
import matplotlib.pyplot as plt
import numpy as np
def find_accurate_len(actual_len, factor=8):
min_len = factor * actual_len
estimated_len = 256
while estimated_len < min_len:
estimated_len *= 2
return estimated_len
def p... | k_temp = Anim_dyspersji.curve_sampling(mode.all_omega_khz, dispercion_curves.k_v, [f_Nyq_kHz])
if k_temp > max_k_Nyq:
max_k_Nyq = k_temp
return factor*max_k_Nyq[0] # the returned value is in rad/m
def calculate_delta_k(max_v_gr, signal_duration, factor=0.9):
# delta k should be = ... | random_line_split |
compensation_disp.py | from Propagation import selectMode
from Animation import Anim_dyspersji
import matplotlib.pyplot as plt
import numpy as np
def find_accurate_len(actual_len, factor=8):
min_len = factor * actual_len
estimated_len = 256
while estimated_len < min_len:
estimated_len *= 2
return estimated_len
def p... | ft = np.fft.rfft(signal[1])
time = signal[0]
dt = time[-1]/len(time)
frequency_from_numpy = np.fft.rfftfreq(len(signal[1]), d=dt)*1e-3
G_w = np.sqrt(signal_after_fft.real**2 + signal_after_fft.imag**2)
# find the strongest/mean omega
max_g = G_w[0]
max_ind = 0
for ind, g in e... | urves):
signal_after_f | identifier_name |
channelamqp.go | /*
Package mqmetric contains a set of routines common to several
commands used to export MQ metrics to different backend
storage mechanisms including Prometheus and InfluxDB.
*/
package mqmetric
/*
Copyright (c) IBM Corporation 2016, 2022
Licensed under the Apache License, Version 2.0 (the "License");
you may n... | v := elem.Int64Value[0]
if v > 0 {
if ci, ok = infoMap[chlName]; !ok {
ci = new(ObjInfo)
infoMap[chlName] = ci
}
ci.AttrChlType = v
ci.exists = true
}
case ibmmq.MQCACH_DESC:
v := elem.String[0]
if v != "" {
if ci, ok = infoMap[chlName]; !ok {
ci = new(ObjInfo)
... |
case ibmmq.MQIACH_CHANNEL_TYPE: | random_line_split |
channelamqp.go | /*
Package mqmetric contains a set of routines common to several
commands used to export MQ metrics to different backend
storage mechanisms including Prometheus and InfluxDB.
*/
package mqmetric
/*
Copyright (c) IBM Corporation 2016, 2022
Licensed under the Apache License, Version 2.0 (the "License");
you may n... |
for k := range amqpInfoMap {
amqpInfoMap[k].AttrCurInst = 0
}
channelPatterns := strings.Split(patterns, ",")
if len(channelPatterns) == 0 {
traceExit("CollectAMQPChannelStatus", 1)
return nil
}
for _, pattern := range channelPatterns {
pattern = strings.TrimSpace(pattern)
if len(pattern) == 0 {
... | {
st.Attributes[k].Values = make(map[string]*StatusValue)
} | conditional_block |
channelamqp.go | /*
Package mqmetric contains a set of routines common to several
commands used to export MQ metrics to different backend
storage mechanisms including Prometheus and InfluxDB.
*/
package mqmetric
/*
Copyright (c) IBM Corporation 2016, 2022
Licensed under the Apache License, Version 2.0 (the "License");
you may n... |
// Issue the INQUIRE_CHANNEL_STATUS command for a channel or wildcarded channel name
// Collect the responses and build up the statistics. Add CLIENTID(*) to get the actual
// instances instead of an aggregated response
func collectAMQPChannelStatus(pattern string, instanceType int32) error {
var err error
traceEn... | {
var err error
traceEntry("CollectAMQPChannelStatus")
ci := getConnection(GetConnectionKey())
os := &ci.objectStatus[OT_CHANNEL_AMQP]
st := GetObjectStatus(GetConnectionKey(), OT_CHANNEL_AMQP)
os.objectSeen = make(map[string]bool) // Record which channels have been seen in this period
ChannelAMQPInitAttribu... | identifier_body |
channelamqp.go | /*
Package mqmetric contains a set of routines common to several
commands used to export MQ metrics to different backend
storage mechanisms including Prometheus and InfluxDB.
*/
package mqmetric
/*
Copyright (c) IBM Corporation 2016, 2022
Licensed under the Apache License, Version 2.0 (the "License");
you may n... | (objectPatternsList string, infoMap map[string]*ObjInfo) error {
var err error
traceEntry("inquireAMQPChannelAttributes")
ci := getConnection(GetConnectionKey())
statusClearReplyQ()
if objectPatternsList == "" {
traceExitErr("inquireAMQPChannelAttributes", 1, err)
return err
}
objectPatterns := strings.S... | inquireAMQPChannelAttributes | identifier_name |
btckey.go | /* btckeygenie v1.0.0
* https://github.com/vsergeev/btckeygenie
* License: MIT
*/
package btckey
import (
"bytes"
"golang.org/x/crypto/ripemd160"
"crypto/sha256"
"fmt"
"io"
"math/big"
"strings"
)
/******************************************************************************/
/* ECDSA Keypair Generation */... | b = append([]byte{0x3f}, b...)
}
/* Verify checksum */
if len(b) < 5 {
return 0, nil, fmt.Errorf("Invalid base-58 check string: missing checksum.")
}
/* Create a new SHA256 context */
sha256_h := sha256.New()
/* SHA256 Hash #1 */
sha256_h.Reset()
sha256_h.Write(b[:len(b)-4])
hash1 := sha256_h.Sum(nil)
... | break
}
| conditional_block |
btckey.go | /* btckeygenie v1.0.0
* https://github.com/vsergeev/btckeygenie
* License: MIT
*/
package btckey
import (
"bytes"
"golang.org/x/crypto/ripemd160"
"crypto/sha256"
"fmt"
"io"
"math/big"
"strings"
)
/******************************************************************************/
/* ECDSA Keypair Generation */... | return fmt.Errorf("Invalid compressed public key bytes, decompression error: %v", err)
}
pub.X = P.X
pub.Y = P.Y
} else if b[0] == 0x04 {
/* Uncompressed public key */
if len(b) != 65 {
return fmt.Errorf("Invalid public key bytes length %d, expected 65.", len(b))
}
pub.X = new(big.Int).SetBytes... |
P, err := secp256k1.Decompress(new(big.Int).SetBytes(b[1:33]), uint(b[0]&0x1))
if err != nil { | random_line_split |
btckey.go | /* btckeygenie v1.0.0
* https://github.com/vsergeev/btckeygenie
* License: MIT
*/
package btckey
import (
"bytes"
"golang.org/x/crypto/ripemd160"
"crypto/sha256"
"fmt"
"io"
"math/big"
"strings"
)
/******************************************************************************/
/* ECDSA Keypair Generation */... | // ToBytes converts a Bitcoin private key to a 32-byte byte slice.
func (priv *PrivateKey) ToBytes() (b []byte) {
d := priv.D.Bytes()
/* Pad D to 32 bytes */
padded_d := append(bytes.Repeat([]byte{0x3f}, 32-len(d)), d...)
return padded_d
}
// FromBytes converts a 32-byte byte slice to a Bitcoin private key and d... | /* See https://en.bitcoin.it/wiki/Wallet_import_format */
/* Base58 Check Decode the WIF string */
ver, priv_bytes, err := b58checkdecode(wif)
if err != nil {
return false, err
}
/* Check that the version byte is 0x80 */
if ver != 0x80 {
return false, fmt.Errorf("Invalid WIF version 0x%02x, expected 0x80."... | identifier_body |
btckey.go | /* btckeygenie v1.0.0
* https://github.com/vsergeev/btckeygenie
* License: MIT
*/
package btckey
import (
"bytes"
"golang.org/x/crypto/ripemd160"
"crypto/sha256"
"fmt"
"io"
"math/big"
"strings"
)
/******************************************************************************/
/* ECDSA Keypair Generation */... | (address string) {
/* See https://en.bitcoin.it/wiki/Technical_background_of_Bitcoin_addresses */
/* Convert the public key to bytes */
pub_bytes := pub.ToBytesUncompressed()
/* SHA256 Hash */
sha256_h := sha256.New()
sha256_h.Reset()
sha256_h.Write(pub_bytes)
pub_hash_1 := sha256_h.Sum(nil)
/* RIPEMD-160 ... | AddressUncompressed() | identifier_name |
unbond.rs | use crate::contract::{query_total_issued, slashing};
use crate::state::{
get_finished_amount, get_unbond_batches, read_config, read_current_batch, read_parameters,
read_state, read_unbond_history, remove_unbond_wait_list, store_current_batch, store_state,
store_unbond_history, store_unbond_wait_list, Unbond... |
pub fn handle_withdraw_unbonded<S: Storage, A: Api, Q: Querier>(
deps: &mut Extern<S, A, Q>,
env: Env,
) -> StdResult<HandleResponse> {
let sender_human = env.message.sender.clone();
let contract_address = env.contract.address.clone();
// read params
let params = read_parameters(&deps.storage... | {
// Read params
let params = read_parameters(&deps.storage).load()?;
let epoch_period = params.epoch_period;
let threshold = params.er_threshold;
let recovery_fee = params.peg_recovery_fee;
let mut current_batch = read_current_batch(&deps.storage).load()?;
// Check slashing, update state,... | identifier_body |
unbond.rs | use crate::contract::{query_total_issued, slashing};
use crate::state::{
get_finished_amount, get_unbond_batches, read_config, read_current_batch, read_parameters,
read_state, read_unbond_history, remove_unbond_wait_list, store_current_batch, store_state,
store_unbond_history, store_unbond_wait_list, Unbond... | <S: Storage, A: Api, Q: Querier>(
deps: &mut Extern<S, A, Q>,
env: Env,
amount: Uint128,
sender: HumanAddr,
) -> StdResult<HandleResponse> {
// Read params
let params = read_parameters(&deps.storage).load()?;
let epoch_period = params.epoch_period;
let threshold = params.er_threshold;
... | handle_unbond | identifier_name |
unbond.rs | use crate::contract::{query_total_issued, slashing};
use crate::state::{
get_finished_amount, get_unbond_batches, read_config, read_current_batch, read_parameters,
read_state, read_unbond_history, remove_unbond_wait_list, store_current_batch, store_state,
store_unbond_history, store_unbond_wait_list, Unbond... | })?;
// Send the money to the user
let msgs = vec![BankMsg::Send {
from_address: contract_address.clone(),
to_address: sender_human,
amount: coins(withdraw_amount.u128(), &*coin_denom),
}
.into()];
let res = HandleResponse {
messages: msgs,
log: vec![
... | Ok(last_state) | random_line_split |
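The random_line_split rows, like the unbond.rs example ending here, are cut at line boundaries rather than at syntax nodes: the middle is simply a contiguous run of source lines. One plausible construction is sketched below; drawing two random cut points is an assumption, since the dump does not state how the splits were produced.

```python
import random

def make_random_line_split(source: str, rng: random.Random) -> dict:
    """Cut a file at two random line boundaries; the span between the cuts
    becomes the middle. Assumes the file contains at least one line."""
    lines = source.splitlines(keepends=True)
    a, b = sorted(rng.sample(range(len(lines) + 1), 2))
    return {
        "prefix": "".join(lines[:a]),
        "middle": "".join(lines[a:b]),
        "suffix": "".join(lines[b:]),
        "fim_type": "random_line_split",
    }
```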
main.rs | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
anyhow::Context as _,
fidl::prelude::*,
fidl_fuchsia_net_http as net_http,
fuchsia_async::{self as fasync, TimeoutExt as _},
fuch... | (mut self, loader_client: net_http::LoaderClientProxy) -> Result<(), zx::Status> {
let client = fhyper::new_https_client_from_tcp_options(tcp_options());
loop {
break match client.request(self.build_request()).await {
Ok(hyper_response) => {
let redirect =... | start | identifier_name |
main.rs | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
anyhow::Context as _,
fidl::prelude::*,
fidl_fuchsia_net_http as net_http,
fuchsia_async::{self as fasync, TimeoutExt as _},
fuch... |
async fn fetch(
mut self,
) -> Result<(hyper::Response<hyper::Body>, hyper::Uri, hyper::Method), net_http::Error> {
let deadline = self.deadline;
if deadline < fasync::Time::now() {
return Err(net_http::Error::DeadlineExceeded);
}
let client = fhyper::new_ht... | {
let client = fhyper::new_https_client_from_tcp_options(tcp_options());
loop {
break match client.request(self.build_request()).await {
Ok(hyper_response) => {
let redirect = redirect_info(&self.url, &self.method, &hyper_response);
if ... | identifier_body |
main.rs | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
anyhow::Context as _,
fidl::prelude::*,
fidl_fuchsia_net_http as net_http,
fuchsia_async::{self as fasync, TimeoutExt as _},
fuch... | }
fn to_error_response(error: net_http::Error) -> net_http::Response {
net_http::Response {
error: Some(error),
body: None,
final_url: None,
status_code: None,
status_line: None,
headers: None,
redirect: None,
..net_http::Response::EMPTY
}
}
stru... | random_line_split | |
spritecfg.rs | #![allow(dead_code)]
extern crate asar;
use std::path::{PathBuf, Path};
use std::io::prelude::*;
use std::fs::{File, OpenOptions};
use nom::*;
use asar::rom::RomBuf;
use parse_aux::dys_prefix;
use genus::Genus;
use dys_tables::DysTables;
use insert_err::{InsertResult, format_result, warnless_result, single_error};
#... |
fn read_byte(s: &str) -> Result<u8, CfgErr> {
let iter = s.trim().chars();
let mut n = 0u32;
let mut read = false;
for ch in iter {
if let Some(v) = ch.to_digit(0x10) {
n *= 0x10;
n += v;
read = true;
} else {
return Err(CfgErr { explain: String::from("Non-byte data in byte field") })
}
}
if... | {
let mut it = buf.split_whitespace().skip(1);
let mut d = [0u8; 9];
for output_byte in &mut d {
if let Some(s) = it.next() {
*output_byte = try!(read_byte(s));
} else {
return Err(CfgErr{ explain: String::from("Old-style CFG too short") });
}
};
let (name, name_set) = default_name(path, gen, id);
le... | identifier_body |
spritecfg.rs | #![allow(dead_code)]
extern crate asar;
use std::path::{PathBuf, Path};
use std::io::prelude::*;
use std::fs::{File, OpenOptions};
use nom::*;
use asar::rom::RomBuf;
use parse_aux::dys_prefix;
use genus::Genus;
use dys_tables::DysTables;
use insert_err::{InsertResult, format_result, warnless_result, single_error};
#... |
if init == 0 && self.needs_init() {
return single_error("No init routine");
}
if drop == 0 && self.needs_drop() {
return single_error("Drop routine required by dys_opts, but not provided");
}
if drop != 0 && !self.needs_drop() {
return single_error("Sprite... | {
return single_error("No main routine");
} | conditional_block |
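For conditional_block rows the held-out middle is a brace-delimited branch body, as in the { return single_error("No main routine"); } span that closes the row above. The brace-counting sketch below conveys the idea; it deliberately ignores strings and comments, which a real extractor would need to handle.

```python
def make_conditional_block_example(source: str) -> dict | None:
    """Mask the brace-delimited body of the first `if`, mirroring the
    conditional_block rows. Naive brace counting; a sketch, not a parser."""
    start = source.find("if ")
    if start == -1:
        return None
    open_brace = source.find("{", start)
    if open_brace == -1:
        return None
    depth = 0
    for i in range(open_brace, len(source)):
        if source[i] == "{":
            depth += 1
        elif source[i] == "}":
            depth -= 1
            if depth == 0:
                return {
                    "prefix": source[:open_brace],
                    "middle": source[open_brace : i + 1],
                    "suffix": source[i + 1 :],
                    "fim_type": "conditional_block",
                }
    return None
```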
spritecfg.rs | #![allow(dead_code)]
extern crate asar;
use std::path::{PathBuf, Path};
use std::io::prelude::*;
use std::fs::{File, OpenOptions};
use nom::*;
use asar::rom::RomBuf;
use parse_aux::dys_prefix;
use genus::Genus;
use dys_tables::DysTables;
use insert_err::{InsertResult, format_result, warnless_result, single_error};
#... | (&self, ebit: bool) -> &String {
if ebit && self.name_set.is_some() {
self.name_set.as_ref().unwrap()
} else {
&self.name
}
}
pub fn desc(&self, ebit: bool) -> &String {
if ebit && self.desc_set.is_some() {
self.desc_set.as_ref().unwrap()
} else {
&self.desc
}
}
pub fn uses_ebit(&self) -> ... | name | identifier_name |
spritecfg.rs | #![allow(dead_code)]
extern crate asar;
use std::path::{PathBuf, Path};
use std::io::prelude::*;
use std::fs::{File, OpenOptions};
use nom::*;
use asar::rom::RomBuf;
use parse_aux::dys_prefix;
use genus::Genus;
use dys_tables::DysTables;
use insert_err::{InsertResult, format_result, warnless_result, single_error};
#... | {
let mut tempasm = OpenOptions::new()
.write(true)
.truncate(true)
.create(true)
.open(temp)
.unwrap();
tempasm.write_all(prelude.as_bytes()).unwrap();
let mut source_buf = Vec::<u8>::with_capacity(8 ... | random_line_split | |
models.py | # encoding: utf-8
import datetime
from django.db import models
from django.contrib.auth.models import User
from django.db.models import signals
from django.core import validators
from django.core.exceptions import ValidationError
import time
from django.db.models import Q
from datetime import date
import model_audit... | :
name = models.CharField("Name", max_length=100, unique=True)
description = models.TextField("Beschreibung", max_length=1000, default="")
core = models.BooleanField("Kernbereich", default=False)
hidden = models.BooleanField("versteckt", default=False)
coordinator = models.ForeignKey(Loco, on_delete... | bose_name_plural = "Anteilscheine"
class Taetigkeitsbereich(models.Model) | identifier_body |
models.py | # encoding: utf-8
import datetime
from django.db import models
from django.contrib.auth.models import User
from django.db.models import signals
from django.core import validators
from django.core.exceptions import ValidationError
import time
from django.db.models import Q
from datetime import date
import model_audit... |
print 'get_abos_by_size', self, result
return result
"""
def small_abos(self):
return len(self.active_abos().filter(Q(groesse=1) | Q(groesse=3)))
def big_abos(self):
return len(self.active_abos().filter(Q(groesse=2) | Q(groesse=3) | Q(groesse=4))) + len(self.act... | if abo_size is not Abo.SIZE_NONE:
result[abo_size] = len(self.active_abos().filter(groesse=abo_size)) | conditional_block |
models.py | # encoding: utf-8
import datetime
from django.db import models
from django.contrib.auth.models import User
from django.db.models import signals
from django.core import validators
from django.core.exceptions import ValidationError
import time
from django.db.models import Q
from datetime import date
import model_audit... | # user class is only used for logins, permissions, and other builtin django stuff
# all user information should be stored in the Loco model
user = models.OneToOneField(User, related_name='loco', null=True, blank=True)
first_name = models.CharField("Vorname", max_length=30)
last_name = models.CharFi... | random_line_split | |
models.py | # encoding: utf-8
import datetime
from django.db import models
from django.contrib.auth.models import User
from django.db.models import signals
from django.core import validators
from django.core.exceptions import ValidationError
import time
from django.db.models import Q
from datetime import date
import model_audit... | ):
"""
Callback to create corresponding loco when new user is created.
"""
if created:
username = helpers.make_username(instance.first_name, instance.last_name, instance.email)
user = User(username=username)
user.save()
user = User.objects.... | **kdws | identifier_name |
jwt.rs | //! # A Firestore Auth Session token is a Javascript Web Token (JWT). This module contains JWT helper functions.
use crate::credentials::Credentials;
use crate::errors::FirebaseError;
use biscuit::jwa::SignatureAlgorithm;
use biscuit::{ClaimPresenceOptions, SingleOrMultiple, StringOrUri, ValidationOptions};
use chrono... | .get(&format!(
"https://www.googleapis.com/service_accounts/v1/jwk/{}",
account_mail
))
.send()
.await?;
let jwk_set: JWKSetDTO = resp.json().await?;
Ok(jwk_set)
}
/// Returns true if the access token (assumed to be a jwt) has expired
///
/// An error is ... | pub async fn download_google_jwks_async(account_mail: &str) -> Result<JWKSetDTO, Error> {
let resp = reqwest::Client::new() | random_line_split |
jwt.rs | //! # A Firestore Auth Session token is a Javascript Web Token (JWT). This module contains JWT helper functions.
use crate::credentials::Credentials;
use crate::errors::FirebaseError;
use biscuit::jwa::SignatureAlgorithm;
use biscuit::{ClaimPresenceOptions, SingleOrMultiple, StringOrUri, ValidationOptions};
use chrono... | {
#[serde(flatten)]
pub(crate) headers: biscuit::jws::RegisteredHeader,
#[serde(flatten)]
pub(crate) ne: biscuit::jwk::RSAKeyParameters,
}
#[derive(Serialize, Deserialize)]
pub struct JWKSetDTO {
pub keys: Vec<JWSEntry>,
}
/// Download the Google JWK Set for a given service account.
/// The resul... | JWSEntry | identifier_name |
jwt.rs | //! # A Firestore Auth Session token is a Javascript Web Token (JWT). This module contains JWT helper functions.
use crate::credentials::Credentials;
use crate::errors::FirebaseError;
use biscuit::jwa::SignatureAlgorithm;
use biscuit::{ClaimPresenceOptions, SingleOrMultiple, StringOrUri, ValidationOptions};
use chrono... |
}
pub(crate) fn verify_access_token(
credentials: &Credentials,
access_token: &str,
) -> Result<TokenValidationResult, Error> {
verify_access_token_with_claims(credentials, access_token)
}
pub fn verify_access_token_with_claims<T: PrivateClaims>(
credentials: &Credentials,
access_token: &str,
) -... | {
self.claims.get_scopes()
} | identifier_body |
jwt.rs | //! # A Firestore Auth Session token is a Javascript Web Token (JWT). This module contains JWT helper functions.
use crate::credentials::Credentials;
use crate::errors::FirebaseError;
use biscuit::jwa::SignatureAlgorithm;
use biscuit::{ClaimPresenceOptions, SingleOrMultiple, StringOrUri, ValidationOptions};
use chrono... |
Ok(true)
}
/// Returns true if the jwt was updated and needs signing
pub(crate) fn jwt_update_expiry_if(jwt: &mut AuthClaimsJWT, expire_in_minutes: i64) -> bool {
let ref mut claims = jwt.payload_mut().unwrap().registered;
let now = biscuit::Timestamp::from(Utc::now());
if let Some(issued_at) = clai... | {
let diff: Duration = Utc::now().signed_duration_since(expiry.deref().clone());
return Ok(diff.num_minutes() - tolerance_in_minutes > 0);
} | conditional_block |
spy.rs | use crate::{backend::Backend, error::error};
use cloudevents::{
event::{Data, ExtensionValue},
AttributesReader, Event,
};
use drogue_cloud_service_api::{EXT_APPLICATION, EXT_DEVICE};
use itertools::Itertools;
use patternfly_yew::*;
use unicode_segmentation::UnicodeSegmentation;
use wasm_bindgen::{closure::Clos... | let on_error = Closure::wrap(Box::new(move || {
link.send_message(Msg::Failed);
}) as Box<dyn FnMut()>);
source.set_onerror(Some(&on_error.into_js_value().into()));
// store result
self.running = true;
self.source = Some(source);
}
fn stop(&mut self... | // setup onerror
let link = self.link.clone(); | random_line_split |
spy.rs | use crate::{backend::Backend, error::error};
use cloudevents::{
event::{Data, ExtensionValue},
AttributesReader, Event,
};
use drogue_cloud_service_api::{EXT_APPLICATION, EXT_DEVICE};
use itertools::Itertools;
use patternfly_yew::*;
use unicode_segmentation::UnicodeSegmentation;
use wasm_bindgen::{closure::Clos... | String, pub Html);
impl TableRenderer for AttributeEntry {
fn render(&self, index: ColumnIndex) -> Html {
match index.index {
0 => html! {&self.0},
1 => self.1.clone(),
_ => html! {},
}
}
}
fn render_details(event: &Event) -> Html {
let mut attrs: Vec<A... | ibuteEntry(pub | identifier_name |
service.rs | use std::io::Read;
use std::sync::{Arc, Mutex};
use std::collections::HashMap;
use futures::{self, Future, BoxFuture};
use curl::easy::{Easy, List};
use tokio_core::reactor::Handle;
use tokio_curl::{Session, PerformError};
use serde_json::{from_value, from_str, Value};
pub type Fut<T> = BoxFuture<T, PerformError>;... | if task.is_none() {
continue;
}
let task = task.unwrap();
timestamp = task.timestamp;
cpus.push(task.cpus_user_time_secs + task.cpus_system_time_secs);
mems.push(100.0 * task.mem_rss_bytes as f64 /
... | let mut timestamp: f64 = 0.0;
for task in tasks { | random_line_split |
service.rs | use std::io::Read;
use std::sync::{Arc, Mutex};
use std::collections::HashMap;
use futures::{self, Future, BoxFuture};
use curl::easy::{Easy, List};
use tokio_core::reactor::Handle;
use tokio_curl::{Session, PerformError};
use serde_json::{from_value, from_str, Value};
pub type Fut<T> = BoxFuture<T, PerformError>;... |
pub fn scale(&mut self, app: &App) -> Fut<()> {
let instances = (app.instances as f64 * self.multiplier).ceil() as i64;
if instances > app.max_instances {
info!("Cannot scale {}, reached maximum instances of: {}",
app.name, app.max_instances);
return futur... | {
let mut futs = Vec::new();
for (id, slave_id) in &app.tasks {
let url = slaves.get::<String>(&slave_id).unwrap().to_string();
futs.push(self.get_task_statistic(url, id));
}
let mut prev_timestamp = 0.0;
let mut prev_cpu_time = 0.0;
if let Some... | identifier_body |
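Rows in this shape stream well through standard tooling. The snippet below sketches loading such a dump with the Hugging Face datasets library; the dataset id is a hypothetical placeholder, not a reference to a published dataset.

```python
from datasets import load_dataset

# "user/fim-code-corpus" is a placeholder id, assumed for illustration only.
ds = load_dataset("user/fim-code-corpus", split="train")

for row in ds.select(range(3)):
    # Columns mirror this dump: file_name, prefix, suffix, middle, fim_type.
    print(row["file_name"], row["fim_type"], len(row["middle"]))
```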